1/* RTL dead store elimination.
2 Copyright (C) 2005-2025 Free Software Foundation, Inc.
3
4 Contributed by Richard Sandiford <rsandifor@codesourcery.com>
5 and Kenneth Zadeck <zadeck@naturalbridge.com>
6
7This file is part of GCC.
8
9GCC is free software; you can redistribute it and/or modify it under
10the terms of the GNU General Public License as published by the Free
11Software Foundation; either version 3, or (at your option) any later
12version.
13
14GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15WARRANTY; without even the implied warranty of MERCHANTABILITY or
16FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17for more details.
18
19You should have received a copy of the GNU General Public License
20along with GCC; see the file COPYING3. If not see
21<http://www.gnu.org/licenses/>. */
22
24
25#include "config.h"
26#include "system.h"
27#include "coretypes.h"
28#include "backend.h"
29#include "target.h"
30#include "rtl.h"
31#include "tree.h"
32#include "gimple.h"
33#include "predict.h"
34#include "df.h"
35#include "memmodel.h"
36#include "tm_p.h"
37#include "gimple-ssa.h"
38#include "expmed.h"
39#include "optabs.h"
40#include "emit-rtl.h"
41#include "recog.h"
42#include "alias.h"
43#include "stor-layout.h"
44#include "cfgrtl.h"
45#include "cselib.h"
46#include "tree-pass.h"
47#include "explow.h"
48#include "expr.h"
49#include "dbgcnt.h"
50#include "rtl-iter.h"
51#include "cfgcleanup.h"
52#include "calls.h"
53
54/* This file contains three techniques for performing Dead Store
55 Elimination (dse).
56
   * The first technique performs dse locally on any base address.  It
   is based on cselib, which is a local value numbering technique.
   This technique is local to a basic block but deals with fairly
   general addresses.
61
62 * The second technique performs dse globally but is restricted to
63 base addresses that are either constant or are relative to the
64 frame_pointer.
65
   * The third technique (which is only done after register allocation)
67 processes the spill slots. This differs from the second
68 technique because it takes advantage of the fact that spilling is
69 completely free from the effects of aliasing.
70
71 Logically, dse is a backwards dataflow problem. A store can be
   deleted if it cannot be reached in the backward direction by any
73 use of the value being stored. However, the local technique uses a
74 forwards scan of the basic block because cselib requires that the
75 block be processed in that order.
76
77 The pass is logically broken into 7 steps:
78
79 0) Initialization.
80
81 1) The local algorithm, as well as scanning the insns for the two
82 global algorithms.
83
84 2) Analysis to see if the global algs are necessary. In the case
   of stores based on a constant address, there must be at least two
86 stores to that address, to make it possible to delete some of the
87 stores. In the case of stores off of the frame or spill related
88 stores, only one store to an address is necessary because those
89 stores die at the end of the function.
90
91 3) Set up the global dataflow equations based on processing the
92 info parsed in the first step.
93
94 4) Solve the dataflow equations.
95
96 5) Delete the insns that the global analysis has indicated are
97 unnecessary.
98
   6) Delete insns that store the same value as a preceding store,
   where the earlier store couldn't be eliminated.
101
102 7) Cleanup.
103
   The first step uses cselib and canon_rtx to build the largest expression
   possible for each address.  It is a forwards pass through
106 each basic block. From the point of view of the global technique,
107 the first pass could examine a block in either direction. The
108 forwards ordering is to accommodate cselib.
109
110 We make a simplifying assumption: addresses fall into four broad
111 categories:
112
   1) base has rtx_varies_p == false, offset is constant.
   2) base has rtx_varies_p == false, offset is variable.
   3) base has rtx_varies_p == true, offset is constant.
   4) base has rtx_varies_p == true, offset is variable.
117
118 The local passes are able to process all 4 kinds of addresses. The
119 global pass only handles 1).
120
121 The global problem is formulated as follows:
122
123 A store, S1, to address A, where A is not relative to the stack
124 frame, can be eliminated if all paths from S1 to the end of the
125 function contain another store to A before a read to A.
126
127 If the address A is relative to the stack frame, a store S2 to A
128 can be eliminated if there are no paths from S2 that reach the
129 end of the function that read A before another store to A. In
130 this case S2 can be deleted if there are paths from S2 to the
131 end of the function that have no reads or writes to A. This
132 second case allows stores to the stack frame to be deleted that
   would otherwise die when the function returns.  This cannot be
   done if stores_off_frame_dead_at_return is false; see the comment
   on that variable for when that is the case.
136
   The global problem is formulated as a backwards set intersection
   dataflow problem where the stores are the gens and reads are the
   kills.  Set intersection problems are rare and require some special
140 handling given our representation of bitmaps. A straightforward
141 implementation requires a lot of bitmaps filled with 1s.
142 These are expensive and cumbersome in our bitmap formulation so
143 care has been taken to avoid large vectors filled with 1s. See
144 the comments in bb_info and in the dataflow confluence functions
145 for details.
146
147 There are two places for further enhancements to this algorithm:
148
   1) The original dse, which was embedded in a pass called flow, also
   did local address forwarding.  For example in
151
152 A <- r100
153 ... <- A
154
155 flow would replace the right hand side of the second insn with a
   reference to r100.  Most of the information needed to add this is
   already available in this pass.  It has not been done because it is
   a lot of work in the case that either r100 is assigned to between
   the first and second insns and/or the second insn is a load of part
   of the value stored by the first insn.
161
162 insn 5 in gcc.c-torture/compile/990203-1.c simple case.
163 insn 15 in gcc.c-torture/execute/20001017-2.c simple case.
164 insn 25 in gcc.c-torture/execute/20001026-1.c simple case.
165 insn 44 in gcc.c-torture/execute/20010910-1.c simple case.
166
167 2) The cleaning up of spill code is quite profitable. It currently
168 depends on reading tea leaves and chicken entrails left by reload.
169 This pass depends on reload creating a singleton alias set for each
170 spill slot and telling the next dse pass which of these alias sets
171 are the singletons. Rather than analyze the addresses of the
172 spills, dse's spill processing just does analysis of the loads and
173 stores that use those alias sets. There are three cases where this
174 falls short:
175
176 a) Reload sometimes creates the slot for one mode of access, and
177 then inserts loads and/or stores for a smaller mode. In this
178 case, the current code just punts on the slot. The proper thing
179 to do is to back out and use one bit vector position for each
180 byte of the entity associated with the slot. This depends on
181 KNOWING that reload always generates the accesses for each of the
182 bytes in some canonical (read that easy to understand several
183 passes after reload happens) way.
184
   b) Reload sometimes decides that the spill slot it allocated was not
   large enough for the mode and goes back and allocates more slots
   with the same mode and alias set.  The backout in this case is a
   little more graceful than (a).  In this case the slot is unmarked
   as being a spill slot and, if the final address comes out to be based
   off the frame pointer, the global algorithm handles this slot.
191
192 c) For any pass that may prespill, there is currently no
193 mechanism to tell the dse pass that the slot being used has the
194 special properties that reload uses. It may be that all that is
195 required is to have those passes make the same calls that reload
196 does, assuming that the alias sets can be manipulated in the same
197 way. */
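
/* As a purely illustrative (and hypothetical) example of the kind of
   code the pass targets, in

     int f (int *p)
     {
       *p = 1;
       *p = 2;
       return *p;
     }

   the store of 1 is dead because every path to the end of the
   function stores to *p again before reading it, while in

     void g (void)
     {
       int local = 3;
     }

   the store to LOCAL is a frame-based store with no later read, so it
   dies when the function returns.  */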
198
199/* There are limits to the size of constant offsets we model for the
   global problem.  There are certainly test cases that exceed this
   limit; however, it is unlikely that there are important programs
202 that really have constant offsets this size. */
203#define MAX_OFFSET (64 * 1024)
204
205/* Obstack for the DSE dataflow bitmaps. We don't want to put these
206 on the default obstack because these bitmaps can grow quite large
207 (~2GB for the small (!) test case of PR54146) and we'll hold on to
208 all that memory until the end of the compiler run.
209 As a bonus, delete_tree_live_info can destroy all the bitmaps by just
210 releasing the whole obstack. */
211static bitmap_obstack dse_bitmap_obstack;
212
213/* Obstack for other data. As for above: Kinda nice to be able to
214 throw it all away at the end in one big sweep. */
215static struct obstack dse_obstack;
216
217/* Scratch bitmap for cselib's cselib_expand_value_rtx. */
218static bitmap scratch = NULL;
219
220struct insn_info_type;
221
222/* This structure holds information about a candidate store. */
223class store_info
224{
225public:
226
227 /* False means this is a clobber. */
228 bool is_set;
229
230 /* False if a single HOST_WIDE_INT bitmap is used for positions_needed. */
231 bool is_large;
232
233 /* The id of the mem group of the base address. If rtx_varies_p is
234 true, this is -1. Otherwise, it is the index into the group
235 table. */
236 int group_id;
237
238 /* This is the cselib value. */
239 cselib_val *cse_base;
240
  /* The canonized mem.  */
242 rtx mem;
243
244 /* Canonized MEM address for use by canon_true_dependence. */
245 rtx mem_addr;
246
247 /* The offset of the first byte associated with the operation. */
248 poly_int64 offset;
249
250 /* The number of bytes covered by the operation. This is always exact
251 and known (rather than -1). */
252 poly_int64 width;
253
254 /* The address space that the memory reference uses. */
255 unsigned char addrspace;
256
257 union
258 {
259 /* A bitmask as wide as the number of bytes in the word that
260 contains a 1 if the byte may be needed. The store is unused if
261 all of the bits are 0. This is used if IS_LARGE is false. */
262 unsigned HOST_WIDE_INT small_bitmask;
263
264 struct
265 {
266 /* A bitmap with one bit per byte, or null if the number of
267 bytes isn't known at compile time. A cleared bit means
268 the position is needed. Used if IS_LARGE is true. */
269 bitmap bmap;
270
271 /* When BITMAP is nonnull, this counts the number of set bits
272 (i.e. unneeded bytes) in the bitmap. If it is equal to
273 WIDTH, the whole store is unused.
274
275 When BITMAP is null:
276 - the store is definitely not needed when COUNT == 1
277 - all the store is needed when COUNT == 0 and RHS is nonnull
278 - otherwise we don't know which parts of the store are needed. */
279 int count;
280 } large;
281 } positions_needed;
282
283 /* The next store info for this insn. */
284 class store_info *next;
285
286 /* The right hand side of the store. This is used if there is a
     subsequent reload of the mem's address somewhere later in the
288 basic block. */
289 rtx rhs;
290
291 /* If rhs is or holds a constant, this contains that constant,
292 otherwise NULL. */
293 rtx const_rhs;
294
  /* Set if this store stores the same constant value as the
     REDUNDANT_REASON insn stored.  These aren't eliminated early,
     because doing that might prevent the earlier, larger store from
     being eliminated.  */
298 struct insn_info_type *redundant_reason;
299};
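
/* An illustrative sketch (not part of the original commentary) of how
   POSITIONS_NEEDED evolves for a store that fits in a word: an 8-byte
   store starts with small_bitmask == 0xff (one bit per byte).  If a
   later store covers bytes 4..7, those bits are cleared; once the mask
   reaches 0 every byte has been overwritten and the store is dead.  */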
300
301/* Return a bitmask with the first N low bits set. */
302
303static unsigned HOST_WIDE_INT
304lowpart_bitmask (int n)
305{
306 unsigned HOST_WIDE_INT mask = HOST_WIDE_INT_M1U;
307 return mask >> (HOST_BITS_PER_WIDE_INT - n);
308}
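
/* For example, lowpart_bitmask (3) yields 0x7 and
   lowpart_bitmask (HOST_BITS_PER_WIDE_INT) yields an all-ones mask;
   the shift above is only well defined for N in
   [1, HOST_BITS_PER_WIDE_INT].  */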
309
310static object_allocator<store_info> cse_store_info_pool ("cse_store_info_pool");
311
312static object_allocator<store_info> rtx_store_info_pool ("rtx_store_info_pool");
313
314/* This structure holds information about a load. These are only
315 built for rtx bases. */
316class read_info_type
317{
318public:
319 /* The id of the mem group of the base address. */
320 int group_id;
321
322 /* The offset of the first byte associated with the operation. */
323 poly_int64 offset;
324
325 /* The number of bytes covered by the operation, or -1 if not known. */
326 poly_int64 width;
327
328 /* The mem being read. */
329 rtx mem;
330
331 /* The next read_info for this insn. */
332 class read_info_type *next;
333};
334typedef class read_info_type *read_info_t;
335
336static object_allocator<read_info_type> read_info_type_pool ("read_info_pool");
337
338/* One of these records is created for each insn. */
339
340struct insn_info_type
341{
342 /* Set true if the insn contains a store but the insn itself cannot
343 be deleted. This is set if the insn is a parallel and there is
     more than one non-dead output or if the insn is in some way
345 volatile. */
346 bool cannot_delete;
347
348 /* This field is only used by the global algorithm. It is set true
     if the insn contains any read of mem except for a category (1)
     address.  This is also set if the insn is a call or has a clobber
     of mem.  If the insn contains a wild read, the read_rec will be
     null.  */
352 bool wild_read;
353
354 /* This is true only for CALL instructions which could potentially read
355 any non-frame memory location. This field is used by the global
356 algorithm. */
357 bool non_frame_wild_read;
358
359 /* This field is only used for the processing of const functions.
360 These functions cannot read memory, but they can read the stack
361 because that is where they may get their parms. We need to be
362 this conservative because, like the store motion pass, we don't
363 consider CALL_INSN_FUNCTION_USAGE when processing call insns.
364 Moreover, we need to distinguish two cases:
365 1. Before reload (register elimination), the stores related to
366 outgoing arguments are stack pointer based and thus deemed
367 of non-constant base in this pass. This requires special
368 handling but also means that the frame pointer based stores
369 need not be killed upon encountering a const function call.
370 2. After reload, the stores related to outgoing arguments can be
371 either stack pointer or hard frame pointer based. This means
372 that we have no other choice than also killing all the frame
373 pointer based stores upon encountering a const function call.
374 This field is set after reload for const function calls and before
375 reload for const tail function calls on targets where arg pointer
376 is the frame pointer. Having this set is less severe than a wild
377 read, it just means that all the frame related stores are killed
378 rather than all the stores. */
379 bool frame_read;
380
381 /* This field is only used for the processing of const functions.
382 It is set if the insn may contain a stack pointer based store. */
383 bool stack_pointer_based;
384
385 /* This is true if any of the sets within the store contains a
386 cselib base. Such stores can only be deleted by the local
387 algorithm. */
388 bool contains_cselib_groups;
389
390 /* The insn. */
391 rtx_insn *insn;
392
393 /* The list of mem sets or mem clobbers that are contained in this
394 insn. If the insn is deletable, it contains only one mem set.
395 But it could also contain clobbers. Insns that contain more than
     one mem set are not deletable, but each of those mems is here in
397 order to provide info to delete other insns. */
398 store_info *store_rec;
399
400 /* The linked list of mem uses in this insn. Only the reads from
     rtx bases are listed here.  The reads from cselib bases are
402 completely processed during the first scan and so are never
403 created. */
404 read_info_t read_rec;
405
406 /* The live fixed registers. We assume only fixed registers can
407 cause trouble by being clobbered from an expanded pattern;
408 storing only the live fixed registers (rather than all registers)
409 means less memory needs to be allocated / copied for the individual
410 stores. */
411 regset fixed_regs_live;
412
413 /* The prev insn in the basic block. */
414 struct insn_info_type * prev_insn;
415
416 /* The linked list of insns that are in consideration for removal in
417 the forwards pass through the basic block. This pointer may be
418 trash as it is not cleared when a wild read occurs. The only
419 time it is guaranteed to be correct is when the traversal starts
420 at active_local_stores. */
421 struct insn_info_type * next_local_store;
422};
423typedef struct insn_info_type *insn_info_t;
424
425static object_allocator<insn_info_type> insn_info_type_pool ("insn_info_pool");
426
427/* The linked list of stores that are under consideration in this
428 basic block. */
429static insn_info_t active_local_stores;
430static int active_local_stores_len;
431
432struct dse_bb_info_type
433{
434 /* Pointer to the insn info for the last insn in the block. These
435 are linked so this is how all of the insns are reached. During
436 scanning this is the current insn being scanned. */
437 insn_info_t last_insn;
438
439 /* The info for the global dataflow problem. */
440
441
  /* This is set if the transfer function should AND in the wild_read
443 bitmap before applying the kill and gen sets. That vector knocks
444 out most of the bits in the bitmap and thus speeds up the
445 operations. */
446 bool apply_wild_read;
447
448 /* The following 4 bitvectors hold information about which positions
449 of which stores are live or dead. They are indexed by
450 get_bitmap_index. */
451
452 /* The set of store positions that exist in this block before a wild read. */
453 bitmap gen;
454
455 /* The set of load positions that exist in this block above the
456 same position of a store. */
457 bitmap kill;
458
459 /* The set of stores that reach the top of the block without being
460 killed by a read.
461
462 Do not represent the in if it is all ones. Note that this is
463 what the bitvector should logically be initialized to for a set
464 intersection problem. However, like the kill set, this is too
465 expensive. So initially, the in set will only be created for the
466 exit block and any block that contains a wild read. */
467 bitmap in;
468
  /* The set of stores that reach the bottom of the block from its
     successors.
471
     Do not represent the out if it is all ones.  Note that this is
473 what the bitvector should logically be initialized to for a set
474 intersection problem. However, like the kill and in set, this is
475 too expensive. So what is done is that the confluence operator
476 just initializes the vector from one of the out sets of the
477 successors of the block. */
478 bitmap out;
479
480 /* The following bitvector is indexed by the reg number. It
481 contains the set of regs that are live at the current instruction
482 being processed. While it contains info for all of the
483 registers, only the hard registers are actually examined. It is used
484 to assure that shift and/or add sequences that are inserted do not
485 accidentally clobber live hard regs. */
486 bitmap regs_live;
487};
488
489typedef struct dse_bb_info_type *bb_info_t;
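
/* Roughly, and glossing over the missing/implicit all-ones sets and
   the wild_read handling described above, the global problem solved
   over these sets is

     out(bb) = intersection over successors S of in(S)
     in(bb)  = gen(bb) | (out(bb) & ~kill(bb))

   i.e. a store position survives a block boundary only if it reaches
   the top of every successor.  This is only a rough sketch; the
   confluence and transfer functions later in the file are
   authoritative.  */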
490
491static object_allocator<dse_bb_info_type> dse_bb_info_type_pool
492 ("bb_info_pool");
493
494/* Table to hold all bb_infos. */
495static bb_info_t *bb_table;
496
497/* There is a group_info for each rtx base that is used to reference
498 memory. There are also not many of the rtx bases because they are
499 very limited in scope. */
500
501struct group_info
502{
503 /* The actual base of the address. */
504 rtx rtx_base;
505
506 /* The sequential id of the base. This allows us to have a
507 canonical ordering of these that is not based on addresses. */
508 int id;
509
510 /* True if there are any positions that are to be processed
511 globally. */
512 bool process_globally;
513
514 /* True if the base of this group is either the frame_pointer or
515 hard_frame_pointer. */
516 bool frame_related;
517
518 /* A mem wrapped around the base pointer for the group in order to do
519 read dependency. It must be given BLKmode in order to encompass all
520 the possible offsets from the base. */
521 rtx base_mem;
522
523 /* Canonized version of base_mem's address. */
524 rtx canon_base_addr;
525
526 /* These two sets of two bitmaps are used to keep track of how many
527 stores are actually referencing that position from this base. We
528 only do this for rtx bases as this will be used to assign
529 positions in the bitmaps for the global problem. Bit N is set in
530 store1 on the first store for offset N. Bit N is set in store2
531 for the second store to offset N. This is all we need since we
532 only care about offsets that have two or more stores for them.
533
534 The "_n" suffix is for offsets less than 0 and the "_p" suffix is
535 for 0 and greater offsets.
536
537 There is one special case here, for stores into the stack frame,
     we will OR store1 into store2 before deciding which stores to look
     at globally.  This is because stores to the stack frame that have
540 no other reads before the end of the function can also be
541 deleted. */
542 bitmap store1_n, store1_p, store2_n, store2_p;
543
  /* These bitmaps keep track of which offsets in this group escape
     this function.  An offset escapes if it corresponds to a named
     variable whose addressable flag is set.  */
547 bitmap escaped_n, escaped_p;
548
549 /* The positions in this bitmap have the same assignments as the in,
550 out, gen and kill bitmaps. This bitmap is all zeros except for
551 the positions that are occupied by stores for this group. */
552 bitmap group_kill;
553
554 /* The offset_map is used to map the offsets from this base into
     positions in the global bitmaps.  It is only created after all of
     the stores have been scanned and we know which ones we
557 care about. */
558 int *offset_map_n, *offset_map_p;
559 int offset_map_size_n, offset_map_size_p;
560};
561
562static object_allocator<group_info> group_info_pool ("rtx_group_info_pool");
563
564/* Index into the rtx_group_vec. */
565static int rtx_group_next_id;
566
567
568static vec<group_info *> rtx_group_vec;
569
570
571/* This structure holds the set of changes that are being deferred
   when removing a read operation.  See replace_read.  */
573struct deferred_change
574{
575
576 /* The mem that is being replaced. */
577 rtx *loc;
578
579 /* The reg it is being replaced with. */
580 rtx reg;
581
582 struct deferred_change *next;
583};
584
585static object_allocator<deferred_change> deferred_change_pool
586 ("deferred_change_pool");
587
588static deferred_change *deferred_change_list = NULL;
589
590/* This is true except if cfun->stdarg -- i.e. we cannot do
591 this for vararg functions because they play games with the frame. */
592static bool stores_off_frame_dead_at_return;
593
/* Counters for stats.  */
595static int globally_deleted;
596static int locally_deleted;
597
598static bitmap all_blocks;
599
600/* Locations that are killed by calls in the global phase. */
601static bitmap kill_on_calls;
602
603/* The number of bits used in the global bitmaps. */
604static unsigned int current_position;
605
606/* Print offset range [OFFSET, OFFSET + WIDTH) to FILE. */
607
608static void
609print_range (FILE *file, poly_int64 offset, poly_int64 width)
610{
  fprintf (file, "[");
  print_dec (offset, file, SIGNED);
  fprintf (file, "..");
  print_dec (offset + width, file, SIGNED);
  fprintf (file, ")");
616}
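
/* For example, print_range (file, 4, 8) prints "[4..12)".  */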
617
618/*----------------------------------------------------------------------------
619 Zeroth step.
620
621 Initialization.
622----------------------------------------------------------------------------*/
623
624
625/* Hashtable callbacks for maintaining the "bases" field of
   group_info, given that the addresses are function invariants.  */
627
628struct invariant_group_base_hasher : nofree_ptr_hash <group_info>
629{
630 static inline hashval_t hash (const group_info *);
631 static inline bool equal (const group_info *, const group_info *);
632};
633
634inline bool
635invariant_group_base_hasher::equal (const group_info *gi1,
636 const group_info *gi2)
637{
638 return rtx_equal_p (gi1->rtx_base, gi2->rtx_base);
639}
640
641inline hashval_t
642invariant_group_base_hasher::hash (const group_info *gi)
643{
644 int do_not_record;
645 return hash_rtx (gi->rtx_base, Pmode, &do_not_record, NULL, false);
646}
647
/* Table of group_info structures, hashed by base value.  */
649static hash_table<invariant_group_base_hasher> *rtx_group_table;
650
651
652/* Get the GROUP for BASE. Add a new group if it is not there. */
653
654static group_info *
655get_group_info (rtx base)
656{
657 struct group_info tmp_gi;
658 group_info *gi;
659 group_info **slot;
660
661 gcc_assert (base != NULL_RTX);
662
  /* Find the group_info structure for BASE, creating a new one
664 if necessary. */
665 tmp_gi.rtx_base = base;
666 slot = rtx_group_table->find_slot (value: &tmp_gi, insert: INSERT);
667 gi = *slot;
668
669 if (gi == NULL)
670 {
671 *slot = gi = group_info_pool.allocate ();
672 gi->rtx_base = base;
673 gi->id = rtx_group_next_id++;
674 gi->base_mem = gen_rtx_MEM (BLKmode, base);
675 gi->canon_base_addr = canon_rtx (base);
676 gi->store1_n = BITMAP_ALLOC (obstack: &dse_bitmap_obstack);
677 gi->store1_p = BITMAP_ALLOC (obstack: &dse_bitmap_obstack);
678 gi->store2_n = BITMAP_ALLOC (obstack: &dse_bitmap_obstack);
679 gi->store2_p = BITMAP_ALLOC (obstack: &dse_bitmap_obstack);
680 gi->escaped_p = BITMAP_ALLOC (obstack: &dse_bitmap_obstack);
681 gi->escaped_n = BITMAP_ALLOC (obstack: &dse_bitmap_obstack);
682 gi->group_kill = BITMAP_ALLOC (obstack: &dse_bitmap_obstack);
683 gi->process_globally = false;
684 gi->frame_related =
685 (base == frame_pointer_rtx) || (base == hard_frame_pointer_rtx)
686 || (base == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]);
687 gi->offset_map_size_n = 0;
688 gi->offset_map_size_p = 0;
689 gi->offset_map_n = NULL;
690 gi->offset_map_p = NULL;
691 rtx_group_vec.safe_push (obj: gi);
692 }
693
694 return gi;
695}
696
697
698/* Initialization of data structures. */
699
700static void
701dse_step0 (void)
702{
703 locally_deleted = 0;
704 globally_deleted = 0;
705
706 bitmap_obstack_initialize (&dse_bitmap_obstack);
707 gcc_obstack_init (&dse_obstack);
708
709 scratch = BITMAP_ALLOC (obstack: &reg_obstack);
710 kill_on_calls = BITMAP_ALLOC (obstack: &dse_bitmap_obstack);
711
712
713 rtx_group_table = new hash_table<invariant_group_base_hasher> (11);
714
715 bb_table = XNEWVEC (bb_info_t, last_basic_block_for_fn (cfun));
716 rtx_group_next_id = 0;
717
718 stores_off_frame_dead_at_return = !cfun->stdarg;
719
720 init_alias_analysis ();
721}
722
723
724
725/*----------------------------------------------------------------------------
726 First step.
727
728 Scan all of the insns. Any random ordering of the blocks is fine.
729 Each block is scanned in forward order to accommodate cselib which
730 is used to remove stores with non-constant bases.
731----------------------------------------------------------------------------*/
732
733/* Delete all of the store_info recs from INSN_INFO. */
734
735static void
736free_store_info (insn_info_t insn_info)
737{
738 store_info *cur = insn_info->store_rec;
739 while (cur)
740 {
741 store_info *next = cur->next;
742 if (cur->is_large)
743 BITMAP_FREE (cur->positions_needed.large.bmap);
744 if (cur->cse_base)
745 cse_store_info_pool.remove (object: cur);
746 else
747 rtx_store_info_pool.remove (object: cur);
748 cur = next;
749 }
750
751 insn_info->cannot_delete = true;
752 insn_info->contains_cselib_groups = false;
753 insn_info->store_rec = NULL;
754}
755
756struct note_add_store_info
757{
758 rtx_insn *first, *current;
759 regset fixed_regs_live;
760 bool failure;
761};
762
763/* Callback for emit_inc_dec_insn_before via note_stores.
764 Check if a register is clobbered which is live afterwards. */
765
766static void
767note_add_store (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *data)
768{
769 rtx_insn *insn;
770 note_add_store_info *info = (note_add_store_info *) data;
771
772 if (!REG_P (loc))
773 return;
774
775 /* If this register is referenced by the current or an earlier insn,
776 that's OK. E.g. this applies to the register that is being incremented
777 with this addition. */
778 for (insn = info->first;
779 insn != NEXT_INSN (insn: info->current);
780 insn = NEXT_INSN (insn))
781 if (reg_referenced_p (loc, PATTERN (insn)))
782 return;
783
784 /* If we come here, we have a clobber of a register that's only OK
785 if that register is not live. If we don't have liveness information
786 available, fail now. */
787 if (!info->fixed_regs_live)
788 {
789 info->failure = true;
790 return;
791 }
792 /* Now check if this is a live fixed register. */
793 unsigned int end_regno = END_REGNO (x: loc);
794 for (unsigned int regno = REGNO (loc); regno < end_regno; ++regno)
795 if (REGNO_REG_SET_P (info->fixed_regs_live, regno))
796 info->failure = true;
797}
798
799/* Callback for for_each_inc_dec that emits an INSN that sets DEST to
800 SRC + SRCOFF before insn ARG. */
801
802static int
803emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED,
804 rtx op ATTRIBUTE_UNUSED,
805 rtx dest, rtx src, rtx srcoff, void *arg)
806{
807 insn_info_t insn_info = (insn_info_t) arg;
808 rtx_insn *insn = insn_info->insn, *new_insn, *cur;
809 note_add_store_info info;
810
811 /* We can reuse all operands without copying, because we are about
812 to delete the insn that contained it. */
813 if (srcoff)
814 {
815 start_sequence ();
816 emit_insn (gen_add3_insn (dest, src, srcoff));
817 new_insn = end_sequence ();
818 }
819 else
820 new_insn = gen_move_insn (dest, src);
821 info.first = new_insn;
822 info.fixed_regs_live = insn_info->fixed_regs_live;
823 info.failure = false;
824 for (cur = new_insn; cur; cur = NEXT_INSN (insn: cur))
825 {
826 info.current = cur;
827 note_stores (cur, note_add_store, &info);
828 }
829
830 /* If a failure was flagged above, return 1 so that for_each_inc_dec will
831 return it immediately, communicating the failure to its caller. */
832 if (info.failure)
833 return 1;
834
835 emit_insn_before (new_insn, insn);
836
837 return 0;
838}
839
840/* Before we delete INSN_INFO->INSN, make sure that the auto inc/dec, if it
841 is there, is split into a separate insn.
842 Return true on success (or if there was nothing to do), false on failure. */
843
844static bool
845check_for_inc_dec_1 (insn_info_t insn_info)
846{
847 rtx_insn *insn = insn_info->insn;
848 rtx note = find_reg_note (insn, REG_INC, NULL_RTX);
849 if (note)
850 return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
851 arg: insn_info) == 0;
852
853 /* Punt on stack pushes, those don't have REG_INC notes and we are
854 unprepared to deal with distribution of REG_ARGS_SIZE notes etc. */
855 subrtx_iterator::array_type array;
856 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
857 {
858 const_rtx x = *iter;
859 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
860 return false;
861 }
862
863 return true;
864}
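
/* An illustrative (hypothetical) example: for a dead store such as

     (set (mem:SI (post_inc (reg r3))) (reg:SI r2))

   the side effect on r3 must survive even though the store itself is
   dead, so the callback above first emits an explicit

     (set (reg r3) (plus (reg r3) (const_int 4)))

   before the doomed insn; only then can the store be deleted.  */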
865
866
867/* Entry point for postreload. If you work on reload_cse, or you need this
868 anywhere else, consider if you can provide register liveness information
869 and add a parameter to this function so that it can be passed down in
870 insn_info.fixed_regs_live. */
871bool
872check_for_inc_dec (rtx_insn *insn)
873{
874 insn_info_type insn_info;
875 rtx note;
876
877 insn_info.insn = insn;
878 insn_info.fixed_regs_live = NULL;
879 note = find_reg_note (insn, REG_INC, NULL_RTX);
880 if (note)
881 return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
882 arg: &insn_info) == 0;
883
884 /* Punt on stack pushes, those don't have REG_INC notes and we are
885 unprepared to deal with distribution of REG_ARGS_SIZE notes etc. */
886 subrtx_iterator::array_type array;
887 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
888 {
889 const_rtx x = *iter;
890 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
891 return false;
892 }
893
894 return true;
895}
896
897/* Delete the insn and free all of the fields inside INSN_INFO. */
898
899static void
900delete_dead_store_insn (insn_info_t insn_info)
901{
902 read_info_t read_info;
903
904 if (!dbg_cnt (index: dse))
905 return;
906
907 if (!check_for_inc_dec_1 (insn_info))
908 return;
909 if (dump_file && (dump_flags & TDF_DETAILS))
910 fprintf (stream: dump_file, format: "Locally deleting insn %d\n",
911 INSN_UID (insn: insn_info->insn));
912
913 free_store_info (insn_info);
914 read_info = insn_info->read_rec;
915
916 while (read_info)
917 {
918 read_info_t next = read_info->next;
919 read_info_type_pool.remove (object: read_info);
920 read_info = next;
921 }
922 insn_info->read_rec = NULL;
923
924 delete_insn (insn_info->insn);
925 locally_deleted++;
926 insn_info->insn = NULL;
927
928 insn_info->wild_read = false;
929}
930
931/* Return whether DECL, a local variable, can possibly escape the current
932 function scope. */
933
934static bool
935local_variable_can_escape (tree decl)
936{
937 if (TREE_ADDRESSABLE (decl))
938 return true;
939
940 /* If this is a partitioned variable, we need to consider all the variables
941 in the partition. This is necessary because a store into one of them can
942 be replaced with a store into another and this may not change the outcome
943 of the escape analysis. */
944 if (cfun->gimple_df->decls_to_pointers != NULL)
945 {
946 tree *namep = cfun->gimple_df->decls_to_pointers->get (k: decl);
947 if (namep)
948 return TREE_ADDRESSABLE (*namep);
949 }
950
951 return false;
952}
953
954/* Return whether EXPR can possibly escape the current function scope. */
955
956static bool
957can_escape (tree expr)
958{
959 tree base;
960 if (!expr)
961 return true;
962 base = get_base_address (t: expr);
963 if (DECL_P (base)
964 && !may_be_aliased (var: base)
965 && !(VAR_P (base)
966 && !DECL_EXTERNAL (base)
967 && !TREE_STATIC (base)
968 && local_variable_can_escape (decl: base)))
969 return false;
970 return true;
971}
972
/* Set the store* bitmaps and the offset_map_size* fields in GROUP
   based on OFFSET and WIDTH; EXPR is used to decide whether the
   stored bytes can escape.  */
975
976static void
977set_usage_bits (group_info *group, poly_int64 offset, poly_int64 width,
978 tree expr)
979{
980 /* Non-constant offsets and widths act as global kills, so there's no point
981 trying to use them to derive global DSE candidates. */
982 HOST_WIDE_INT i, const_offset, const_width;
983 bool expr_escapes = can_escape (expr);
984 if (offset.is_constant (const_value: &const_offset)
985 && width.is_constant (const_value: &const_width)
986 && const_offset > -MAX_OFFSET
987 && const_offset + const_width < MAX_OFFSET)
988 for (i = const_offset; i < const_offset + const_width; ++i)
989 {
990 bitmap store1;
991 bitmap store2;
992 bitmap escaped;
993 int ai;
994 if (i < 0)
995 {
996 store1 = group->store1_n;
997 store2 = group->store2_n;
998 escaped = group->escaped_n;
999 ai = -i;
1000 }
1001 else
1002 {
1003 store1 = group->store1_p;
1004 store2 = group->store2_p;
1005 escaped = group->escaped_p;
1006 ai = i;
1007 }
1008
1009 if (!bitmap_set_bit (store1, ai))
1010 bitmap_set_bit (store2, ai);
1011 else
1012 {
1013 if (i < 0)
1014 {
1015 if (group->offset_map_size_n < ai)
1016 group->offset_map_size_n = ai;
1017 }
1018 else
1019 {
1020 if (group->offset_map_size_p < ai)
1021 group->offset_map_size_p = ai;
1022 }
1023 }
1024 if (expr_escapes)
1025 bitmap_set_bit (escaped, ai);
1026 }
1027}
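
/* For illustration: bitmap_set_bit returns true only when the bit was
   previously clear, so the loop above records the first store to byte
   AI in store1 and any second store in store2.  E.g. two 4-byte
   stores to offset 0 of the same group leave bits 0..3 set in both
   store1_p and store2_p, which is what makes those positions
   candidates for the global problem (see step 2 in the header
   comment).  */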
1028
1029static void
1030reset_active_stores (void)
1031{
1032 active_local_stores = NULL;
1033 active_local_stores_len = 0;
1034}
1035
1036/* Free all READ_REC of the LAST_INSN of BB_INFO. */
1037
1038static void
1039free_read_records (bb_info_t bb_info)
1040{
1041 insn_info_t insn_info = bb_info->last_insn;
1042 read_info_t *ptr = &insn_info->read_rec;
1043 while (*ptr)
1044 {
1045 read_info_t next = (*ptr)->next;
1046 read_info_type_pool.remove (object: *ptr);
1047 *ptr = next;
1048 }
1049}
1050
1051/* Set the BB_INFO so that the last insn is marked as a wild read. */
1052
1053static void
1054add_wild_read (bb_info_t bb_info)
1055{
1056 insn_info_t insn_info = bb_info->last_insn;
1057 insn_info->wild_read = true;
1058 free_read_records (bb_info);
1059 reset_active_stores ();
1060}
1061
1062/* Set the BB_INFO so that the last insn is marked as a wild read of
1063 non-frame locations. */
1064
1065static void
1066add_non_frame_wild_read (bb_info_t bb_info)
1067{
1068 insn_info_t insn_info = bb_info->last_insn;
1069 insn_info->non_frame_wild_read = true;
1070 free_read_records (bb_info);
1071 reset_active_stores ();
1072}
1073
1074/* Return true if X is a constant or one of the registers that behave
1075 as a constant over the life of a function. This is equivalent to
1076 !rtx_varies_p for memory addresses. */
1077
1078static bool
1079const_or_frame_p (rtx x)
1080{
1081 if (CONSTANT_P (x))
1082 return true;
1083
1084 if (GET_CODE (x) == REG)
1085 {
1086 /* Note that we have to test for the actual rtx used for the frame
1087 and arg pointers and not just the register number in case we have
1088 eliminated the frame and/or arg pointer and are using it
1089 for pseudos. */
1090 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
1091 /* The arg pointer varies if it is not a fixed register. */
1092 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
1093 || x == pic_offset_table_rtx)
1094 return true;
1095 return false;
1096 }
1097
1098 return false;
1099}
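
/* For example, a (symbol_ref "x") or (const_int 16) address and the
   frame, hard frame, fixed arg and PIC registers all yield true here,
   while an ordinary pseudo register yields false.  */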
1100
1101/* Take all reasonable action to put the address of MEM into the form
1102 that we can do analysis on.
1103
1104 The gold standard is to get the address into the form: address +
1105 OFFSET where address is something that rtx_varies_p considers a
1106 constant. When we can get the address in this form, we can do
1107 global analysis on it. Note that for constant bases, address is
1108 not actually returned, only the group_id. The address can be
1109 obtained from that.
1110
1111 If that fails, we try cselib to get a value we can at least use
1112 locally. If that fails we return false.
1113
   The GROUP_ID is set to -1 for cselib bases and to the index of the
   group for non-varying bases.  */
1118
1119static bool
1120canon_address (rtx mem,
1121 int *group_id,
1122 poly_int64 *offset,
1123 cselib_val **base)
1124{
1125 machine_mode address_mode = get_address_mode (mem);
1126 rtx mem_address = XEXP (mem, 0);
1127 rtx expanded_address, address;
1128 int expanded;
1129
1130 cselib_lookup (mem_address, address_mode, 1, GET_MODE (mem));
1131
1132 if (dump_file && (dump_flags & TDF_DETAILS))
1133 {
1134 fprintf (stream: dump_file, format: " mem: ");
1135 print_inline_rtx (dump_file, mem_address, 0);
1136 fprintf (stream: dump_file, format: "\n");
1137 }
1138
  /* First see if just canon_rtx (mem_address) is const or frame;
     if not, try cselib_expand_value_rtx and call canon_rtx on that.  */
1141 address = NULL_RTX;
1142 for (expanded = 0; expanded < 2; expanded++)
1143 {
1144 if (expanded)
1145 {
1146 /* Use cselib to replace all of the reg references with the full
1147 expression. This will take care of the case where we have
1148
1149 r_x = base + offset;
1150 val = *r_x;
1151
1152 by making it into
1153
1154 val = *(base + offset); */
1155
1156 expanded_address = cselib_expand_value_rtx (mem_address,
1157 scratch, 5);
1158
1159 /* If this fails, just go with the address from first
1160 iteration. */
1161 if (!expanded_address)
1162 break;
1163 }
1164 else
1165 expanded_address = mem_address;
1166
1167 /* Split the address into canonical BASE + OFFSET terms. */
1168 address = canon_rtx (expanded_address);
1169
1170 *offset = 0;
1171
1172 if (dump_file && (dump_flags & TDF_DETAILS))
1173 {
1174 if (expanded)
1175 {
1176 fprintf (stream: dump_file, format: "\n after cselib_expand address: ");
1177 print_inline_rtx (dump_file, expanded_address, 0);
1178 fprintf (stream: dump_file, format: "\n");
1179 }
1180
1181 fprintf (stream: dump_file, format: "\n after canon_rtx address: ");
1182 print_inline_rtx (dump_file, address, 0);
1183 fprintf (stream: dump_file, format: "\n");
1184 }
1185
1186 if (GET_CODE (address) == CONST)
1187 address = XEXP (address, 0);
1188
1189 address = strip_offset_and_add (x: address, offset);
1190
1191 if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (mem))
1192 && const_or_frame_p (x: address)
1193 /* Literal addresses can alias any base, avoid creating a
1194 group for them. */
1195 && ! CONST_SCALAR_INT_P (address))
1196 {
1197 group_info *group = get_group_info (base: address);
1198
1199 if (dump_file && (dump_flags & TDF_DETAILS))
1200 {
1201 fprintf (stream: dump_file, format: " gid=%d offset=", group->id);
1202 print_dec (value: *offset, file: dump_file);
1203 fprintf (stream: dump_file, format: "\n");
1204 }
1205 *base = NULL;
1206 *group_id = group->id;
1207 return true;
1208 }
1209 }
1210
1211 *base = cselib_lookup (address, address_mode, true, GET_MODE (mem));
1212 *group_id = -1;
1213
1214 if (*base == NULL)
1215 {
1216 if (dump_file && (dump_flags & TDF_DETAILS))
1217 fprintf (stream: dump_file, format: " no cselib val - should be a wild read.\n");
1218 return false;
1219 }
1220 if (dump_file && (dump_flags & TDF_DETAILS))
1221 {
1222 fprintf (stream: dump_file, format: " varying cselib base=%u:%u offset = ",
1223 (*base)->uid, (*base)->hash);
1224 print_dec (value: *offset, file: dump_file);
1225 fprintf (stream: dump_file, format: "\n");
1226 }
1227 return true;
1228}
1229
1230
/* Clear the rhs field from the active_local_stores list.  */
1232
1233static void
1234clear_rhs_from_active_local_stores (void)
1235{
1236 insn_info_t ptr = active_local_stores;
1237
1238 while (ptr)
1239 {
1240 store_info *store_info = ptr->store_rec;
1241 /* Skip the clobbers. */
1242 while (!store_info->is_set)
1243 store_info = store_info->next;
1244
1245 store_info->rhs = NULL;
1246 store_info->const_rhs = NULL;
1247
1248 ptr = ptr->next_local_store;
1249 }
1250}
1251
1252
/* Mark the byte at offset POS from the beginning of store S_INFO as unneeded.  */
1254
1255static inline void
1256set_position_unneeded (store_info *s_info, int pos)
1257{
1258 if (UNLIKELY (s_info->is_large))
1259 {
1260 if (bitmap_set_bit (s_info->positions_needed.large.bmap, pos))
1261 s_info->positions_needed.large.count++;
1262 }
1263 else
1264 s_info->positions_needed.small_bitmask
1265 &= ~(HOST_WIDE_INT_1U << pos);
1266}
1267
1268/* Mark the whole store S_INFO as unneeded. */
1269
1270static inline void
1271set_all_positions_unneeded (store_info *s_info)
1272{
1273 if (UNLIKELY (s_info->is_large))
1274 {
1275 HOST_WIDE_INT width;
1276 if (s_info->width.is_constant (const_value: &width))
1277 {
1278 bitmap_set_range (s_info->positions_needed.large.bmap, 0, width);
1279 s_info->positions_needed.large.count = width;
1280 }
1281 else
1282 {
1283 gcc_checking_assert (!s_info->positions_needed.large.bmap);
1284 s_info->positions_needed.large.count = 1;
1285 }
1286 }
1287 else
1288 s_info->positions_needed.small_bitmask = HOST_WIDE_INT_0U;
1289}
1290
1291/* Return TRUE if any bytes from S_INFO store are needed. */
1292
1293static inline bool
1294any_positions_needed_p (store_info *s_info)
1295{
1296 if (UNLIKELY (s_info->is_large))
1297 {
1298 HOST_WIDE_INT width;
1299 if (s_info->width.is_constant (const_value: &width))
1300 {
1301 gcc_checking_assert (s_info->positions_needed.large.bmap);
1302 return s_info->positions_needed.large.count < width;
1303 }
1304 else
1305 {
1306 gcc_checking_assert (!s_info->positions_needed.large.bmap);
1307 return s_info->positions_needed.large.count == 0;
1308 }
1309 }
1310 else
1311 return (s_info->positions_needed.small_bitmask != HOST_WIDE_INT_0U);
1312}
1313
1314/* Return TRUE if all bytes START through START+WIDTH-1 from S_INFO
1315 store are known to be needed. */
1316
1317static inline bool
1318all_positions_needed_p (store_info *s_info, poly_int64 start,
1319 poly_int64 width)
1320{
1321 gcc_assert (s_info->rhs);
1322 if (!s_info->width.is_constant ())
1323 {
1324 gcc_assert (s_info->is_large
1325 && !s_info->positions_needed.large.bmap);
1326 return s_info->positions_needed.large.count == 0;
1327 }
1328
1329 /* Otherwise, if START and WIDTH are non-constant, we're asking about
1330 a non-constant region of a constant-sized store. We can't say for
1331 sure that all positions are needed. */
1332 HOST_WIDE_INT const_start, const_width;
1333 if (!start.is_constant (const_value: &const_start)
1334 || !width.is_constant (const_value: &const_width))
1335 return false;
1336
1337 if (UNLIKELY (s_info->is_large))
1338 {
1339 for (HOST_WIDE_INT i = const_start; i < const_start + const_width; ++i)
1340 if (bitmap_bit_p (s_info->positions_needed.large.bmap, i))
1341 return false;
1342 return true;
1343 }
1344 else
1345 {
1346 unsigned HOST_WIDE_INT mask
1347 = lowpart_bitmask (n: const_width) << const_start;
1348 return (s_info->positions_needed.small_bitmask & mask) == mask;
1349 }
1350}
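
/* A small worked example of the word-sized case above: for a 4-byte
   store whose small_bitmask is currently 0xf,
   all_positions_needed_p (s, 1, 2) tests the mask
   (lowpart_bitmask (2) << 1) == 0x6, so it returns true as long as
   bytes 1 and 2 have not been marked unneeded.  START is relative to
   the start of the store, as in the callers below.  */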
1351
1352
1353static rtx get_stored_val (store_info *, machine_mode, poly_int64,
1354 poly_int64, basic_block, bool);
1355
1356
1357/* BODY is an instruction pattern that belongs to INSN. Return 1 if
1358 there is a candidate store, after adding it to the appropriate
1359 local store group if so. */
1360
1361static int
1362record_store (rtx body, bb_info_t bb_info)
1363{
1364 rtx mem, rhs, const_rhs, mem_addr;
1365 poly_int64 offset = 0;
1366 poly_int64 width = 0;
1367 insn_info_t insn_info = bb_info->last_insn;
1368 store_info *store_info = NULL;
1369 int group_id;
1370 cselib_val *base = NULL;
1371 insn_info_t ptr, last, redundant_reason;
1372 bool store_is_unused;
1373
1374 if (GET_CODE (body) != SET && GET_CODE (body) != CLOBBER)
1375 return 0;
1376
1377 mem = SET_DEST (body);
1378
1379 /* If this is not used, then this cannot be used to keep the insn
1380 from being deleted. On the other hand, it does provide something
1381 that can be used to prove that another store is dead. */
1382 store_is_unused
1383 = (find_reg_note (insn_info->insn, REG_UNUSED, mem) != NULL);
1384
1385 /* Check whether that value is a suitable memory location. */
1386 if (!MEM_P (mem))
1387 {
      /* If the set or clobber is unused, then it does not affect our
1389 ability to get rid of the entire insn. */
1390 if (!store_is_unused)
1391 insn_info->cannot_delete = true;
1392 return 0;
1393 }
1394
1395 /* At this point we know mem is a mem. */
1396 if (GET_MODE (mem) == BLKmode)
1397 {
1398 HOST_WIDE_INT const_size;
1399 if (GET_CODE (XEXP (mem, 0)) == SCRATCH)
1400 {
1401 if (dump_file && (dump_flags & TDF_DETAILS))
1402 fprintf (stream: dump_file, format: " adding wild read for (clobber (mem:BLK (scratch))\n");
1403 add_wild_read (bb_info);
1404 insn_info->cannot_delete = true;
1405 return 0;
1406 }
1407 /* Handle (set (mem:BLK (addr) [... S36 ...]) (const_int 0))
1408 as memset (addr, 0, 36); */
1409 else if (!MEM_SIZE_KNOWN_P (mem)
1410 || maybe_le (MEM_SIZE (mem), b: 0)
1411 /* This is a limit on the bitmap size, which is only relevant
1412 for constant-sized MEMs. */
1413 || (MEM_SIZE (mem).is_constant (const_value: &const_size)
1414 && const_size > MAX_OFFSET)
1415 || GET_CODE (body) != SET
1416 || !CONST_INT_P (SET_SRC (body)))
1417 {
1418 if (!store_is_unused)
1419 {
	      /* If the set or clobber is unused, then it does not affect our
1421 ability to get rid of the entire insn. */
1422 insn_info->cannot_delete = true;
1423 clear_rhs_from_active_local_stores ();
1424 }
1425 return 0;
1426 }
1427 }
1428
1429 /* We can still process a volatile mem, we just cannot delete it. */
1430 if (MEM_VOLATILE_P (mem))
1431 insn_info->cannot_delete = true;
1432
1433 if (!canon_address (mem, group_id: &group_id, offset: &offset, base: &base))
1434 {
1435 clear_rhs_from_active_local_stores ();
1436 return 0;
1437 }
1438
1439 if (GET_MODE (mem) == BLKmode)
1440 width = MEM_SIZE (mem);
1441 else
1442 width = GET_MODE_SIZE (GET_MODE (mem));
1443
1444 if (!endpoint_representable_p (pos: offset, size: width))
1445 {
1446 clear_rhs_from_active_local_stores ();
1447 return 0;
1448 }
1449
1450 if (known_eq (width, 0))
1451 return 0;
1452
1453 if (group_id >= 0)
1454 {
1455 /* In the restrictive case where the base is a constant or the
1456 frame pointer we can do global analysis. */
1457
1458 group_info *group
1459 = rtx_group_vec[group_id];
1460 tree expr = MEM_EXPR (mem);
1461
1462 store_info = rtx_store_info_pool.allocate ();
1463 set_usage_bits (group, offset, width, expr);
1464
1465 if (dump_file && (dump_flags & TDF_DETAILS))
1466 {
1467 fprintf (stream: dump_file, format: " processing const base store gid=%d",
1468 group_id);
1469 print_range (file: dump_file, offset, width);
1470 fprintf (stream: dump_file, format: "\n");
1471 }
1472 }
1473 else
1474 {
1475 if (may_be_sp_based_p (XEXP (mem, 0)))
1476 insn_info->stack_pointer_based = true;
1477 insn_info->contains_cselib_groups = true;
1478
1479 store_info = cse_store_info_pool.allocate ();
1480 group_id = -1;
1481
1482 if (dump_file && (dump_flags & TDF_DETAILS))
1483 {
1484 fprintf (stream: dump_file, format: " processing cselib store ");
1485 print_range (file: dump_file, offset, width);
1486 fprintf (stream: dump_file, format: "\n");
1487 }
1488 }
1489
1490 const_rhs = rhs = NULL_RTX;
1491 if (GET_CODE (body) == SET
1492 /* No place to keep the value after ra. */
1493 && !reload_completed
1494 && (REG_P (SET_SRC (body))
1495 || GET_CODE (SET_SRC (body)) == SUBREG
1496 || CONSTANT_P (SET_SRC (body)))
1497 && !MEM_VOLATILE_P (mem)
      /* Sometimes the store and reload are used for truncation and
1499 rounding. */
1500 && !(FLOAT_MODE_P (GET_MODE (mem)) && (flag_float_store)))
1501 {
1502 rhs = SET_SRC (body);
1503 if (CONSTANT_P (rhs))
1504 const_rhs = rhs;
1505 else if (body == PATTERN (insn: insn_info->insn))
1506 {
1507 rtx tem = find_reg_note (insn_info->insn, REG_EQUAL, NULL_RTX);
1508 if (tem && CONSTANT_P (XEXP (tem, 0)))
1509 const_rhs = XEXP (tem, 0);
1510 }
1511 if (const_rhs == NULL_RTX && REG_P (rhs))
1512 {
1513 rtx tem = cselib_expand_value_rtx (rhs, scratch, 5);
1514
1515 if (tem && CONSTANT_P (tem))
1516 const_rhs = tem;
1517 else
1518 {
1519 /* If RHS is set only once to a constant, set CONST_RHS
1520 to the constant. */
1521 rtx def_src = df_find_single_def_src (rhs);
1522 if (def_src != nullptr && CONSTANT_P (def_src))
1523 const_rhs = def_src;
1524 }
1525 }
1526 }
1527
  /* Check to see if this store causes some other stores to be
1529 dead. */
1530 ptr = active_local_stores;
1531 last = NULL;
1532 redundant_reason = NULL;
1533 unsigned char addrspace = MEM_ADDR_SPACE (mem);
1534 mem = canon_rtx (mem);
1535
1536 if (group_id < 0)
1537 mem_addr = base->val_rtx;
1538 else
1539 {
1540 group_info *group = rtx_group_vec[group_id];
1541 mem_addr = group->canon_base_addr;
1542 }
1543 if (maybe_ne (a: offset, b: 0))
1544 mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
1545
1546 while (ptr)
1547 {
1548 insn_info_t next = ptr->next_local_store;
1549 class store_info *s_info = ptr->store_rec;
1550 bool del = true;
1551
1552 /* Skip the clobbers. We delete the active insn if this insn
1553 shadows the set. To have been put on the active list, it
	 has exactly one set.  */
1555 while (!s_info->is_set)
1556 s_info = s_info->next;
1557
1558 if (s_info->group_id == group_id
1559 && s_info->cse_base == base
1560 && s_info->addrspace == addrspace)
1561 {
1562 HOST_WIDE_INT i;
1563 if (dump_file && (dump_flags & TDF_DETAILS))
1564 {
1565 fprintf (stream: dump_file, format: " trying store in insn=%d gid=%d",
1566 INSN_UID (insn: ptr->insn), s_info->group_id);
1567 print_range (file: dump_file, offset: s_info->offset, width: s_info->width);
1568 fprintf (stream: dump_file, format: "\n");
1569 }
1570
1571 /* Even if PTR won't be eliminated as unneeded, if both
1572 PTR and this insn store the same constant value, we might
1573 eliminate this insn instead. */
1574 if (s_info->const_rhs
1575 && const_rhs
1576 && known_subrange_p (pos1: offset, size1: width,
1577 pos2: s_info->offset, size2: s_info->width)
1578 && all_positions_needed_p (s_info, start: offset - s_info->offset,
1579 width)
	      /* We can only remove the later store if the earlier one
		 aliases at least all the accesses the later one makes.  */
1582 && mems_same_for_tbaa_p (s_info->mem, mem))
1583 {
1584 if (GET_MODE (mem) == BLKmode)
1585 {
1586 if (GET_MODE (s_info->mem) == BLKmode
1587 && s_info->const_rhs == const_rhs)
1588 redundant_reason = ptr;
1589 }
1590 else if (s_info->const_rhs == const0_rtx
1591 && const_rhs == const0_rtx)
1592 redundant_reason = ptr;
1593 else
1594 {
1595 rtx val;
1596 start_sequence ();
1597 val = get_stored_val (s_info, GET_MODE (mem), offset, width,
1598 BLOCK_FOR_INSN (insn: insn_info->insn),
1599 true);
1600 if (get_insns () != NULL)
1601 val = NULL_RTX;
1602 end_sequence ();
1603 if (val && rtx_equal_p (val, const_rhs))
1604 redundant_reason = ptr;
1605 }
1606 }
1607
1608 HOST_WIDE_INT begin_unneeded, const_s_width, const_width;
1609 if (known_subrange_p (pos1: s_info->offset, size1: s_info->width, pos2: offset, size2: width))
1610 /* The new store touches every byte that S_INFO does. */
1611 set_all_positions_unneeded (s_info);
1612 else if ((offset - s_info->offset).is_constant (const_value: &begin_unneeded)
1613 && s_info->width.is_constant (const_value: &const_s_width)
1614 && width.is_constant (const_value: &const_width))
1615 {
1616 HOST_WIDE_INT end_unneeded = begin_unneeded + const_width;
1617 begin_unneeded = MAX (begin_unneeded, 0);
1618 end_unneeded = MIN (end_unneeded, const_s_width);
1619 for (i = begin_unneeded; i < end_unneeded; ++i)
1620 set_position_unneeded (s_info, pos: i);
1621 }
1622 else
1623 {
1624 /* We don't know which parts of S_INFO are needed and
1625 which aren't, so invalidate the RHS. */
1626 s_info->rhs = NULL;
1627 s_info->const_rhs = NULL;
1628 }
1629 }
1630 else if (s_info->rhs)
1631 /* Need to see if it is possible for this store to overwrite
1632 the value of store_info. If it is, set the rhs to NULL to
1633 keep it from being used to remove a load. */
1634 {
1635 if (canon_output_dependence (s_info->mem, true,
1636 mem, GET_MODE (mem),
1637 mem_addr))
1638 {
1639 s_info->rhs = NULL;
1640 s_info->const_rhs = NULL;
1641 }
1642 }
1643
1644 /* An insn can be deleted if every position of every one of
1645 its s_infos is zero. */
1646 if (any_positions_needed_p (s_info))
1647 del = false;
1648
1649 if (del)
1650 {
1651 insn_info_t insn_to_delete = ptr;
1652
1653 active_local_stores_len--;
1654 if (last)
1655 last->next_local_store = ptr->next_local_store;
1656 else
1657 active_local_stores = ptr->next_local_store;
1658
1659 if (!insn_to_delete->cannot_delete)
1660 delete_dead_store_insn (insn_info: insn_to_delete);
1661 }
1662 else
1663 last = ptr;
1664
1665 ptr = next;
1666 }
1667
1668 /* Finish filling in the store_info. */
1669 store_info->next = insn_info->store_rec;
1670 insn_info->store_rec = store_info;
1671 store_info->mem = mem;
1672 store_info->mem_addr = mem_addr;
1673 store_info->cse_base = base;
1674 HOST_WIDE_INT const_width;
1675 if (!width.is_constant (const_value: &const_width))
1676 {
1677 store_info->is_large = true;
1678 store_info->positions_needed.large.count = 0;
1679 store_info->positions_needed.large.bmap = NULL;
1680 }
1681 else if (const_width > HOST_BITS_PER_WIDE_INT)
1682 {
1683 store_info->is_large = true;
1684 store_info->positions_needed.large.count = 0;
1685 store_info->positions_needed.large.bmap = BITMAP_ALLOC (obstack: &dse_bitmap_obstack);
1686 }
1687 else
1688 {
1689 store_info->is_large = false;
1690 store_info->positions_needed.small_bitmask
1691 = lowpart_bitmask (n: const_width);
1692 }
1693 store_info->group_id = group_id;
1694 store_info->offset = offset;
1695 store_info->width = width;
1696 store_info->is_set = GET_CODE (body) == SET;
1697 store_info->rhs = rhs;
1698 store_info->const_rhs = const_rhs;
1699 store_info->redundant_reason = redundant_reason;
1700 store_info->addrspace = addrspace;
1701
  /* If this is a clobber, we return 0.  We will only be able to
     delete this insn if there is only one used store, but we
     can use the clobber to delete other stores earlier.  */
1705 return store_info->is_set ? 1 : 0;
1706}
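
/* To illustrate the shadowing logic above with a hypothetical
   sequence: given

     (set (mem:DI (plus (reg fp) (const_int -8))) (reg r1))
     (set (mem:SI (plus (reg fp) (const_int -4))) (reg r2))

   the second store marks bytes 4..7 of the first store_info as
   unneeded (its small_bitmask drops from 0xff to 0x0f); if a later
   store covered bytes 0..3 as well, any_positions_needed_p would
   return false and the first insn would be deleted on the spot.  */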
1707
1708
1709static void
1710dump_insn_info (const char * start, insn_info_t insn_info)
1711{
1712 fprintf (stream: dump_file, format: "%s insn=%d %s\n", start,
1713 INSN_UID (insn: insn_info->insn),
1714 insn_info->store_rec ? "has store" : "naked");
1715}
1716
1717
1718 /* If the modes are different and the value's source and target do not
1719 line up, we need to extract the value from the lower part of the rhs of
1720 the store, shift it, and then put it into a form that can be shoved
1721 into the read_insn. This function generates a right SHIFT by SHIFT bits
1722 of a value that is at least ACCESS_BYTES bytes wide, so that its low
1723 part can be taken in READ_MODE. The shift sequence is returned, or NULL
1724 if no suitable shift could be found. */
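/* A worked example, assuming a little-endian target (illustrative only):
   if an 8-byte DImode store is later read back as the SImode word at byte
   offset 4, get_stored_val computes SHIFT = 32 and ACCESS_BYTES = 8, and
   this function either extracts the high word directly as a subreg or
   searches for a mode wide enough to perform an LSHIFTRT by 32 whose low
   SImode part is the wanted value.  */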
1725
1726static rtx
1727find_shift_sequence (poly_int64 access_bytes,
1728 store_info *store_info,
1729 machine_mode read_mode,
1730 poly_int64 shift, bool speed, bool require_cst)
1731{
1732 machine_mode store_mode = GET_MODE (store_info->mem);
1733 scalar_int_mode new_mode;
1734 rtx read_reg = NULL;
1735
1736 /* If a constant was stored into memory, try to simplify it here,
1737 otherwise the cost of the shift might preclude this optimization
1738 e.g. at -Os, even when no actual shift will be needed. */
1739 auto access_bits = access_bytes * BITS_PER_UNIT;
1740 if (store_info->const_rhs
1741 && known_le (access_bytes, GET_MODE_SIZE (MAX_MODE_INT))
1742 && smallest_int_mode_for_size (access_bits).exists (&new_mode))
1743 {
1744 auto byte = subreg_lowpart_offset (new_mode, store_mode);
1745 rtx ret
1746 = simplify_subreg (new_mode, store_info->const_rhs, store_mode, byte);
1747 if (ret && CONSTANT_P (ret))
1748 {
1749 rtx shift_rtx = gen_int_shift_amount (new_mode, shift);
1750 ret = simplify_const_binary_operation (LSHIFTRT, new_mode, ret,
1751 shift_rtx);
1752 if (ret && CONSTANT_P (ret))
1753 {
1754 byte = subreg_lowpart_offset (read_mode, new_mode);
1755 ret = simplify_subreg (read_mode, ret, new_mode, byte);
1756 if (ret && CONSTANT_P (ret)
1757 && (set_src_cost (ret, read_mode, speed)
1758 <= COSTS_N_INSNS (1)))
1759 return ret;
1760 }
1761 }
1762 }
1763
1764 if (require_cst)
1765 return NULL_RTX;
1766
1767 /* Some machines like the x86 have shift insns for each size of
1768 operand. Other machines like the ppc or the ia-64 may only have
1769 shift insns that shift values within 32 or 64 bit registers.
1770 This loop tries to find the smallest shift insn that will right
1771 justify the value we want to read but is available in one insn on
1772 the machine. */
1773
1774 opt_scalar_int_mode new_mode_iter;
1775 FOR_EACH_MODE_IN_CLASS (new_mode_iter, MODE_INT)
1776 {
1777 rtx target, new_reg, new_lhs;
1778 rtx_insn *shift_seq, *insn;
1779 int cost;
1780
1781 new_mode = new_mode_iter.require ();
1782 if (GET_MODE_BITSIZE (new_mode) > BITS_PER_WORD)
1783 break;
1784 if (maybe_lt (GET_MODE_SIZE (new_mode), GET_MODE_SIZE (read_mode)))
1785 continue;
1786
1787 /* Try a wider mode if truncating the store mode to NEW_MODE
1788 requires a real instruction. */
1789 if (maybe_lt (GET_MODE_SIZE (new_mode), GET_MODE_SIZE (store_mode))
1790 && !TRULY_NOOP_TRUNCATION_MODES_P (new_mode, store_mode))
1791 continue;
1792
1793 /* Also try a wider mode if the necessary punning is either not
1794 desirable or not possible. */
1795 if (!CONSTANT_P (store_info->rhs)
1796 && !targetm.modes_tieable_p (new_mode, store_mode))
1797 continue;
1798
1799 if (multiple_p (shift, GET_MODE_BITSIZE (new_mode))
1800 && known_le (GET_MODE_SIZE (new_mode), GET_MODE_SIZE (store_mode)))
1801 {
1802 /* Try to implement the shift using a subreg. */
1803 poly_int64 offset
1804 = subreg_offset_from_lsb (new_mode, store_mode, shift);
1805 rtx rhs_subreg = simplify_gen_subreg (new_mode, store_info->rhs,
1806 store_mode, offset);
1807 if (rhs_subreg)
1808 {
1809 read_reg
1810 = extract_low_bits (read_mode, new_mode, copy_rtx (rhs_subreg));
1811 break;
1812 }
1813 }
1814
1815 if (maybe_lt (GET_MODE_SIZE (new_mode), access_bytes))
1816 continue;
1817
1818 new_reg = gen_reg_rtx (new_mode);
1819
1820 start_sequence ();
1821
1822 /* In theory we could also check for an ashr. Ian Taylor knows
1823 of one dsp where the cost of these two was not the same. But
1824 this really is a rare case anyway. */
1825 target = expand_binop (new_mode, lshr_optab, new_reg,
1826 gen_int_shift_amount (new_mode, shift),
1827 new_reg, 1, OPTAB_DIRECT);
1828
1829 shift_seq = end_sequence ();
1830
1831 if (target != new_reg || shift_seq == NULL)
1832 continue;
1833
1834 cost = 0;
1835 for (insn = shift_seq; insn != NULL_RTX; insn = NEXT_INSN (insn))
1836 if (INSN_P (insn))
1837 cost += insn_cost (insn, speed);
1838
1839 /* The computation up to here is essentially independent
1840 of the arguments and could be precomputed. It may
1841 not be worth doing so. We could precompute if
1842 worthwhile or at least cache the results. The result
1843 technically depends on both SHIFT and ACCESS_BYTES,
1844 but in practice the answer will depend only on ACCESS_BYTES. */
1845
1846 if (cost > COSTS_N_INSNS (1))
1847 continue;
1848
1849 new_lhs = extract_low_bits (new_mode, store_mode,
1850 copy_rtx (store_info->rhs));
1851 if (new_lhs == NULL_RTX)
1852 continue;
1853
1854 /* We found an acceptable shift. Generate a move to
1855 take the value from the store and put it into the
1856 shift pseudo, then shift it, then generate another
1857 move to put it into the target of the read. */
1858 emit_move_insn (new_reg, new_lhs);
1859 emit_insn (shift_seq);
1860 read_reg = extract_low_bits (read_mode, new_mode, new_reg);
1861 break;
1862 }
1863
1864 return read_reg;
1865}
1866
1867
1868/* Call back for note_stores to find the hard regs set or clobbered by
1869 insn. Data is a bitmap of the hardregs set so far. */
1870
1871static void
1872look_for_hardregs (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
1873{
1874 bitmap regs_set = (bitmap) data;
1875
1876 if (REG_P (x)
1877 && HARD_REGISTER_P (x))
1878 bitmap_set_range (regs_set, REGNO (x), REG_NREGS (x));
1879}
1880
1881/* Helper function for replace_read and record_store.
1882 Attempt to return a value of mode READ_MODE stored in STORE_INFO,
1883 consisting of READ_WIDTH bytes starting from READ_OFFSET. Return NULL
1884 if not successful. If REQUIRE_CST is true, only a constant is ever returned. */
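/* Illustrative example (not from the original comments): for a BLKmode
   store recorded for memset (p, 0xab, 8), a later SImode read of bytes
   within it is satisfied by replicating the stored byte, i.e. the BLKmode
   branch below produces gen_int_mode (0xabababab, SImode).  */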
1885
1886static rtx
1887get_stored_val (store_info *store_info, machine_mode read_mode,
1888 poly_int64 read_offset, poly_int64 read_width,
1889 basic_block bb, bool require_cst)
1890{
1891 machine_mode store_mode = GET_MODE (store_info->mem);
1892 poly_int64 gap;
1893 rtx read_reg;
1894
1895 /* To get here the read is within the boundaries of the write so
1896 shift will never be negative. Start out with the shift being in
1897 bytes. */
1898 if (store_mode == BLKmode)
1899 gap = 0;
1900 else if (BYTES_BIG_ENDIAN)
1901 gap = ((store_info->offset + store_info->width)
1902 - (read_offset + read_width));
1903 else
1904 gap = read_offset - store_info->offset;
1905
1906 if (maybe_ne (gap, 0))
1907 {
1908 if (!gap.is_constant ())
1909 return NULL_RTX;
1910
1911 poly_int64 shift = gap * BITS_PER_UNIT;
1912 poly_int64 access_size = GET_MODE_SIZE (read_mode) + gap;
1913 read_reg = find_shift_sequence (access_size, store_info, read_mode,
1914 shift, optimize_bb_for_speed_p (bb),
1915 require_cst);
1916 }
1917 else if (store_mode == BLKmode)
1918 {
1919 /* The store is a memset (addr, const_val, const_size). */
1920 gcc_assert (CONST_INT_P (store_info->rhs));
1921 scalar_int_mode int_store_mode;
1922 if (!int_mode_for_mode (read_mode).exists (&int_store_mode))
1923 read_reg = NULL_RTX;
1924 else if (store_info->rhs == const0_rtx)
1925 read_reg = extract_low_bits (read_mode, int_store_mode, const0_rtx);
1926 else if (GET_MODE_BITSIZE (int_store_mode) > HOST_BITS_PER_WIDE_INT
1927 || BITS_PER_UNIT >= HOST_BITS_PER_WIDE_INT)
1928 read_reg = NULL_RTX;
1929 else
1930 {
1931 unsigned HOST_WIDE_INT c
1932 = INTVAL (store_info->rhs)
1933 & ((HOST_WIDE_INT_1 << BITS_PER_UNIT) - 1);
1934 int shift = BITS_PER_UNIT;
1935 while (shift < HOST_BITS_PER_WIDE_INT)
1936 {
1937 c |= (c << shift);
1938 shift <<= 1;
1939 }
1940 read_reg = gen_int_mode (c, int_store_mode);
1941 read_reg = extract_low_bits (read_mode, int_store_mode, read_reg);
1942 }
1943 }
1944 else if (store_info->const_rhs
1945 && (require_cst
1946 || GET_MODE_CLASS (read_mode) != GET_MODE_CLASS (store_mode)))
1947 read_reg = extract_low_bits (read_mode, store_mode,
1948 copy_rtx (store_info->const_rhs));
1949 else if (VECTOR_MODE_P (read_mode) && VECTOR_MODE_P (store_mode)
1950 && known_le (GET_MODE_BITSIZE (read_mode), GET_MODE_BITSIZE (store_mode))
1951 && targetm.modes_tieable_p (read_mode, store_mode)
1952 && validate_subreg (read_mode, store_mode, copy_rtx (store_info->rhs),
1953 subreg_lowpart_offset (read_mode, store_mode)))
1954 read_reg = gen_lowpart (read_mode, copy_rtx (store_info->rhs));
1955 else
1956 read_reg = extract_low_bits (read_mode, store_mode,
1957 copy_rtx (store_info->rhs));
1958 if (require_cst && read_reg && !CONSTANT_P (read_reg))
1959 read_reg = NULL_RTX;
1960 return read_reg;
1961}
1962
1963/* Take a sequence of:
1964 A <- r1
1965 ...
1966 ... <- A
1967
1968 and change it into
1969 r2 <- r1
1970 A <- r1
1971 ...
1972 ... <- r2
1973
1974 or
1975
1976 r3 <- extract (r1)
1977 r3 <- r3 >> shift
1978 r2 <- extract (r3)
1979 ... <- r2
1980
1981 or
1982
1983 r2 <- extract (r1)
1984 ... <- r2
1985
1986 Depending on the alignment and the mode of the store and
1987 subsequent load.
1988
1989
1990 The STORE_INFO and STORE_INSN are for the store and READ_INFO
1991 and READ_INSN are for the read. Return true if the replacement
1992 went ok. */
1993
1994static bool
1995replace_read (store_info *store_info, insn_info_t store_insn,
1996 read_info_t read_info, insn_info_t read_insn, rtx *loc)
1997{
1998 machine_mode store_mode = GET_MODE (store_info->mem);
1999 machine_mode read_mode = GET_MODE (read_info->mem);
2000 rtx_insn *insns, *this_insn;
2001 rtx read_reg;
2002 basic_block bb;
2003
2004 if (!dbg_cnt (dse))
2005 return false;
2006
2007 /* Create a sequence of instructions to set up the read register.
2008 This sequence goes immediately before the store and its result
2009 is read by the load.
2010
2011 We need to keep this in perspective. We are replacing a read
2012 with a sequence of insns, but the read will almost certainly be
2013 in cache, so it is not going to be an expensive one. Thus, we
2014 are not willing to do a multi insn shift or worse a subroutine
2015 call to get rid of the read. */
2016 if (dump_file && (dump_flags & TDF_DETAILS))
2017 fprintf (dump_file, "trying to replace %smode load in insn %d"
2018 " from %smode store in insn %d\n",
2019 GET_MODE_NAME (read_mode), INSN_UID (read_insn->insn),
2020 GET_MODE_NAME (store_mode), INSN_UID (store_insn->insn));
2021 start_sequence ();
2022 bb = BLOCK_FOR_INSN (read_insn->insn);
2023 read_reg = get_stored_val (store_info,
2024 read_mode, read_info->offset, read_info->width,
2025 bb, false);
2026 if (read_reg == NULL_RTX)
2027 {
2028 end_sequence ();
2029 if (dump_file && (dump_flags & TDF_DETAILS))
2030 fprintf (dump_file, " -- could not extract bits of stored value\n");
2031 return false;
2032 }
2033 /* Force the value into a new register so that it won't be clobbered
2034 between the store and the load. */
2035 if (WORD_REGISTER_OPERATIONS
2036 && GET_CODE (read_reg) == SUBREG
2037 && REG_P (SUBREG_REG (read_reg))
2038 && GET_MODE (SUBREG_REG (read_reg)) == word_mode)
2039 {
2040 /* For WORD_REGISTER_OPERATIONS with subreg of word_mode register
2041 force SUBREG_REG into a new register rather than the SUBREG. */
2042 rtx r = copy_to_mode_reg (word_mode, SUBREG_REG (read_reg));
2043 read_reg = shallow_copy_rtx (read_reg);
2044 SUBREG_REG (read_reg) = r;
2045 }
2046 else
2047 read_reg = copy_to_mode_reg (read_mode, read_reg);
2048 insns = end_sequence ();
2049
2050 if (insns != NULL_RTX)
2051 {
2052 /* Now we have to scan the set of new instructions to see if the
2053 sequence contains any sets of hard registers that happened to be
2054 live at this point. For instance, this can happen if one of
2055 the insns sets the CC and the CC happened to be live at that
2056 point. This does occasionally happen, see PR 37922. */
2057 bitmap regs_set = BITMAP_ALLOC (&reg_obstack);
2058
2059 for (this_insn = insns;
2060 this_insn != NULL_RTX; this_insn = NEXT_INSN (this_insn))
2061 {
2062 if (insn_invalid_p (this_insn, false))
2063 {
2064 if (dump_file && (dump_flags & TDF_DETAILS))
2065 {
2066 fprintf (dump_file, " -- replacing the loaded MEM with ");
2067 print_simple_rtl (dump_file, read_reg);
2068 fprintf (dump_file, " led to an invalid instruction\n");
2069 }
2070 BITMAP_FREE (regs_set);
2071 return false;
2072 }
2073 note_stores (this_insn, look_for_hardregs, regs_set);
2074 }
2075
2076 if (store_insn->fixed_regs_live)
2077 bitmap_and_into (regs_set, store_insn->fixed_regs_live);
2078 if (!bitmap_empty_p (map: regs_set))
2079 {
2080 if (dump_file && (dump_flags & TDF_DETAILS))
2081 {
2082 fprintf (stream: dump_file, format: "abandoning replacement because sequence "
2083 "clobbers live hardregs:");
2084 df_print_regset (file: dump_file, r: regs_set);
2085 }
2086
2087 BITMAP_FREE (regs_set);
2088 return false;
2089 }
2090 BITMAP_FREE (regs_set);
2091 }
2092
2093 subrtx_iterator::array_type array;
2094 FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
2095 {
2096 const_rtx x = *iter;
2097 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
2098 {
2099 if (dump_file && (dump_flags & TDF_DETAILS))
2100 fprintf (stream: dump_file, format: " -- replacing the MEM failed due to address "
2101 "side-effects\n");
2102 return false;
2103 }
2104 }
2105
2106 if (validate_change (read_insn->insn, loc, read_reg, 0))
2107 {
2108 deferred_change *change = deferred_change_pool.allocate ();
2109
2110 /* Insert this right before the store insn where it will be safe
2111 from later insns that might change it before the read. */
2112 emit_insn_before (insns, store_insn->insn);
2113
2114 /* And now for the kludge part: cselib croaks if you just
2115 return at this point. There are two reasons for this:
2116
2117 1) Cselib has an idea of how many pseudos there are and
2118 that does not include the new ones we just added.
2119
2120 2) Cselib does not know about the move insn we added
2121 above the store_info, and there is no way to tell it
2122 about it, because it has "moved on".
2123
2124 Problem (1) is fixable with a certain amount of engineering.
2125 Problem (2) requires starting the bb from scratch, which
2126 could be expensive.
2127
2128 So we are just going to have to lie. The move/extraction
2129 insns are not really an issue, cselib did not see them. But
2130 the use of the new pseudo in read_insn is a real problem because
2131 cselib has not scanned this insn. The way that we solve this
2132 problem is that we are just going to put the mem back for now
2133 and when we are finished with the block, we undo this. We
2134 keep a table of mems to get rid of. At the end of the basic
2135 block we can put them back. */
2136
2137 *loc = read_info->mem;
2138 change->next = deferred_change_list;
2139 deferred_change_list = change;
2140 change->loc = loc;
2141 change->reg = read_reg;
2142
2143 /* Get rid of the read_info, from the point of view of the
2144 rest of dse, play like this read never happened. */
2145 read_insn->read_rec = read_info->next;
2146 read_info_type_pool.remove (read_info);
2147 if (dump_file && (dump_flags & TDF_DETAILS))
2148 {
2149 fprintf (dump_file, " -- replaced the loaded MEM with ");
2150 print_simple_rtl (dump_file, read_reg);
2151 fprintf (dump_file, "\n");
2152 }
2153 return true;
2154 }
2155 else
2156 {
2157 if (dump_file && (dump_flags & TDF_DETAILS))
2158 {
2159 fprintf (dump_file, " -- replacing the loaded MEM with ");
2160 print_simple_rtl (dump_file, read_reg);
2161 fprintf (dump_file, " led to an invalid instruction\n");
2162 }
2163 return false;
2164 }
2165}
2166
2167/* Check the address of MEM *LOC and kill any appropriate stores that may
2168 be active. */
2169
2170static void
2171check_mem_read_rtx (rtx *loc, bb_info_t bb_info, bool used_in_call = false)
2172{
2173 rtx mem = *loc, mem_addr;
2174 insn_info_t insn_info;
2175 poly_int64 offset = 0;
2176 poly_int64 width = 0;
2177 cselib_val *base = NULL;
2178 int group_id;
2179 read_info_t read_info;
2180
2181 insn_info = bb_info->last_insn;
2182
2183 if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2184 || MEM_VOLATILE_P (mem))
2185 {
2186 if (crtl->stack_protect_guard
2187 && (MEM_EXPR (mem) == crtl->stack_protect_guard
2188 || (crtl->stack_protect_guard_decl
2189 && MEM_EXPR (mem) == crtl->stack_protect_guard_decl))
2190 && MEM_VOLATILE_P (mem))
2191 {
2192 /* This is either the stack protector canary on the stack,
2193 which ought to be written by a MEM_VOLATILE_P store and
2194 thus shouldn't be deleted and is read at the very end of
2195 the function, but shouldn't conflict with any other store.
2196 Or it is __stack_chk_guard variable or TLS or whatever else
2197 MEM holding the canary value, which really shouldn't be
2198 ever modified in -fstack-protector* protected functions,
2199 otherwise the prologue store wouldn't match the epilogue
2200 check. */
2201 if (dump_file && (dump_flags & TDF_DETAILS))
2202 fprintf (stream: dump_file, format: " stack protector canary read ignored.\n");
2203 insn_info->cannot_delete = true;
2204 return;
2205 }
2206
2207 if (dump_file && (dump_flags & TDF_DETAILS))
2208 fprintf (stream: dump_file, format: " adding wild read, volatile or barrier.\n");
2209 add_wild_read (bb_info);
2210 insn_info->cannot_delete = true;
2211 return;
2212 }
2213
2214 /* If it is reading readonly mem, then there can be no conflict with
2215 another write. */
2216 if (MEM_READONLY_P (mem))
2217 return;
2218
2219 if (!canon_address (mem, &group_id, &offset, &base))
2220 {
2221 if (dump_file && (dump_flags & TDF_DETAILS))
2222 fprintf (dump_file, " adding wild read, canon_address failure.\n");
2223 add_wild_read (bb_info);
2224 return;
2225 }
2226
2227 if (GET_MODE (mem) == BLKmode)
2228 width = -1;
2229 else
2230 width = GET_MODE_SIZE (GET_MODE (mem));
2231
2232 if (!endpoint_representable_p (offset, known_eq (width, -1) ? 1 : width))
2233 {
2234 if (dump_file && (dump_flags & TDF_DETAILS))
2235 fprintf (stream: dump_file, format: " adding wild read, due to overflow.\n");
2236 add_wild_read (bb_info);
2237 return;
2238 }
2239
2240 read_info = read_info_type_pool.allocate ();
2241 read_info->group_id = group_id;
2242 read_info->mem = mem;
2243 read_info->offset = offset;
2244 read_info->width = width;
2245 read_info->next = insn_info->read_rec;
2246 insn_info->read_rec = read_info;
2247 if (group_id < 0)
2248 mem_addr = base->val_rtx;
2249 else
2250 {
2251 group_info *group = rtx_group_vec[group_id];
2252 mem_addr = group->canon_base_addr;
2253 }
2254 if (maybe_ne (a: offset, b: 0))
2255 mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
2256 /* Avoid passing VALUE RTXen as mem_addr to canon_true_dependence
2257 which will over and over re-create proper RTL and re-apply the
2258 offset above. See PR80960 where we almost allocate 1.6GB of PLUS
2259 RTXen that way. */
2260 mem_addr = get_addr (mem_addr);
2261
2262 if (group_id >= 0)
2263 {
2264 /* This is the restricted case where the base is a constant or
2265 the frame pointer and offset is a constant. */
2266 insn_info_t i_ptr = active_local_stores;
2267 insn_info_t last = NULL;
2268
2269 if (dump_file && (dump_flags & TDF_DETAILS))
2270 {
2271 if (!known_size_p (a: width))
2272 fprintf (stream: dump_file, format: " processing const load gid=%d[BLK]\n",
2273 group_id);
2274 else
2275 {
2276 fprintf (stream: dump_file, format: " processing const load gid=%d", group_id);
2277 print_range (file: dump_file, offset, width);
2278 fprintf (stream: dump_file, format: "\n");
2279 }
2280 }
2281
2282 while (i_ptr)
2283 {
2284 bool remove = false;
2285 store_info *store_info = i_ptr->store_rec;
2286
2287 /* Skip the clobbers. */
2288 while (!store_info->is_set)
2289 store_info = store_info->next;
2290
2291 /* There are three cases here. */
2292 if (store_info->group_id < 0)
2293 /* We have a cselib store followed by a read from a
2294 const base. */
2295 remove
2296 = canon_true_dependence (store_info->mem,
2297 GET_MODE (store_info->mem),
2298 store_info->mem_addr,
2299 mem, mem_addr);
2300
2301 else if (group_id == store_info->group_id)
2302 {
2303 /* This is a block mode load. We may get lucky and
2304 canon_true_dependence may save the day. */
2305 if (!known_size_p (a: width))
2306 remove
2307 = canon_true_dependence (store_info->mem,
2308 GET_MODE (store_info->mem),
2309 store_info->mem_addr,
2310 mem, mem_addr);
2311
2312 /* If this read is just reading back something that we just
2313 stored, rewrite the read. */
2314 else
2315 {
2316 if (!used_in_call
2317 && store_info->rhs
2318 && known_subrange_p (pos1: offset, size1: width, pos2: store_info->offset,
2319 size2: store_info->width)
2320 && all_positions_needed_p (s_info: store_info,
2321 start: offset - store_info->offset,
2322 width)
2323 && replace_read (store_info, store_insn: i_ptr, read_info,
2324 read_insn: insn_info, loc))
2325 return;
2326
2327 /* The bases are the same, just see if the offsets
2328 could overlap. */
2329 if (ranges_maybe_overlap_p (pos1: offset, size1: width,
2330 pos2: store_info->offset,
2331 size2: store_info->width))
2332 remove = true;
2333 }
2334 }
2335
2336 /* else
2337 The else case that is missing here is that the
2338 bases are constant but different. There is nothing
2339 to do here because there is no overlap. */
2340
2341 if (remove)
2342 {
2343 if (dump_file && (dump_flags & TDF_DETAILS))
2344 dump_insn_info (start: "removing from active", insn_info: i_ptr);
2345
2346 active_local_stores_len--;
2347 if (last)
2348 last->next_local_store = i_ptr->next_local_store;
2349 else
2350 active_local_stores = i_ptr->next_local_store;
2351 }
2352 else
2353 last = i_ptr;
2354 i_ptr = i_ptr->next_local_store;
2355 }
2356 }
2357 else
2358 {
2359 insn_info_t i_ptr = active_local_stores;
2360 insn_info_t last = NULL;
2361 if (dump_file && (dump_flags & TDF_DETAILS))
2362 {
2363 fprintf (stream: dump_file, format: " processing cselib load mem:");
2364 print_inline_rtx (dump_file, mem, 0);
2365 fprintf (stream: dump_file, format: "\n");
2366 }
2367
2368 while (i_ptr)
2369 {
2370 bool remove = false;
2371 store_info *store_info = i_ptr->store_rec;
2372
2373 if (dump_file && (dump_flags & TDF_DETAILS))
2374 fprintf (stream: dump_file, format: " processing cselib load against insn %d\n",
2375 INSN_UID (insn: i_ptr->insn));
2376
2377 /* Skip the clobbers. */
2378 while (!store_info->is_set)
2379 store_info = store_info->next;
2380
2381 /* If this read is just reading back something that we just
2382 stored, rewrite the read. */
2383 if (!used_in_call
2384 && store_info->rhs
2385 && store_info->group_id == -1
2386 && store_info->cse_base == base
2387 && known_subrange_p (pos1: offset, size1: width, pos2: store_info->offset,
2388 size2: store_info->width)
2389 && all_positions_needed_p (s_info: store_info,
2390 start: offset - store_info->offset, width)
2391 && replace_read (store_info, store_insn: i_ptr, read_info, read_insn: insn_info, loc))
2392 return;
2393
2394 remove = canon_true_dependence (store_info->mem,
2395 GET_MODE (store_info->mem),
2396 store_info->mem_addr,
2397 mem, mem_addr);
2398
2399 if (remove)
2400 {
2401 if (dump_file && (dump_flags & TDF_DETAILS))
2402 dump_insn_info (start: "removing from active", insn_info: i_ptr);
2403
2404 active_local_stores_len--;
2405 if (last)
2406 last->next_local_store = i_ptr->next_local_store;
2407 else
2408 active_local_stores = i_ptr->next_local_store;
2409 }
2410 else
2411 last = i_ptr;
2412 i_ptr = i_ptr->next_local_store;
2413 }
2414 }
2415}
2416
2417 /* A note_uses callback. DATA points to the bb_info that is passed
2418 on to check_mem_read_rtx, which is called for every MEM found in
2419 any part of *LOC. */
2420
2421static void
2422check_mem_read_use (rtx *loc, void *data)
2423{
2424 subrtx_ptr_iterator::array_type array;
2425 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
2426 {
2427 rtx *loc = *iter;
2428 if (MEM_P (*loc))
2429 check_mem_read_rtx (loc, (bb_info_t) data);
2430 }
2431}
2432
2433
2434/* Get arguments passed to CALL_INSN. Return TRUE if successful.
2435 So far it only handles arguments passed in registers. */
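/* For example (illustration only): for the memset case in scan_insn below,
   NARGS is 3 and a successful call leaves ARGS[0] = the destination
   address, ARGS[1] = the fill value and ARGS[2] = the length, each taken
   from CALL_INSN_FUNCTION_USAGE and possibly simplified to a constant by
   cselib_expand_value_rtx.  */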
2436
2437static bool
2438get_call_args (rtx call_insn, tree fn, rtx *args, int nargs)
2439{
2440 CUMULATIVE_ARGS args_so_far_v;
2441 cumulative_args_t args_so_far;
2442 tree arg;
2443 int idx;
2444
2445 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
2446 args_so_far = pack_cumulative_args (arg: &args_so_far_v);
2447
2448 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
2449 for (idx = 0;
2450 arg != void_list_node && idx < nargs;
2451 arg = TREE_CHAIN (arg), idx++)
2452 {
2453 scalar_int_mode mode;
2454 rtx reg, link, tmp;
2455
2456 if (!is_int_mode (TYPE_MODE (TREE_VALUE (arg)), int_mode: &mode))
2457 return false;
2458
2459 function_arg_info arg (mode, /*named=*/true);
2460 reg = targetm.calls.function_arg (args_so_far, arg);
2461 if (!reg || !REG_P (reg) || GET_MODE (reg) != mode)
2462 return false;
2463
2464 for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
2465 link;
2466 link = XEXP (link, 1))
2467 if (GET_CODE (XEXP (link, 0)) == USE)
2468 {
2469 scalar_int_mode arg_mode;
2470 args[idx] = XEXP (XEXP (link, 0), 0);
2471 if (REG_P (args[idx])
2472 && REGNO (args[idx]) == REGNO (reg)
2473 && (GET_MODE (args[idx]) == mode
2474 || (is_int_mode (GET_MODE (args[idx]), int_mode: &arg_mode)
2475 && (GET_MODE_SIZE (mode: arg_mode) <= UNITS_PER_WORD)
2476 && (GET_MODE_SIZE (mode: arg_mode) > GET_MODE_SIZE (mode)))))
2477 break;
2478 }
2479 if (!link)
2480 return false;
2481
2482 tmp = cselib_expand_value_rtx (args[idx], scratch, 5);
2483 if (GET_MODE (args[idx]) != mode)
2484 {
2485 if (!tmp || !CONST_INT_P (tmp))
2486 return false;
2487 tmp = gen_int_mode (INTVAL (tmp), mode);
2488 }
2489 if (tmp)
2490 args[idx] = tmp;
2491
2492 targetm.calls.function_arg_advance (args_so_far, arg);
2493 }
2494 if (arg != void_list_node || idx != nargs)
2495 return false;
2496 return true;
2497}
2498
2499/* Return a bitmap of the fixed registers contained in IN. */
2500
2501static bitmap
2502copy_fixed_regs (const_bitmap in)
2503{
2504 bitmap ret;
2505
2506 ret = ALLOC_REG_SET (NULL);
2507 bitmap_and (ret, in, bitmap_view<HARD_REG_SET> (fixed_reg_set));
2508 return ret;
2509}
2510
2511/* Apply record_store to all candidate stores in INSN. Mark INSN
2512 if some part of it is not a candidate store and assigns to a
2513 non-register target. */
2514
2515static void
2516scan_insn (bb_info_t bb_info, rtx_insn *insn, int max_active_local_stores)
2517{
2518 rtx body;
2519 insn_info_type *insn_info = insn_info_type_pool.allocate ();
2520 int mems_found = 0;
2521 memset (s: insn_info, c: 0, n: sizeof (struct insn_info_type));
2522
2523 if (dump_file && (dump_flags & TDF_DETAILS))
2524 fprintf (stream: dump_file, format: "\n**scanning insn=%d\n",
2525 INSN_UID (insn));
2526
2527 insn_info->prev_insn = bb_info->last_insn;
2528 insn_info->insn = insn;
2529 bb_info->last_insn = insn_info;
2530
2531 if (DEBUG_INSN_P (insn))
2532 {
2533 insn_info->cannot_delete = true;
2534 return;
2535 }
2536
2537 /* Look at all of the uses in the insn. */
2538 note_uses (&PATTERN (insn), check_mem_read_use, bb_info);
2539
2540 if (CALL_P (insn))
2541 {
2542 bool const_call;
2543 rtx call, sym;
2544 tree memset_call = NULL_TREE;
2545
2546 insn_info->cannot_delete = true;
2547
2548 /* Const functions cannot do anything bad, i.e. read memory;
2549 however, they can read their parameters, which may have
2550 been pushed onto the stack.
2551 memset and bzero don't read memory either. */
2552 const_call = RTL_CONST_CALL_P (insn);
2553 if (!const_call
2554 && (call = get_call_rtx_from (insn))
2555 && (sym = XEXP (XEXP (call, 0), 0))
2556 && GET_CODE (sym) == SYMBOL_REF
2557 && SYMBOL_REF_DECL (sym)
2558 && TREE_CODE (SYMBOL_REF_DECL (sym)) == FUNCTION_DECL
2559 && fndecl_built_in_p (SYMBOL_REF_DECL (sym), name1: BUILT_IN_MEMSET))
2560 memset_call = SYMBOL_REF_DECL (sym);
2561
2562 if (const_call || memset_call)
2563 {
2564 insn_info_t i_ptr = active_local_stores;
2565 insn_info_t last = NULL;
2566
2567 if (dump_file && (dump_flags & TDF_DETAILS))
2568 fprintf (stream: dump_file, format: "%s call %d\n",
2569 const_call ? "const" : "memset", INSN_UID (insn));
2570
2571 /* See the head comment of the frame_read field. */
2572 if (reload_completed
2573 /* Tail calls are storing their arguments using
2574 arg pointer. If it is a frame pointer on the target,
2575 even before reload we need to kill frame pointer based
2576 stores. */
2577 || (SIBLING_CALL_P (insn)
2578 && HARD_FRAME_POINTER_IS_ARG_POINTER))
2579 insn_info->frame_read = true;
2580
2581 /* Loop over the active stores and remove those which are
2582 killed by the const function call. */
2583 while (i_ptr)
2584 {
2585 bool remove_store = false;
2586
2587 /* The stack pointer based stores are always killed. */
2588 if (i_ptr->stack_pointer_based)
2589 remove_store = true;
2590
2591 /* If the frame is read, the frame related stores are killed. */
2592 else if (insn_info->frame_read)
2593 {
2594 store_info *store_info = i_ptr->store_rec;
2595
2596 /* Skip the clobbers. */
2597 while (!store_info->is_set)
2598 store_info = store_info->next;
2599
2600 if (store_info->group_id >= 0
2601 && rtx_group_vec[store_info->group_id]->frame_related)
2602 remove_store = true;
2603 }
2604
2605 if (remove_store)
2606 {
2607 if (dump_file && (dump_flags & TDF_DETAILS))
2608 dump_insn_info (start: "removing from active", insn_info: i_ptr);
2609
2610 active_local_stores_len--;
2611 if (last)
2612 last->next_local_store = i_ptr->next_local_store;
2613 else
2614 active_local_stores = i_ptr->next_local_store;
2615 }
2616 else
2617 last = i_ptr;
2618
2619 i_ptr = i_ptr->next_local_store;
2620 }
2621
2622 if (memset_call)
2623 {
2624 rtx args[3];
2625 if (get_call_args (insn, memset_call, args, 3)
2626 && CONST_INT_P (args[1])
2627 && CONST_INT_P (args[2])
2628 && INTVAL (args[2]) > 0)
2629 {
2630 rtx mem = gen_rtx_MEM (BLKmode, args[0]);
2631 set_mem_size (mem, INTVAL (args[2]));
2632 body = gen_rtx_SET (mem, args[1]);
2633 mems_found += record_store (body, bb_info);
2634 if (dump_file && (dump_flags & TDF_DETAILS))
2635 fprintf (stream: dump_file, format: "handling memset as BLKmode store\n");
2636 if (mems_found == 1)
2637 {
2638 if (active_local_stores_len++ >= max_active_local_stores)
2639 {
2640 active_local_stores_len = 1;
2641 active_local_stores = NULL;
2642 }
2643 insn_info->fixed_regs_live
2644 = copy_fixed_regs (in: bb_info->regs_live);
2645 insn_info->next_local_store = active_local_stores;
2646 active_local_stores = insn_info;
2647 }
2648 }
2649 else
2650 clear_rhs_from_active_local_stores ();
2651 }
2652 }
2653 else if (SIBLING_CALL_P (insn)
2654 && (reload_completed || HARD_FRAME_POINTER_IS_ARG_POINTER))
2655 /* Arguments for a sibling call that are pushed to memory are passed
2656 using the incoming argument pointer of the current function. After
2657 reload that might be (and likely is) frame pointer based. And, if
2658 it is a frame pointer on the target, even before reload we need to
2659 kill frame pointer based stores. */
2660 add_wild_read (bb_info);
2661 else
2662 /* Every other call, including pure functions, may read any memory
2663 that is not relative to the frame. */
2664 add_non_frame_wild_read (bb_info);
2665
2666 for (rtx link = CALL_INSN_FUNCTION_USAGE (insn);
2667 link != NULL_RTX;
2668 link = XEXP (link, 1))
2669 if (GET_CODE (XEXP (link, 0)) == USE && MEM_P (XEXP (XEXP (link, 0),0)))
2670 check_mem_read_rtx (&XEXP (XEXP (link, 0), 0), bb_info, true);
2671
2672 return;
2673 }
2674
2675 /* Assuming that there are sets in these insns, we cannot delete
2676 them. */
2677 if ((GET_CODE (PATTERN (insn)) == CLOBBER)
2678 || volatile_refs_p (PATTERN (insn))
2679 || (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
2680 || (RTX_FRAME_RELATED_P (insn))
2681 || find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX))
2682 insn_info->cannot_delete = true;
2683
2684 body = PATTERN (insn);
2685 if (GET_CODE (body) == PARALLEL)
2686 {
2687 int i;
2688 for (i = 0; i < XVECLEN (body, 0); i++)
2689 mems_found += record_store (XVECEXP (body, 0, i), bb_info);
2690 }
2691 else
2692 mems_found += record_store (body, bb_info);
2693
2694 if (dump_file && (dump_flags & TDF_DETAILS))
2695 fprintf (stream: dump_file, format: "mems_found = %d, cannot_delete = %s\n",
2696 mems_found, insn_info->cannot_delete ? "true" : "false");
2697
2698 /* If we found some sets of mems, add it into the active_local_stores so
2699 that it can be locally deleted if found dead or used for
2700 replace_read and redundant constant store elimination. Otherwise mark
2701 it as cannot delete. This simplifies the processing later. */
2702 if (mems_found == 1)
2703 {
2704 if (active_local_stores_len++ >= max_active_local_stores)
2705 {
2706 active_local_stores_len = 1;
2707 active_local_stores = NULL;
2708 }
2709 insn_info->fixed_regs_live = copy_fixed_regs (in: bb_info->regs_live);
2710 insn_info->next_local_store = active_local_stores;
2711 active_local_stores = insn_info;
2712 }
2713 else
2714 insn_info->cannot_delete = true;
2715}
2716
2717
2718/* Remove BASE from the set of active_local_stores. This is a
2719 callback from cselib that is used to get rid of the stores in
2720 active_local_stores. */
2721
2722static void
2723remove_useless_values (cselib_val *base)
2724{
2725 insn_info_t insn_info = active_local_stores;
2726 insn_info_t last = NULL;
2727
2728 while (insn_info)
2729 {
2730 store_info *store_info = insn_info->store_rec;
2731 bool del = false;
2732
2733 /* If ANY of the store_infos match the cselib group that is
2734 being deleted, then the insn cannot be deleted. */
2735 while (store_info)
2736 {
2737 if ((store_info->group_id == -1)
2738 && (store_info->cse_base == base))
2739 {
2740 del = true;
2741 break;
2742 }
2743 store_info = store_info->next;
2744 }
2745
2746 if (del)
2747 {
2748 active_local_stores_len--;
2749 if (last)
2750 last->next_local_store = insn_info->next_local_store;
2751 else
2752 active_local_stores = insn_info->next_local_store;
2753 free_store_info (insn_info);
2754 }
2755 else
2756 last = insn_info;
2757
2758 insn_info = insn_info->next_local_store;
2759 }
2760}
2761
2762
2763/* Do all of step 1. */
2764
2765static void
2766dse_step1 (void)
2767{
2768 basic_block bb;
2769 bitmap regs_live = BITMAP_ALLOC (obstack: &reg_obstack);
2770
2771 cselib_init (0);
2772 all_blocks = BITMAP_ALLOC (NULL);
2773 bitmap_set_bit (all_blocks, ENTRY_BLOCK);
2774 bitmap_set_bit (all_blocks, EXIT_BLOCK);
2775
2776 /* For -O1 reduce the maximum number of active local stores for RTL DSE
2777 since this can consume huge amounts of memory (PR89115). */
2778 int max_active_local_stores = param_max_dse_active_local_stores;
2779 if (optimize < 2)
2780 max_active_local_stores /= 10;
2781
2782 FOR_ALL_BB_FN (bb, cfun)
2783 {
2784 insn_info_t ptr;
2785 bb_info_t bb_info = dse_bb_info_type_pool.allocate ();
2786
2787 memset (s: bb_info, c: 0, n: sizeof (dse_bb_info_type));
2788 bitmap_set_bit (all_blocks, bb->index);
2789 bb_info->regs_live = regs_live;
2790
2791 bitmap_copy (regs_live, DF_LR_IN (bb));
2792 df_simulate_initialize_forwards (bb, regs_live);
2793
2794 bb_table[bb->index] = bb_info;
2795 cselib_discard_hook = remove_useless_values;
2796
2797 if (bb->index >= NUM_FIXED_BLOCKS)
2798 {
2799 rtx_insn *insn;
2800
2801 active_local_stores = NULL;
2802 active_local_stores_len = 0;
2803 cselib_clear_table ();
2804
2805 /* Scan the insns. */
2806 FOR_BB_INSNS (bb, insn)
2807 {
2808 if (INSN_P (insn))
2809 scan_insn (bb_info, insn, max_active_local_stores);
2810 cselib_process_insn (insn);
2811 if (INSN_P (insn))
2812 df_simulate_one_insn_forwards (bb, insn, regs_live);
2813 }
2814
2815 /* This is something of a hack, because the global algorithm
2816 is supposed to take care of the case where stores go dead
2817 at the end of the function. However, the global
2818 algorithm must take a more conservative view of block
2819 mode reads than the local alg does. So to get the case
2820 where you have a store to the frame followed by a non
2821 overlapping block mode read, we look at the active local
2822 stores at the end of the function and delete all of the
2823 frame and spill based ones. */
2824 if (stores_off_frame_dead_at_return
2825 && (EDGE_COUNT (bb->succs) == 0
2826 || (single_succ_p (bb)
2827 && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
2828 && ! crtl->calls_eh_return)))
2829 {
2830 insn_info_t i_ptr = active_local_stores;
2831 while (i_ptr)
2832 {
2833 store_info *store_info = i_ptr->store_rec;
2834
2835 /* Skip the clobbers. */
2836 while (!store_info->is_set)
2837 store_info = store_info->next;
2838 if (store_info->group_id >= 0)
2839 {
2840 group_info *group = rtx_group_vec[store_info->group_id];
2841 if (group->frame_related && !i_ptr->cannot_delete)
2842 delete_dead_store_insn (insn_info: i_ptr);
2843 }
2844
2845 i_ptr = i_ptr->next_local_store;
2846 }
2847 }
2848
2849 /* Get rid of the loads that were discovered in
2850 replace_read. Cselib is finished with this block. */
2851 while (deferred_change_list)
2852 {
2853 deferred_change *next = deferred_change_list->next;
2854
2855 /* There is no reason to validate this change. That was
2856 done earlier. */
2857 *deferred_change_list->loc = deferred_change_list->reg;
2858 deferred_change_pool.remove (object: deferred_change_list);
2859 deferred_change_list = next;
2860 }
2861
2862 /* Get rid of all of the cselib based store_infos in this
2863 block and mark the containing insns as not being
2864 deletable. */
2865 ptr = bb_info->last_insn;
2866 while (ptr)
2867 {
2868 if (ptr->contains_cselib_groups)
2869 {
2870 store_info *s_info = ptr->store_rec;
2871 while (s_info && !s_info->is_set)
2872 s_info = s_info->next;
2873 if (s_info
2874 && s_info->redundant_reason
2875 && s_info->redundant_reason->insn
2876 && !ptr->cannot_delete)
2877 {
2878 if (dump_file && (dump_flags & TDF_DETAILS))
2879 fprintf (stream: dump_file, format: "Locally deleting insn %d "
2880 "because insn %d stores the "
2881 "same value and couldn't be "
2882 "eliminated\n",
2883 INSN_UID (insn: ptr->insn),
2884 INSN_UID (insn: s_info->redundant_reason->insn));
2885 delete_dead_store_insn (insn_info: ptr);
2886 }
2887 free_store_info (insn_info: ptr);
2888 }
2889 else
2890 {
2891 store_info *s_info;
2892
2893 /* Free at least positions_needed bitmaps. */
2894 for (s_info = ptr->store_rec; s_info; s_info = s_info->next)
2895 if (s_info->is_large)
2896 {
2897 BITMAP_FREE (s_info->positions_needed.large.bmap);
2898 s_info->is_large = false;
2899 }
2900 }
2901 ptr = ptr->prev_insn;
2902 }
2903
2904 cse_store_info_pool.release ();
2905 }
2906 bb_info->regs_live = NULL;
2907 }
2908
2909 BITMAP_FREE (regs_live);
2910 cselib_finish ();
2911 rtx_group_table->empty ();
2912}
2913
2914
2915/*----------------------------------------------------------------------------
2916 Second step.
2917
2918 Assign each byte position in the stores that we are going to
2919 analyze globally to a position in the bitmaps. Returns true if
2920 there are any bit positions assigned.
2921----------------------------------------------------------------------------*/
2922
2923static void
2924dse_step2_init (void)
2925{
2926 unsigned int i;
2927 group_info *group;
2928
2929 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
2930 {
2931 /* For all non stack related bases, we only consider a store to
2932 be deletable if there are two or more stores for that
2933 position. This is because it takes one store to make the
2934 other store redundant. However, for the stores that are
2935 stack related, we consider them if there is only one store
2936 for the position. We do this because the stack related
2937 stores can be deleted if there is no read between them and
2938 the end of the function.
2939
2940 To make this work in the current framework, we take the stack
2941 related bases and add all of the bits from store1 into store2.
2942 This has the effect of making them eligible even if there is
2943 only one store. */
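      /* A concrete illustration (not part of the original comment): if a
	 frame related group has a single store at positive offset 8, only
	 store1_p has bit 8 set; the IOR below copies it into store2_p, so
	 dse_step2 still assigns that position a bitmap index and the store
	 remains a candidate for global deletion.  */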
2944
2945 if (stores_off_frame_dead_at_return && group->frame_related)
2946 {
2947 bitmap_ior_into (group->store2_n, group->store1_n);
2948 bitmap_ior_into (group->store2_p, group->store1_p);
2949 if (dump_file && (dump_flags & TDF_DETAILS))
2950 fprintf (stream: dump_file, format: "group %d is frame related ", i);
2951 }
2952
2953 group->offset_map_size_n++;
2954 group->offset_map_n = XOBNEWVEC (&dse_obstack, int,
2955 group->offset_map_size_n);
2956 group->offset_map_size_p++;
2957 group->offset_map_p = XOBNEWVEC (&dse_obstack, int,
2958 group->offset_map_size_p);
2959 group->process_globally = false;
2960 if (dump_file && (dump_flags & TDF_DETAILS))
2961 {
2962 fprintf (stream: dump_file, format: "group %d(%d+%d): ", i,
2963 (int)bitmap_count_bits (group->store2_n),
2964 (int)bitmap_count_bits (group->store2_p));
2965 bitmap_print (dump_file, group->store2_n, "n ", " ");
2966 bitmap_print (dump_file, group->store2_p, "p ", "\n");
2967 }
2968 }
2969}
2970
2971
2972/* Init the offset tables. */
2973
2974static bool
2975dse_step2 (void)
2976{
2977 unsigned int i;
2978 group_info *group;
2979 /* Position 0 is unused because 0 is used in the maps to mean
2980 unused. */
2981 current_position = 1;
2982 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
2983 {
2984 bitmap_iterator bi;
2985 unsigned int j;
2986
2987 memset (s: group->offset_map_n, c: 0, n: sizeof (int) * group->offset_map_size_n);
2988 memset (s: group->offset_map_p, c: 0, n: sizeof (int) * group->offset_map_size_p);
2989 bitmap_clear (group->group_kill);
2990
2991 EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
2992 {
2993 bitmap_set_bit (group->group_kill, current_position);
2994 if (bitmap_bit_p (group->escaped_n, j))
2995 bitmap_set_bit (kill_on_calls, current_position);
2996 group->offset_map_n[j] = current_position++;
2997 group->process_globally = true;
2998 }
2999 EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
3000 {
3001 bitmap_set_bit (group->group_kill, current_position);
3002 if (bitmap_bit_p (group->escaped_p, j))
3003 bitmap_set_bit (kill_on_calls, current_position);
3004 group->offset_map_p[j] = current_position++;
3005 group->process_globally = true;
3006 }
3007 }
3008 return current_position != 1;
3009}
3010
3011
3012
3013/*----------------------------------------------------------------------------
3014 Third step.
3015
3016 Build the bit vectors for the transfer functions.
3017----------------------------------------------------------------------------*/
3018
3019
3020/* Look up the bitmap index for OFFSET in GROUP_INFO. If it is not
3021 there, return 0. */
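/* For instance (illustrative): offset 5 maps to offset_map_p[5] and
   offset -4 maps to offset_map_n[4]; either lookup returns 0 when the
   byte was never assigned a global position in dse_step2.  */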
3022
3023static int
3024get_bitmap_index (group_info *group_info, HOST_WIDE_INT offset)
3025{
3026 if (offset < 0)
3027 {
3028 HOST_WIDE_INT offset_p = -offset;
3029 if (offset_p >= group_info->offset_map_size_n)
3030 return 0;
3031 return group_info->offset_map_n[offset_p];
3032 }
3033 else
3034 {
3035 if (offset >= group_info->offset_map_size_p)
3036 return 0;
3037 return group_info->offset_map_p[offset];
3038 }
3039}
3040
3041
3042/* Process the STORE_INFOs into the bitmaps into GEN and KILL. KILL
3043 may be NULL. */
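/* As an illustrative example: a store in a globally processed group
   covering bytes [4, 12) sets the bitmap index of each of bytes 4..11
   in GEN and clears it in KILL, making those positions available to
   kill the same bytes of stores that occur earlier in program order.  */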
3044
3045static void
3046scan_stores (store_info *store_info, bitmap gen, bitmap kill)
3047{
3048 while (store_info)
3049 {
3050 HOST_WIDE_INT i, offset, width;
3051 group_info *group_info
3052 = rtx_group_vec[store_info->group_id];
3053 /* We can (conservatively) ignore stores whose bounds aren't known;
3054 they simply don't generate new global dse opportunities. */
3055 if (group_info->process_globally
3056 && store_info->offset.is_constant (const_value: &offset)
3057 && store_info->width.is_constant (const_value: &width))
3058 {
3059 HOST_WIDE_INT end = offset + width;
3060 for (i = offset; i < end; i++)
3061 {
3062 int index = get_bitmap_index (group_info, offset: i);
3063 if (index != 0)
3064 {
3065 bitmap_set_bit (gen, index);
3066 if (kill)
3067 bitmap_clear_bit (kill, index);
3068 }
3069 }
3070 }
3071 store_info = store_info->next;
3072 }
3073}
3074
3075
3076/* Process the READ_INFOs into the bitmaps into GEN and KILL. KILL
3077 may be NULL. */
3078
3079static void
3080scan_reads (insn_info_t insn_info, bitmap gen, bitmap kill)
3081{
3082 read_info_t read_info = insn_info->read_rec;
3083 int i;
3084 group_info *group;
3085
3086 /* If this insn reads the frame, kill all the frame related stores. */
3087 if (insn_info->frame_read)
3088 {
3089 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3090 if (group->process_globally && group->frame_related)
3091 {
3092 if (kill)
3093 bitmap_ior_into (kill, group->group_kill);
3094 bitmap_and_compl_into (gen, group->group_kill);
3095 }
3096 }
3097 if (insn_info->non_frame_wild_read)
3098 {
3099 /* Kill all non-frame related stores. Kill all stores of variables that
3100 escape. */
3101 if (kill)
3102 bitmap_ior_into (kill, kill_on_calls);
3103 bitmap_and_compl_into (gen, kill_on_calls);
3104 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3105 if (group->process_globally && !group->frame_related)
3106 {
3107 if (kill)
3108 bitmap_ior_into (kill, group->group_kill);
3109 bitmap_and_compl_into (gen, group->group_kill);
3110 }
3111 }
3112 while (read_info)
3113 {
3114 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3115 {
3116 if (group->process_globally)
3117 {
3118 if (i == read_info->group_id)
3119 {
3120 HOST_WIDE_INT offset, width;
3121 /* Reads with non-constant size kill all DSE opportunities
3122 in the group. */
3123 if (!read_info->offset.is_constant (const_value: &offset)
3124 || !read_info->width.is_constant (const_value: &width)
3125 || !known_size_p (a: width))
3126 {
3127 /* Handle block mode reads. */
3128 if (kill)
3129 bitmap_ior_into (kill, group->group_kill);
3130 bitmap_and_compl_into (gen, group->group_kill);
3131 }
3132 else
3133 {
3134 /* The groups are the same, just process the
3135 offsets. */
3136 HOST_WIDE_INT j;
3137 HOST_WIDE_INT end = offset + width;
3138 for (j = offset; j < end; j++)
3139 {
3140 int index = get_bitmap_index (group_info: group, offset: j);
3141 if (index != 0)
3142 {
3143 if (kill)
3144 bitmap_set_bit (kill, index);
3145 bitmap_clear_bit (gen, index);
3146 }
3147 }
3148 }
3149 }
3150 else
3151 {
3152 /* The groups are different; if the alias sets
3153 conflict, clear the entire group. We only need
3154 to apply this test if the read_info is a cselib
3155 read. Anything with a constant base cannot alias
3156 something else with a different constant
3157 base. */
3158 if ((read_info->group_id < 0)
3159 && canon_true_dependence (group->base_mem,
3160 GET_MODE (group->base_mem),
3161 group->canon_base_addr,
3162 read_info->mem, NULL_RTX))
3163 {
3164 if (kill)
3165 bitmap_ior_into (kill, group->group_kill);
3166 bitmap_and_compl_into (gen, group->group_kill);
3167 }
3168 }
3169 }
3170 }
3171
3172 read_info = read_info->next;
3173 }
3174}
3175
3176
3177/* Return the insn in BB_INFO before the first wild read or if there
3178 are no wild reads in the block, return the last insn. */
3179
3180static insn_info_t
3181find_insn_before_first_wild_read (bb_info_t bb_info)
3182{
3183 insn_info_t insn_info = bb_info->last_insn;
3184 insn_info_t last_wild_read = NULL;
3185
3186 while (insn_info)
3187 {
3188 if (insn_info->wild_read)
3189 {
3190 last_wild_read = insn_info->prev_insn;
3191 /* Block starts with wild read. */
3192 if (!last_wild_read)
3193 return NULL;
3194 }
3195
3196 insn_info = insn_info->prev_insn;
3197 }
3198
3199 if (last_wild_read)
3200 return last_wild_read;
3201 else
3202 return bb_info->last_insn;
3203}
3204
3205
3206/* Scan the insns in BB_INFO starting at PTR and going to the top of
3207 the block in order to build the gen and kill sets for the block.
3208 We start at ptr which may be the last insn in the block or may be
3209 the insn just before the first wild read. In the latter case we are able to
3210 skip the rest of the block because it just does not matter:
3211 anything that happens is hidden by the wild read. */
3212
3213static void
3214dse_step3_scan (basic_block bb)
3215{
3216 bb_info_t bb_info = bb_table[bb->index];
3217 insn_info_t insn_info;
3218
3219 insn_info = find_insn_before_first_wild_read (bb_info);
3220
3221 /* In the spill case or in the no_spill case if there is no wild
3222 read in the block, we will need a kill set. */
3223 if (insn_info == bb_info->last_insn)
3224 {
3225 if (bb_info->kill)
3226 bitmap_clear (bb_info->kill);
3227 else
3228 bb_info->kill = BITMAP_ALLOC (obstack: &dse_bitmap_obstack);
3229 }
3230 else
3231 if (bb_info->kill)
3232 BITMAP_FREE (bb_info->kill);
3233
3234 while (insn_info)
3235 {
3236 /* There may have been code deleted by the dce pass run before
3237 this phase. */
3238 if (insn_info->insn && INSN_P (insn_info->insn))
3239 {
3240 scan_stores (store_info: insn_info->store_rec, gen: bb_info->gen, kill: bb_info->kill);
3241 scan_reads (insn_info, gen: bb_info->gen, kill: bb_info->kill);
3242 }
3243
3244 insn_info = insn_info->prev_insn;
3245 }
3246}
3247
3248
3249/* Set the gen set of the exit block, and also any block with no
3250 successors that does not have a wild read. */
3251
3252static void
3253dse_step3_exit_block_scan (bb_info_t bb_info)
3254{
3255 /* The gen set is all 0's for the exit block except for the
3256 frame_pointer_group. */
3257
3258 if (stores_off_frame_dead_at_return)
3259 {
3260 unsigned int i;
3261 group_info *group;
3262
3263 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3264 {
3265 if (group->process_globally && group->frame_related)
3266 bitmap_ior_into (bb_info->gen, group->group_kill);
3267 }
3268 }
3269}
3270
3271
3272/* Find all of the blocks that are not backwards reachable from the
3273 exit block or any block with no successors (BB). These are the
3274 infinite loops or infinite self loops. These blocks will still
3275 have their bits set in UNREACHABLE_BLOCKS. */
3276
3277static void
3278mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
3279{
3280 edge e;
3281 edge_iterator ei;
3282
3283 if (bitmap_bit_p (map: unreachable_blocks, bitno: bb->index))
3284 {
3285 bitmap_clear_bit (map: unreachable_blocks, bitno: bb->index);
3286 FOR_EACH_EDGE (e, ei, bb->preds)
3287 {
3288 mark_reachable_blocks (unreachable_blocks, bb: e->src);
3289 }
3290 }
3291}
3292
3293/* Build the transfer functions for the function. */
3294
3295static void
3296dse_step3 ()
3297{
3298 basic_block bb;
3299 sbitmap_iterator sbi;
3300 bitmap all_ones = NULL;
3301 unsigned int i;
3302
3303 auto_sbitmap unreachable_blocks (last_basic_block_for_fn (cfun));
3304 bitmap_ones (unreachable_blocks);
3305
3306 FOR_ALL_BB_FN (bb, cfun)
3307 {
3308 bb_info_t bb_info = bb_table[bb->index];
3309 if (bb_info->gen)
3310 bitmap_clear (bb_info->gen);
3311 else
3312 bb_info->gen = BITMAP_ALLOC (obstack: &dse_bitmap_obstack);
3313
3314 if (bb->index == ENTRY_BLOCK)
3315 ;
3316 else if (bb->index == EXIT_BLOCK)
3317 dse_step3_exit_block_scan (bb_info);
3318 else
3319 dse_step3_scan (bb);
3320 if (EDGE_COUNT (bb->succs) == 0)
3321 mark_reachable_blocks (unreachable_blocks, bb);
3322
3323 /* If this is the second time dataflow is run, delete the old
3324 sets. */
3325 if (bb_info->in)
3326 BITMAP_FREE (bb_info->in);
3327 if (bb_info->out)
3328 BITMAP_FREE (bb_info->out);
3329 }
3330
3331 /* For any block in an infinite loop, we must initialize the out set
3332 to all ones. This could be expensive, but almost never occurs in
3333 practice. However, it is common in regression tests. */
3334 EXECUTE_IF_SET_IN_BITMAP (unreachable_blocks, 0, i, sbi)
3335 {
3336 if (bitmap_bit_p (all_blocks, i))
3337 {
3338 bb_info_t bb_info = bb_table[i];
3339 if (!all_ones)
3340 {
3341 unsigned int j;
3342 group_info *group;
3343
3344 all_ones = BITMAP_ALLOC (obstack: &dse_bitmap_obstack);
3345 FOR_EACH_VEC_ELT (rtx_group_vec, j, group)
3346 bitmap_ior_into (all_ones, group->group_kill);
3347 }
3348 if (!bb_info->out)
3349 {
3350 bb_info->out = BITMAP_ALLOC (obstack: &dse_bitmap_obstack);
3351 bitmap_copy (bb_info->out, all_ones);
3352 }
3353 }
3354 }
3355
3356 if (all_ones)
3357 BITMAP_FREE (all_ones);
3358}
3359
3360
3361
3362/*----------------------------------------------------------------------------
3363 Fourth step.
3364
3365 Solve the bitvector equations.
3366----------------------------------------------------------------------------*/
3367
3368
3369/* Confluence function for blocks with no successors. Create an out
3370 set from the gen set of the exit block. This block logically has
3371 the exit block as a successor. */
3372
3373
3374
3375static void
3376dse_confluence_0 (basic_block bb)
3377{
3378 bb_info_t bb_info = bb_table[bb->index];
3379
3380 if (bb->index == EXIT_BLOCK)
3381 return;
3382
3383 if (!bb_info->out)
3384 {
3385 bb_info->out = BITMAP_ALLOC (obstack: &dse_bitmap_obstack);
3386 bitmap_copy (bb_info->out, bb_table[EXIT_BLOCK]->gen);
3387 }
3388}
3389
3390/* Propagate the information from the in set of the dest of E to the
3391 out set of the src of E. If the various in or out sets are not
3392 there, that means they are all ones. */
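/* In equation form (an illustrative paraphrase of the code below):
     out (src) := out (src) AND in (dest)
   where a missing out (src) is treated as all ones, i.e. it is simply
   initialized to a copy of in (dest).  */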
3393
3394static bool
3395dse_confluence_n (edge e)
3396{
3397 bb_info_t src_info = bb_table[e->src->index];
3398 bb_info_t dest_info = bb_table[e->dest->index];
3399
3400 if (dest_info->in)
3401 {
3402 if (src_info->out)
3403 bitmap_and_into (src_info->out, dest_info->in);
3404 else
3405 {
3406 src_info->out = BITMAP_ALLOC (obstack: &dse_bitmap_obstack);
3407 bitmap_copy (src_info->out, dest_info->in);
3408 }
3409 }
3410 return true;
3411}
3412
3413
3414/* Propagate the info from the out to the in set of BB_INDEX's basic
3415 block. There are three cases:
3416
3417 1) The block has no kill set. In this case the kill set is all
3418 ones. It does not matter what the out set of the block is, none of
3419 the info can reach the top. The only thing that reaches the top is
3420 the gen set and we just copy the set.
3421
3422 2) There is a kill set but no out set and bb has successors. In
3423 this case we just return. Eventually an out set will be created and
3424 it is better to wait than to create a set of ones.
3425
3426 3) There is both a kill and out set. We apply the obvious transfer
3427 function.
3428*/
3429
3430static bool
3431dse_transfer_function (int bb_index)
3432{
3433 bb_info_t bb_info = bb_table[bb_index];
3434
3435 if (bb_info->kill)
3436 {
3437 if (bb_info->out)
3438 {
3439 /* Case 3 above. */
3440 if (bb_info->in)
3441 return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
3442 bb_info->out, bb_info->kill);
3443 else
3444 {
3445 bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
3446 bitmap_ior_and_compl (bb_info->in, bb_info->gen,
3447 bb_info->out, bb_info->kill);
3448 return true;
3449 }
3450 }
3451 else
3452 /* Case 2 above. */
3453 return false;
3454 }
3455 else
3456 {
3457 /* Case 1 above. If there is already an in set, nothing
3458 happens. */
3459 if (bb_info->in)
3460 return false;
3461 else
3462 {
3463 bb_info->in = BITMAP_ALLOC (obstack: &dse_bitmap_obstack);
3464 bitmap_copy (bb_info->in, bb_info->gen);
3465 return true;
3466 }
3467 }
3468}
3469
3470/* Solve the dataflow equations. */
3471
3472static void
3473dse_step4 (void)
3474{
3475 df_simple_dataflow (DF_BACKWARD, NULL, dse_confluence_0,
3476 dse_confluence_n, dse_transfer_function,
3477 all_blocks, df_get_postorder (DF_BACKWARD),
3478 df_get_n_blocks (DF_BACKWARD));
3479 if (dump_file && (dump_flags & TDF_DETAILS))
3480 {
3481 basic_block bb;
3482
3483 fprintf (stream: dump_file, format: "\n\n*** Global dataflow info after analysis.\n");
3484 FOR_ALL_BB_FN (bb, cfun)
3485 {
3486 bb_info_t bb_info = bb_table[bb->index];
3487
3488 df_print_bb_index (bb, file: dump_file);
3489 if (bb_info->in)
3490 bitmap_print (dump_file, bb_info->in, " in: ", "\n");
3491 else
3492 fprintf (stream: dump_file, format: " in: *MISSING*\n");
3493 if (bb_info->gen)
3494 bitmap_print (dump_file, bb_info->gen, " gen: ", "\n");
3495 else
3496 fprintf (stream: dump_file, format: " gen: *MISSING*\n");
3497 if (bb_info->kill)
3498 bitmap_print (dump_file, bb_info->kill, " kill: ", "\n");
3499 else
3500 fprintf (stream: dump_file, format: " kill: *MISSING*\n");
3501 if (bb_info->out)
3502 bitmap_print (dump_file, bb_info->out, " out: ", "\n");
3503 else
3504 fprintf (stream: dump_file, format: " out: *MISSING*\n\n");
3505 }
3506 }
3507}
3508
3509
3510
/*----------------------------------------------------------------------------
   Fifth step.

   Delete the stores that can only be deleted using the global information.
----------------------------------------------------------------------------*/
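/* Roughly: each block is walked backwards starting from its out set,
   and a store is deleted only when every byte position it writes still
   has its bit set in the running bitmap V, i.e. on every path to the
   exit that byte is overwritten again before it is read. */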


static void
dse_step5 (void)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;
      bitmap v = bb_info->out;

      while (insn_info)
        {
          bool deleted = false;
          if (dump_file && insn_info->insn)
            {
              fprintf (dump_file, "starting to process insn %d\n",
                       INSN_UID (insn_info->insn));
              bitmap_print (dump_file, v, " v: ", "\n");
            }

          /* There may have been code deleted by the dce pass run before
             this phase. */
          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && (!insn_info->cannot_delete)
              && (!bitmap_empty_p (v)))
            {
              store_info *store_info = insn_info->store_rec;

              /* Try to delete the current insn. */
              deleted = true;

              /* Skip the clobbers. */
              while (!store_info->is_set)
                store_info = store_info->next;

              HOST_WIDE_INT i, offset, width;
              group_info *group_info = rtx_group_vec[store_info->group_id];

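              /* A store whose offset or width is not a compile-time
                 constant (e.g. a runtime poly_int on targets with
                 variable-length vectors) cannot be mapped to positions
                 in the group bitmap, so it is left alone. */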
              if (!store_info->offset.is_constant (&offset)
                  || !store_info->width.is_constant (&width))
                deleted = false;
              else
                {
                  HOST_WIDE_INT end = offset + width;
                  for (i = offset; i < end; i++)
                    {
                      int index = get_bitmap_index (group_info, i);

                      if (dump_file && (dump_flags & TDF_DETAILS))
                        fprintf (dump_file, "i = %d, index = %d\n",
                                 (int) i, index);
                      if (index == 0 || !bitmap_bit_p (v, index))
                        {
                          if (dump_file && (dump_flags & TDF_DETAILS))
                            fprintf (dump_file, "failing at i = %d\n",
                                     (int) i);
                          deleted = false;
                          break;
                        }
                    }
                }
              if (deleted)
                {
                  if (dbg_cnt (dse)
                      && check_for_inc_dec_1 (insn_info))
                    {
                      delete_insn (insn_info->insn);
                      insn_info->insn = NULL;
                      globally_deleted++;
                    }
                }
            }
          /* Only process the local info if the insn was not deleted.
             For instance, if the insn did a wild read but has just been
             deleted, there is no longer any need to trash the info. */
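          /* Roughly: scanning the stores sets the bits for the byte
             positions this insn writes, which is what lets earlier
             stores of those bytes be deleted; scanning the reads clears
             the bits for the positions read, keeping the stores that
             feed those reads alive. */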
          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && (!deleted))
            {
              scan_stores (insn_info->store_rec, v, NULL);
              if (insn_info->wild_read)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "wild read\n");
                  bitmap_clear (v);
                }
              else if (insn_info->read_rec
                       || insn_info->non_frame_wild_read
                       || insn_info->frame_read)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      if (!insn_info->non_frame_wild_read
                          && !insn_info->frame_read)
                        fprintf (dump_file, "regular read\n");
                      if (insn_info->non_frame_wild_read)
                        fprintf (dump_file, "non-frame wild read\n");
                      if (insn_info->frame_read)
                        fprintf (dump_file, "frame read\n");
                    }
                  scan_reads (insn_info, v, NULL);
                }
            }

          insn_info = insn_info->prev_insn;
        }
    }
}



/*----------------------------------------------------------------------------
   Sixth step.

   Delete stores made redundant by earlier stores (which store the same
   value) that couldn't be eliminated.
----------------------------------------------------------------------------*/
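/* For illustration, in a sequence like

     *p = x;  ...  r = *p;  ...  *p = x;

   the first store cannot be removed because of the intervening read,
   but the second store writes the same value again and is deleted
   here. */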

static void
dse_step6 (void)
{
  basic_block bb;

  FOR_ALL_BB_FN (bb, cfun)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;

      while (insn_info)
        {
          /* There may have been code deleted by the dce pass run before
             this phase. */
          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && !insn_info->cannot_delete)
            {
              store_info *s_info = insn_info->store_rec;

              while (s_info && !s_info->is_set)
                s_info = s_info->next;
              if (s_info
                  && s_info->redundant_reason
                  && s_info->redundant_reason->insn
                  && INSN_P (s_info->redundant_reason->insn))
                {
                  rtx_insn *rinsn = s_info->redundant_reason->insn;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Locally deleting insn %d "
                                        "because insn %d stores the "
                                        "same value and couldn't be "
                                        "eliminated\n",
                             INSN_UID (insn_info->insn),
                             INSN_UID (rinsn));
                  delete_dead_store_insn (insn_info);
                }
            }
          insn_info = insn_info->prev_insn;
        }
    }
}

/*----------------------------------------------------------------------------
   Seventh step.

   Destroy everything left standing.
----------------------------------------------------------------------------*/

static void
dse_step7 (void)
{
  bitmap_obstack_release (&dse_bitmap_obstack);
  obstack_free (&dse_obstack, NULL);

  end_alias_analysis ();
  free (bb_table);
  delete rtx_group_table;
  rtx_group_table = NULL;
  rtx_group_vec.release ();
  BITMAP_FREE (all_blocks);
  BITMAP_FREE (scratch);

  rtx_store_info_pool.release ();
  read_info_type_pool.release ();
  insn_info_type_pool.release ();
  dse_bb_info_type_pool.release ();
  group_info_pool.release ();
  deferred_change_pool.release ();
}


/* -------------------------------------------------------------------------
   DSE
   ------------------------------------------------------------------------- */

/* Callback for running pass_rtl_dse. */
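/* This entry point is shared by the dse1 (before register allocation)
   and dse2 (after register allocation) pass instances defined below. */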

static unsigned int
rest_of_handle_dse (void)
{
  df_set_flags (DF_DEFER_INSN_RESCAN);

  /* Need the notes since we must track live hardregs in the forwards
     direction. */
  df_note_add_problem ();
  df_analyze ();

  dse_step0 ();
  dse_step1 ();
  /* DSE can eliminate potentially-trapping MEMs.
     Remove any EH edges associated with them, since otherwise
     DF_LR_RUN_DCE will complain later. */
  if ((locally_deleted || globally_deleted)
      && cfun->can_throw_non_call_exceptions
      && purge_all_dead_edges ())
    {
      free_dominance_info (CDI_DOMINATORS);
      delete_unreachable_blocks ();
    }
  dse_step2_init ();
  if (dse_step2 ())
    {
      df_set_flags (DF_LR_RUN_DCE);
      df_analyze ();
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "doing global processing\n");
      dse_step3 ();
      dse_step4 ();
      dse_step5 ();
    }

  dse_step6 ();
  dse_step7 ();

  if (dump_file)
    fprintf (dump_file, "dse: local deletions = %d, global deletions = %d\n",
             locally_deleted, globally_deleted);

  /* DSE can eliminate potentially-trapping MEMs.
     Remove any EH edges associated with them. */
  if ((locally_deleted || globally_deleted)
      && cfun->can_throw_non_call_exceptions
      && purge_all_dead_edges ())
    {
      free_dominance_info (CDI_DOMINATORS);
      cleanup_cfg (0);
    }

  return 0;
}

namespace {

const pass_data pass_data_rtl_dse1 =
{
  RTL_PASS, /* type */
  "dse1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DSE1, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_rtl_dse1 : public rtl_opt_pass
{
public:
  pass_rtl_dse1 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_rtl_dse1, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
    {
      return optimize > 0 && flag_dse && dbg_cnt (dse1);
    }

  unsigned int execute (function *) final override
    {
      return rest_of_handle_dse ();
    }

}; // class pass_rtl_dse1

} // anon namespace

rtl_opt_pass *
make_pass_rtl_dse1 (gcc::context *ctxt)
{
  return new pass_rtl_dse1 (ctxt);
}

namespace {

const pass_data pass_data_rtl_dse2 =
{
  RTL_PASS, /* type */
  "dse2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DSE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_rtl_dse2 : public rtl_opt_pass
{
public:
  pass_rtl_dse2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_rtl_dse2, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
    {
      return optimize > 0 && flag_dse && dbg_cnt (dse2);
    }

  unsigned int execute (function *) final override
    {
      return rest_of_handle_dse ();
    }

}; // class pass_rtl_dse2

} // anon namespace

rtl_opt_pass *
make_pass_rtl_dse2 (gcc::context *ctxt)
{
  return new pass_rtl_dse2 (ctxt);
}
