/* Common subexpression elimination library for GNU compiler.
   Copyright (C) 1987-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "regs.h"
#include "emit-rtl.h"
#include "dumpfile.h"
#include "cselib.h"
#include "function-abi.h"
#include "alias.h"

/* A list of cselib_val structures.  */
struct elt_list
{
  struct elt_list *next;
  cselib_val *elt;
};

static bool cselib_record_memory;
static bool cselib_preserve_constants;
static bool cselib_any_perm_equivs;
static inline void promote_debug_loc (struct elt_loc_list *l);
static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
static void new_elt_loc_list (cselib_val *, rtx);
static void unchain_one_value (cselib_val *);
static void unchain_one_elt_list (struct elt_list **);
static void unchain_one_elt_loc_list (struct elt_loc_list **);
static void remove_useless_values (void);
static unsigned int cselib_hash_rtx (rtx, int, machine_mode);
static cselib_val *new_cselib_val (unsigned int, machine_mode, rtx);
static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
static cselib_val *cselib_lookup_mem (rtx, int);
static void cselib_invalidate_regno (unsigned int, machine_mode);
static void cselib_invalidate_mem (rtx);
static void cselib_record_set (rtx, cselib_val *, cselib_val *);
static void cselib_record_sets (rtx_insn *);
static rtx autoinc_split (rtx, rtx *, machine_mode);

#define PRESERVED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)

#define SP_BASED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("SP_BASED_VALUE_P", (RTX), VALUE)->jump)

#define SP_DERIVED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("SP_DERIVED_VALUE_P", (RTX), VALUE)->call)
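
/* A descriptive note on the macros above (not new mechanism): they reuse
   otherwise-idle rtx flag bits (unchanging, jump and call) on VALUE rtxes
   to record preservation and stack-pointer provenance per value.  */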

struct expand_value_data
{
  bitmap regs_active;
  cselib_expand_callback callback;
  void *callback_arg;
  bool dummy;
};

static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);

/* This is a global so we don't have to pass this through every function.
   It is used in new_elt_loc_list to set SETTING_INSN.  */
static rtx_insn *cselib_current_insn;

/* There are three ways in which cselib can look up an rtx:
   - for a REG, the reg_values table (which is indexed by regno) is used
   - for a MEM, we recursively look up its address and then follow the
     addr_list of that value
   - for everything else, we compute a hash value and go through the hash
     table.  Since different rtx's can still have the same hash value,
     this involves walking the table entries for a given value and comparing
     the locations of the entries with the rtx we are looking up.  */
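
/* For example (an illustrative sketch, not a call made here): looking up
   (mem:SI (reg:SI 1)) first resolves (reg:SI 1) through REG_VALUES, then
   scans the addr_list of the resulting VALUE for an SImode element whose
   locs contain a MEM in the same address space.  */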

struct cselib_hasher : nofree_ptr_hash <cselib_val>
{
  struct key {
    /* The rtx value and its mode (needed separately for constant
       integers).  */
    machine_mode mode;
    rtx x;
    /* The mode of the containing MEM, if any, otherwise VOIDmode.  */
    machine_mode memmode;
  };
  typedef key *compare_type;
  static inline hashval_t hash (const cselib_val *);
  static inline bool equal (const cselib_val *, const key *);
};

/* The hash function for our hash table.  The value is always computed with
   cselib_hash_rtx when adding an element; this function just extracts the
   hash value from a cselib_val structure.  */

inline hashval_t
cselib_hasher::hash (const cselib_val *v)
{
  return v->hash;
}

/* The equality test for our hash table.  The first argument V is a table
   element (i.e. a cselib_val), while the second arg X is an rtx.  We know
   that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
   CONST of an appropriate mode.  */

inline bool
cselib_hasher::equal (const cselib_val *v, const key *x_arg)
{
  struct elt_loc_list *l;
  rtx x = x_arg->x;
  machine_mode mode = x_arg->mode;
  machine_mode memmode = x_arg->memmode;

  if (mode != GET_MODE (v->val_rtx))
    return false;

  if (GET_CODE (x) == VALUE)
    return x == v->val_rtx;

  if (SP_DERIVED_VALUE_P (v->val_rtx) && GET_MODE (x) == Pmode)
    {
      rtx xoff = NULL;
      if (autoinc_split (x, &xoff, memmode) == v->val_rtx && xoff == NULL_RTX)
        return true;
    }

  /* We don't guarantee that distinct rtx's have different hash values,
     so we need to do a comparison.  */
  for (l = v->locs; l; l = l->next)
    if (l->setting_insn && DEBUG_INSN_P (l->setting_insn)
        && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
      {
        rtx_insn *save_cselib_current_insn = cselib_current_insn;
        /* If l is so far a debug only loc, without debug stmts it
           would never be compared to x at all, so temporarily pretend
           current instruction is that DEBUG_INSN so that we don't
           promote other debug locs even for unsuccessful comparison.  */
        cselib_current_insn = l->setting_insn;
        bool match = rtx_equal_for_cselib_1 (l->loc, x, memmode, 0);
        cselib_current_insn = save_cselib_current_insn;
        if (match)
          {
            promote_debug_loc (l);
            return true;
          }
      }
    else if (rtx_equal_for_cselib_1 (l->loc, x, memmode, 0))
      return true;

  return false;
}

/* A table that enables us to look up elts by their value.  */
static hash_table<cselib_hasher> *cselib_hash_table;

/* A table to hold preserved values.  */
static hash_table<cselib_hasher> *cselib_preserved_hash_table;

/* The unique id that the next created value will take.  */
static unsigned int next_uid;

/* The number of registers we had when the varrays were last resized.  */
static unsigned int cselib_nregs;

/* Count values without known locations, or with only locations that
   wouldn't have been known except for debug insns.  Whenever this
   grows too big, we remove these useless values from the table.

   Counting values with only debug locs is a bit tricky.  We don't
   want to increment n_useless_values when we create a value for a
   debug insn, for this would get n_useless_values out of sync, but we
   want to increment it if all locs in the list that were ever referenced
   in nondebug insns are removed from the list.

   In the general case, once we do that, we'd have to stop accepting
   nondebug expressions in the loc list, to avoid having two values
   equivalent that, without debug insns, would have been made into
   separate values.  However, because debug insns never introduce
   equivalences themselves (no assignments), the only means for
   growing loc lists is through nondebug assignments.  If the locs
   also happen to be referenced in debug insns, it will work just fine.

   A consequence of this is that there's at most one debug-only loc in
   each loc list.  If we keep it in the first entry, testing whether
   we have a debug-only loc list takes O(1).

   Furthermore, since any additional entry in a loc list containing a
   debug loc would have to come from an assignment (nondebug) that
   references both the initial debug loc and the newly-equivalent loc,
   the initial debug loc would be promoted to a nondebug loc, and the
   loc list would not contain debug locs any more.

   So the only case we have to be careful with in order to keep
   n_useless_values in sync between debug and nondebug compilations is
   to avoid incrementing n_useless_values when removing the single loc
   from a value that turns out to not appear outside debug values.  We
   increment n_useless_debug_values instead, and leave such values
   alone until, for other reasons, we garbage-collect useless
   values.  */
static int n_useless_values;
static int n_useless_debug_values;
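
/* An illustrative sketch of the invariant above: if a VALUE's single loc
   was recorded while scanning a DEBUG_INSN and is later discarded, we bump
   n_useless_debug_values rather than n_useless_values;
   remove_useless_values folds the former into the latter before actually
   garbage-collecting.  */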

/* Count values whose locs have been taken exclusively from debug
   insns for the entire life of the value.  */
static int n_debug_values;

/* Number of useless values before we remove them from the hash table.  */
#define MAX_USELESS_VALUES 32

/* This table maps from register number to values.  It does not
   contain pointers to cselib_val structures, but rather elt_lists.
   The purpose is to be able to refer to the same register in
   different modes.  The first element of the list defines the mode in
   which the register was set; if the mode is unknown or the value is
   no longer valid in that mode, ELT will be NULL for the first
   element.  */
static struct elt_list **reg_values;
static unsigned int reg_values_size;
#define REG_VALUES(i) reg_values[i]

/* The largest number of hard regs used by any entry added to the
   REG_VALUES table.  Cleared on each cselib_clear_table() invocation.  */
static unsigned int max_value_regs;

/* Here the set of indices I with REG_VALUES(I) != 0 is saved.  This is used
   in cselib_clear_table() for fast emptying.  */
static unsigned int *used_regs;
static unsigned int n_used_regs;

/* We pass this to cselib_invalidate_mem to invalidate all of
   memory for a non-const call instruction.  */
static GTY(()) rtx callmem;

/* Set by discard_useless_locs if it deleted the last location of any
   value.  */
static int values_became_useless;

/* Used as stop element of the containing_mem list so we can check
   presence in the list by checking the next pointer.  */
static cselib_val dummy_val;

/* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
   that is constant through the whole function and should never be
   eliminated.  */
static cselib_val *cfa_base_preserved_val;
static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;

/* Used to list all values that contain a memory reference.
   May or may not contain the useless values - the list is compacted
   each time memory is invalidated.  */
static cselib_val *first_containing_mem = &dummy_val;

static object_allocator<elt_list> elt_list_pool ("elt_list");
static object_allocator<elt_loc_list> elt_loc_list_pool ("elt_loc_list");
static object_allocator<cselib_val> cselib_val_pool ("cselib_val_list");

static pool_allocator value_pool ("value", RTX_CODE_SIZE (VALUE));

/* If nonnull, cselib will call this function before freeing useless
   VALUEs.  A VALUE is deemed useless if its "locs" field is null.  */
void (*cselib_discard_hook) (cselib_val *);

/* If nonnull, cselib will call this function before recording sets or
   even clobbering outputs of INSN.  All the recorded sets will be
   represented in the array sets[n_sets].  new_val_min can be used to
   tell whether values present in sets are introduced by this
   instruction.  */
void (*cselib_record_sets_hook) (rtx_insn *insn, struct cselib_set *sets,
                                 int n_sets);


/* Allocate a struct elt_list and fill in its two elements with the
   arguments.  */

static inline struct elt_list *
new_elt_list (struct elt_list *next, cselib_val *elt)
{
  elt_list *el = elt_list_pool.allocate ();
  el->next = next;
  el->elt = elt;
  return el;
}

/* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
   list.  */

static inline void
new_elt_loc_list (cselib_val *val, rtx loc)
{
  struct elt_loc_list *el, *next = val->locs;

  gcc_checking_assert (!next || !next->setting_insn
                       || !DEBUG_INSN_P (next->setting_insn)
                       || cselib_current_insn == next->setting_insn);

  /* If we're creating the first loc in a debug insn context, we've
     just created a debug value.  Count it.  */
  if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
    n_debug_values++;

  val = canonical_cselib_val (val);
  next = val->locs;

  if (GET_CODE (loc) == VALUE)
    {
      loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;

      gcc_checking_assert (PRESERVED_VALUE_P (loc)
                           == PRESERVED_VALUE_P (val->val_rtx));

      if (val->val_rtx == loc)
        return;
      else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
        {
          /* Reverse the insertion.  */
          new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
          return;
        }

      gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);

      if (CSELIB_VAL_PTR (loc)->locs)
        {
          /* Bring all locs from LOC to VAL.  */
          for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
            {
              /* Adjust values that have LOC as canonical so that VAL
                 becomes their canonical.  */
              if (el->loc && GET_CODE (el->loc) == VALUE)
                {
                  gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
                                       == loc);
                  CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
                }
            }
          el->next = val->locs;
          next = val->locs = CSELIB_VAL_PTR (loc)->locs;
        }

      if (CSELIB_VAL_PTR (loc)->addr_list)
        {
          /* Bring in addr_list into canonical node.  */
          struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
          while (last->next)
            last = last->next;
          last->next = val->addr_list;
          val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
          CSELIB_VAL_PTR (loc)->addr_list = NULL;
        }

      if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
          && val->next_containing_mem == NULL)
        {
          /* Add VAL to the containing_mem list after LOC.  LOC will
             be removed when we notice it doesn't contain any
             MEMs.  */
          val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
          CSELIB_VAL_PTR (loc)->next_containing_mem = val;
        }

      /* Chain LOC back to VAL.  */
      el = elt_loc_list_pool.allocate ();
      el->loc = val->val_rtx;
      el->setting_insn = cselib_current_insn;
      el->next = NULL;
      CSELIB_VAL_PTR (loc)->locs = el;
    }

  el = elt_loc_list_pool.allocate ();
  el->loc = loc;
  el->setting_insn = cselib_current_insn;
  el->next = next;
  val->locs = el;
}

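/* An illustrative sketch of the VALUE case above: calling
   new_elt_loc_list (V2, (value V1)) where V1 has the lower uid reverses
   itself into new_elt_loc_list (V1, (value V2)), so the older V1 becomes
   canonical and V2's locs and addr_list are merged into it.  */
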
/* Promote loc L to a nondebug cselib_current_insn if L is marked as
   originating from a debug insn, maintaining the debug values
   count.  */

static inline void
promote_debug_loc (struct elt_loc_list *l)
{
  if (l && l->setting_insn && DEBUG_INSN_P (l->setting_insn)
      && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
    {
      n_debug_values--;
      l->setting_insn = cselib_current_insn;
      if (cselib_preserve_constants && l->next)
        {
          gcc_assert (l->next->setting_insn
                      && DEBUG_INSN_P (l->next->setting_insn)
                      && !l->next->next);
          l->next->setting_insn = cselib_current_insn;
        }
      else
        gcc_assert (!l->next);
    }
}

/* The elt_list at *PL is no longer needed.  Unchain it and free its
   storage.  */

static inline void
unchain_one_elt_list (struct elt_list **pl)
{
  struct elt_list *l = *pl;

  *pl = l->next;
  elt_list_pool.remove (l);
}

/* Likewise for elt_loc_lists.  */

static void
unchain_one_elt_loc_list (struct elt_loc_list **pl)
{
  struct elt_loc_list *l = *pl;

  *pl = l->next;
  elt_loc_list_pool.remove (l);
}

/* Likewise for cselib_vals.  This also frees the addr_list associated with
   V.  */

static void
unchain_one_value (cselib_val *v)
{
  while (v->addr_list)
    unchain_one_elt_list (&v->addr_list);

  cselib_val_pool.remove (v);
}

/* Remove all entries from the hash table.  Also used during
   initialization.  */

void
cselib_clear_table (void)
{
  cselib_reset_table (1);
}

/* Return TRUE if V is a constant, a function invariant or a VALUE
   equivalence; FALSE otherwise.  */

static bool
invariant_or_equiv_p (cselib_val *v)
{
  struct elt_loc_list *l;

  if (v == cfa_base_preserved_val)
    return true;

  /* Keep VALUE equivalences around.  */
  for (l = v->locs; l; l = l->next)
    if (GET_CODE (l->loc) == VALUE)
      return true;

  if (v->locs != NULL
      && v->locs->next == NULL)
    {
      if (CONSTANT_P (v->locs->loc)
          && (GET_CODE (v->locs->loc) != CONST
              || !references_value_p (v->locs->loc, 0)))
        return true;
      /* Although a debug expr may be bound to different expressions,
         we can preserve it as if it was constant, to get unification
         and proper merging within var-tracking.  */
      if (GET_CODE (v->locs->loc) == DEBUG_EXPR
          || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
          || GET_CODE (v->locs->loc) == ENTRY_VALUE
          || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
        return true;

      /* (plus (value V) (const_int C)) is invariant iff V is invariant.  */
      if (GET_CODE (v->locs->loc) == PLUS
          && CONST_INT_P (XEXP (v->locs->loc, 1))
          && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
          && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
        return true;
    }

  return false;
}

/* Remove from hash table all VALUEs except constants, function
   invariants and VALUE equivalences.  */

int
preserve_constants_and_equivs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;

  if (invariant_or_equiv_p (v))
    {
      cselib_hasher::key lookup = {
        GET_MODE (v->val_rtx), v->val_rtx, VOIDmode
      };
      cselib_val **slot
        = cselib_preserved_hash_table->find_slot_with_hash (&lookup,
                                                            v->hash, INSERT);
      gcc_assert (!*slot);
      *slot = v;
    }

  cselib_hash_table->clear_slot (x);

  return 1;
}

/* Remove all entries from the hash table, arranging for the next
   value to be numbered NUM.  */

void
cselib_reset_table (unsigned int num)
{
  unsigned int i;

  max_value_regs = 0;

  if (cfa_base_preserved_val)
    {
      unsigned int regno = cfa_base_preserved_regno;
      unsigned int new_used_regs = 0;
      for (i = 0; i < n_used_regs; i++)
        if (used_regs[i] == regno)
          {
            new_used_regs = 1;
            continue;
          }
        else
          REG_VALUES (used_regs[i]) = 0;
      gcc_assert (new_used_regs == 1);
      n_used_regs = new_used_regs;
      used_regs[0] = regno;
      max_value_regs
        = hard_regno_nregs (regno,
                            GET_MODE (cfa_base_preserved_val->locs->loc));

      /* If cfa_base is sp + const_int, we need to preserve the
         SP_DERIVED_VALUE_P value as well.  */
      for (struct elt_loc_list *l = cfa_base_preserved_val->locs;
           l; l = l->next)
        if (GET_CODE (l->loc) == PLUS
            && GET_CODE (XEXP (l->loc, 0)) == VALUE
            && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
            && CONST_INT_P (XEXP (l->loc, 1)))
          {
            if (! invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (l->loc, 0))))
              {
                rtx val = cfa_base_preserved_val->val_rtx;
                rtx_insn *save_cselib_current_insn = cselib_current_insn;
                cselib_current_insn = l->setting_insn;
                new_elt_loc_list (CSELIB_VAL_PTR (XEXP (l->loc, 0)),
                                  plus_constant (Pmode, val,
                                                 -UINTVAL (XEXP (l->loc, 1))));
                cselib_current_insn = save_cselib_current_insn;
              }
            break;
          }
    }
  else
    {
      for (i = 0; i < n_used_regs; i++)
        REG_VALUES (used_regs[i]) = 0;
      n_used_regs = 0;
    }

  if (cselib_preserve_constants)
    cselib_hash_table->traverse <void *, preserve_constants_and_equivs> (NULL);
  else
    {
      cselib_hash_table->empty ();
      gcc_checking_assert (!cselib_any_perm_equivs);
    }

  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;

  next_uid = num;

  first_containing_mem = &dummy_val;
}

/* Return the number of the next value that will be generated.  */

unsigned int
cselib_get_next_uid (void)
{
  return next_uid;
}

/* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
   INSERTing if requested.  When X is part of the address of a MEM,
   MEMMODE should specify the mode of the MEM.  */

static cselib_val **
cselib_find_slot (machine_mode mode, rtx x, hashval_t hash,
                  enum insert_option insert, machine_mode memmode)
{
  cselib_val **slot = NULL;
  cselib_hasher::key lookup = { mode, x, memmode };
  if (cselib_preserve_constants)
    slot = cselib_preserved_hash_table->find_slot_with_hash (&lookup, hash,
                                                             NO_INSERT);
  if (!slot)
    slot = cselib_hash_table->find_slot_with_hash (&lookup, hash, insert);
  return slot;
}

/* Return true if X contains a VALUE rtx.  If ONLY_USELESS is set, we
   only return true for values which point to a cselib_val whose value
   element has been set to zero, which implies the cselib_val will be
   removed.  */

bool
references_value_p (const_rtx x, int only_useless)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  if (GET_CODE (x) == VALUE
      && (! only_useless
          || (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
    return true;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
        return true;
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (references_value_p (XVECEXP (x, i, j), only_useless))
            return true;
    }

  return false;
}

/* Return true if V is a useless VALUE and can be discarded as such.  */

static bool
cselib_useless_value_p (cselib_val *v)
{
  return (v->locs == 0
          && !PRESERVED_VALUE_P (v->val_rtx)
          && !SP_DERIVED_VALUE_P (v->val_rtx));
}

/* For all locations found in X, delete locations that reference useless
   values (i.e. values without any location).  Called through
   htab_traverse.  */

int
discard_useless_locs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;
  struct elt_loc_list **p = &v->locs;
  bool had_locs = v->locs != NULL;
  rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;

  while (*p)
    {
      if (references_value_p ((*p)->loc, 1))
        unchain_one_elt_loc_list (p);
      else
        p = &(*p)->next;
    }

  if (had_locs && cselib_useless_value_p (v))
    {
      if (setting_insn && DEBUG_INSN_P (setting_insn))
        n_useless_debug_values++;
      else
        n_useless_values++;
      values_became_useless = 1;
    }
  return 1;
}

/* If X is a value with no locations, remove it from the hashtable.  */

int
discard_useless_values (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;

  if (v->locs == 0 && cselib_useless_value_p (v))
    {
      if (cselib_discard_hook)
        cselib_discard_hook (v);

      CSELIB_VAL_PTR (v->val_rtx) = NULL;
      cselib_hash_table->clear_slot (x);
      unchain_one_value (v);
      n_useless_values--;
    }

  return 1;
}

/* Clean out useless values (i.e. those which no longer have locations
   associated with them) from the hash table.  */

static void
remove_useless_values (void)
{
  cselib_val **p, *v;

  /* First pass: eliminate locations that reference the value.  That in
     turn can make more values useless.  */
  do
    {
      values_became_useless = 0;
      cselib_hash_table->traverse <void *, discard_useless_locs> (NULL);
    }
  while (values_became_useless);

  /* Second pass: actually remove the values.  */

  p = &first_containing_mem;
  for (v = *p; v != &dummy_val; v = v->next_containing_mem)
    if (v->locs && v == canonical_cselib_val (v))
      {
        *p = v;
        p = &(*p)->next_containing_mem;
      }
  *p = &dummy_val;

  n_useless_values += n_useless_debug_values;
  n_debug_values -= n_useless_debug_values;
  n_useless_debug_values = 0;

  cselib_hash_table->traverse <void *, discard_useless_values> (NULL);

  gcc_assert (!n_useless_values);
}

/* Arrange for a value to not be removed from the hash table even if
   it becomes useless.  */

void
cselib_preserve_value (cselib_val *v)
{
  PRESERVED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is preserved.  */

bool
cselib_preserved_value_p (cselib_val *v)
{
  return PRESERVED_VALUE_P (v->val_rtx);
}

/* Arrange for a REG value to be assumed constant through the whole function,
   never invalidated and preserved across cselib_reset_table calls.  */

void
cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
{
  if (cselib_preserve_constants
      && v->locs
      && REG_P (v->locs->loc))
    {
      cfa_base_preserved_val = v;
      cfa_base_preserved_regno = regno;
    }
}

/* Clean all non-constant expressions in the hash table, but retain
   their values.  */

void
cselib_preserve_only_values (void)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    cselib_invalidate_regno (i, reg_raw_mode[i]);

  cselib_invalidate_mem (callmem);

  remove_useless_values ();

  gcc_assert (first_containing_mem == &dummy_val);
}

/* Arrange for a value to be marked as based on stack pointer
   for find_base_term purposes.  */

void
cselib_set_value_sp_based (cselib_val *v)
{
  SP_BASED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is based on stack pointer for
   find_base_term purposes.  */

bool
cselib_sp_based_value_p (cselib_val *v)
{
  return SP_BASED_VALUE_P (v->val_rtx);
}

/* Return the mode in which a register was last set.  If X is not a
   register, return its mode.  If the mode in which the register was
   set is not known, or the value was already clobbered, return
   VOIDmode.  */

machine_mode
cselib_reg_set_mode (const_rtx x)
{
  if (!REG_P (x))
    return GET_MODE (x);

  if (REG_VALUES (REGNO (x)) == NULL
      || REG_VALUES (REGNO (x))->elt == NULL)
    return VOIDmode;

  return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
}

/* If x is a PLUS or an autoinc operation, expand the operation,
   storing the offset, if any, in *OFF.  */

static rtx
autoinc_split (rtx x, rtx *off, machine_mode memmode)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      *off = XEXP (x, 1);
      x = XEXP (x, 0);
      break;

    case PRE_DEC:
      if (memmode == VOIDmode)
        return x;

      *off = gen_int_mode (-GET_MODE_SIZE (memmode), GET_MODE (x));
      x = XEXP (x, 0);
      break;

    case PRE_INC:
      if (memmode == VOIDmode)
        return x;

      *off = gen_int_mode (GET_MODE_SIZE (memmode), GET_MODE (x));
      x = XEXP (x, 0);
      break;

    case PRE_MODIFY:
      x = XEXP (x, 1);
      break;

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      x = XEXP (x, 0);
      break;

    default:
      break;
    }

  if (GET_MODE (x) == Pmode
      && (REG_P (x) || MEM_P (x) || GET_CODE (x) == VALUE)
      && (*off == NULL_RTX || CONST_INT_P (*off)))
    {
      cselib_val *e;
      if (GET_CODE (x) == VALUE)
        e = CSELIB_VAL_PTR (x);
      else
        e = cselib_lookup (x, GET_MODE (x), 0, memmode);
      if (e)
        {
          if (SP_DERIVED_VALUE_P (e->val_rtx)
              && (*off == NULL_RTX || *off == const0_rtx))
            {
              *off = NULL_RTX;
              return e->val_rtx;
            }
          for (struct elt_loc_list *l = e->locs; l; l = l->next)
            if (GET_CODE (l->loc) == PLUS
                && GET_CODE (XEXP (l->loc, 0)) == VALUE
                && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
                && CONST_INT_P (XEXP (l->loc, 1)))
              {
                if (*off == NULL_RTX)
                  *off = XEXP (l->loc, 1);
                else
                  *off = plus_constant (Pmode, *off,
                                        INTVAL (XEXP (l->loc, 1)));
                if (*off == const0_rtx)
                  *off = NULL_RTX;
                return XEXP (l->loc, 0);
              }
        }
    }
  return x;
}

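/* For example (illustrative, assuming MEMMODE == SImode with 4-byte
   SImode): autoinc_split turns (pre_dec:P (reg sp)) into (reg sp) with
   *OFF = (const_int -4), and (post_inc:P (reg sp)) into (reg sp) with
   *OFF left untouched; if a known SP_DERIVED_VALUE_P equivalence exists,
   that VALUE is returned instead, with any constant folded into *OFF.  */
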
/* Return true if we can prove that X and Y contain the same value,
   taking our gathered information into account.  MEMMODE holds the
   mode of the enclosing MEM, if any, as required to deal with autoinc
   addressing modes.  If X and Y are not (known to be) part of
   addresses, MEMMODE should be VOIDmode.  */

bool
rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
{
  enum rtx_code code;
  const char *fmt;
  int i;

  if (REG_P (x) || MEM_P (x))
    {
      cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);

      if (e)
        x = e->val_rtx;
    }

  if (REG_P (y) || MEM_P (y))
    {
      cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);

      if (e)
        y = e->val_rtx;
    }

  if (x == y)
    return true;

  if (GET_CODE (x) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
      struct elt_loc_list *l;

      if (GET_CODE (y) == VALUE)
        return e == canonical_cselib_val (CSELIB_VAL_PTR (y));

      if ((SP_DERIVED_VALUE_P (x)
           || SP_DERIVED_VALUE_P (e->val_rtx))
          && GET_MODE (y) == Pmode)
        {
          rtx yoff = NULL;
          rtx yr = autoinc_split (y, &yoff, memmode);
          if ((yr == x || yr == e->val_rtx) && yoff == NULL_RTX)
            return true;
        }

      if (depth == 128)
        return false;

      for (l = e->locs; l; l = l->next)
        {
          rtx t = l->loc;

          /* Avoid infinite recursion.  We know we have the canonical
             value, so we can just skip any values in the equivalence
             list.  */
          if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
            continue;
          else if (rtx_equal_for_cselib_1 (t, y, memmode, depth + 1))
            return true;
        }

      return false;
    }
  else if (GET_CODE (y) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
      struct elt_loc_list *l;

      if ((SP_DERIVED_VALUE_P (y)
           || SP_DERIVED_VALUE_P (e->val_rtx))
          && GET_MODE (x) == Pmode)
        {
          rtx xoff = NULL;
          rtx xr = autoinc_split (x, &xoff, memmode);
          if ((xr == y || xr == e->val_rtx) && xoff == NULL_RTX)
            return true;
        }

      if (depth == 128)
        return false;

      for (l = e->locs; l; l = l->next)
        {
          rtx t = l->loc;

          if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
            continue;
          else if (rtx_equal_for_cselib_1 (x, t, memmode, depth + 1))
            return true;
        }

      return false;
    }

  if (GET_MODE (x) != GET_MODE (y))
    return false;

  if (GET_CODE (x) != GET_CODE (y)
      || (GET_CODE (x) == PLUS
          && GET_MODE (x) == Pmode
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (y, 1))))
    {
      rtx xorig = x, yorig = y;
      rtx xoff = NULL, yoff = NULL;

      x = autoinc_split (x, &xoff, memmode);
      y = autoinc_split (y, &yoff, memmode);

      /* Don't recurse if nothing changed.  */
      if (x != xorig || y != yorig)
        {
          if (!xoff != !yoff)
            return false;

          if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode, depth))
            return false;

          return rtx_equal_for_cselib_1 (x, y, memmode, depth);
        }

      if (GET_CODE (xorig) != GET_CODE (yorig))
        return false;
    }

  /* These won't be handled correctly by the code below.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_UNIQUE:
    case DEBUG_EXPR:
      return false;

    case CONST_VECTOR:
      if (!same_vector_encodings_p (x, y))
        return false;
      break;

    case DEBUG_IMPLICIT_PTR:
      return DEBUG_IMPLICIT_PTR_DECL (x)
             == DEBUG_IMPLICIT_PTR_DECL (y);

    case DEBUG_PARAMETER_REF:
      return DEBUG_PARAMETER_REF_DECL (x)
             == DEBUG_PARAMETER_REF_DECL (y);

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, it is thus undesirable to
         use rtx_equal_for_cselib_1 to compare the operands.  */
      return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));

    case LABEL_REF:
      return label_ref_label (x) == label_ref_label (y);

    case REG:
      return REGNO (x) == REGNO (y);

    case MEM:
      /* We have to compare any autoinc operations in the addresses
         using this MEM's mode.  */
      return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x),
                                     depth);

    default:
      break;
    }

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return false;
          break;

        case 'n':
        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return false;
          break;

        case 'p':
          if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
            return false;
          break;

        case 'V':
        case 'E':
          /* Two vectors must have the same length.  */
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return false;

          /* And the corresponding elements must match.  */
          for (j = 0; j < XVECLEN (x, i); j++)
            if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
                                          XVECEXP (y, i, j), memmode, depth))
              return false;
          break;

        case 'e':
          if (i == 1
              && targetm.commutative_p (x, UNKNOWN)
              && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode,
                                         depth)
              && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode,
                                         depth))
            return true;
          if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode,
                                        depth))
            return false;
          break;

        case 'S':
        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return false;
          break;

        case 'u':
          /* These are just backpointers, so they don't matter.  */
          break;

        case '0':
        case 't':
          break;

          /* It is believed that rtx's at this level will never
             contain anything but integers and other rtx's,
             except for within LABEL_REFs and SYMBOL_REFs.  */
        default:
          gcc_unreachable ();
        }
    }
  return true;
}

/* Wrapper for rtx_equal_for_cselib_p to determine whether a SET is
   truly redundant, taking into account aliasing information.  */
bool
cselib_redundant_set_p (rtx set)
{
  gcc_assert (GET_CODE (set) == SET);
  rtx dest = SET_DEST (set);
  if (cselib_reg_set_mode (dest) != GET_MODE (dest))
    return false;

  if (!rtx_equal_for_cselib_p (dest, SET_SRC (set)))
    return false;

  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (!flag_strict_aliasing || !MEM_P (dest))
    return true;

  /* For a store we need to check that suppressing it will not change
     the effective alias set.  */
  rtx dest_addr = XEXP (dest, 0);

  /* Lookup the equivalents to the original dest (rather than just the
     MEM).  */
  cselib_val *src_val = cselib_lookup (SET_DEST (set),
                                       GET_MODE (SET_DEST (set)),
                                       0, VOIDmode);

  if (src_val)
    {
      /* Walk the list of source equivalents to find the MEM accessing
         the same location.  */
      for (elt_loc_list *l = src_val->locs; l; l = l->next)
        {
          rtx src_equiv = l->loc;
          while (GET_CODE (src_equiv) == SUBREG
                 || GET_CODE (src_equiv) == ZERO_EXTRACT
                 || GET_CODE (src_equiv) == STRICT_LOW_PART)
            src_equiv = XEXP (src_equiv, 0);

          if (MEM_P (src_equiv))
            {
              /* Match the MEMs by comparing the addresses.  We can
                 only remove the later store if the earlier aliases at
                 least all the accesses of the later one.  */
              if (rtx_equal_for_cselib_1 (dest_addr, XEXP (src_equiv, 0),
                                          GET_MODE (dest), 0))
                return mems_same_for_tbaa_p (src_equiv, dest);
            }
        }
    }

  /* We failed to find a recorded value in the cselib history, so try
     the source of this set; this catches cases such as *p = *q when p
     and q have the same value.  */
  rtx src = SET_SRC (set);
  while (GET_CODE (src) == SUBREG)
    src = XEXP (src, 0);

  if (MEM_P (src)
      && rtx_equal_for_cselib_1 (dest_addr, XEXP (src, 0), GET_MODE (dest), 0))
    return mems_same_for_tbaa_p (src, dest);

  return false;
}

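/* For instance (an illustrative note): for *p = *q where p and q are known
   to hold the same VALUE, the store is redundant only if the two MEMs also
   agree for TBAA purposes, which is what the mems_same_for_tbaa_p checks
   above verify.  */
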
/* Helper function for cselib_hash_rtx.  Arguments like for cselib_hash_rtx,
   except that it hashes (plus:P x c).  */

static unsigned int
cselib_hash_plus_const_int (rtx x, HOST_WIDE_INT c, int create,
                            machine_mode memmode)
{
  cselib_val *e = cselib_lookup (x, GET_MODE (x), create, memmode);
  if (! e)
    return 0;

  if (! SP_DERIVED_VALUE_P (e->val_rtx))
    for (struct elt_loc_list *l = e->locs; l; l = l->next)
      if (GET_CODE (l->loc) == PLUS
          && GET_CODE (XEXP (l->loc, 0)) == VALUE
          && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
          && CONST_INT_P (XEXP (l->loc, 1)))
        {
          e = CSELIB_VAL_PTR (XEXP (l->loc, 0));
          c = trunc_int_for_mode (c + UINTVAL (XEXP (l->loc, 1)), Pmode);
          break;
        }
  if (c == 0)
    return e->hash;

  unsigned hash = (unsigned) PLUS + (unsigned) GET_MODE (x);
  hash += e->hash;
  unsigned int tem_hash = (unsigned) CONST_INT + (unsigned) VOIDmode;
  tem_hash += ((unsigned) CONST_INT << 7) + (unsigned HOST_WIDE_INT) c;
  if (tem_hash == 0)
    tem_hash = (unsigned int) CONST_INT;
  hash += tem_hash;
  return hash ? hash : 1 + (unsigned int) PLUS;
}

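/* E.g. (an illustrative sketch): hashing (plus:P (reg sp) (const_int 16))
   looks up the VALUE for (reg sp); if that VALUE, or one of its locs, is
   sp-derived, the constant is folded into C and the result hashes as
   (plus:P sp-derived-VALUE (const_int 16)), or is just the VALUE's own
   hash when C folds to zero.  */
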
/* Hash an rtx.  Return 0 if we couldn't hash the rtx.
   For registers and memory locations, we look up their cselib_val structure
   and return its VALUE element.
   Possible reasons for returning 0 are: the object is volatile, or we
   couldn't find a register or memory location in the table and CREATE is
   zero.  If CREATE is nonzero, table elts are created for regs and mem.
   N.B. this hash function returns the same hash value for RTXes that
   differ only in the order of operands, thus it is suitable for comparisons
   that take commutativity into account.
   If we wanted to also support associative rules, we'd have to use a
   different strategy to avoid returning spurious 0, e.g. return
   ~(~0U >> 1).
   MEMMODE indicates the mode of an enclosing MEM, and it's only
   used to compute autoinc values.
   We used to have a MODE argument for hashing for CONST_INTs, but that
   didn't make sense, since it caused spurious hash differences between
     (set (reg:SI 1) (const_int))
     (plus:SI (reg:SI 2) (reg:SI 1))
   and
     (plus:SI (reg:SI 2) (const_int))
   If the mode is important in any context, it must be checked specifically
   in a comparison anyway, since relying on hash differences is unsafe.  */

static unsigned int
cselib_hash_rtx (rtx x, int create, machine_mode memmode)
{
  cselib_val *e;
  poly_int64 offset;
  int i, j;
  enum rtx_code code;
  const char *fmt;
  unsigned int hash = 0;

  code = GET_CODE (x);
  hash += (unsigned) code + (unsigned) GET_MODE (x);

  switch (code)
    {
    case VALUE:
      e = CSELIB_VAL_PTR (x);
      return e->hash;

    case MEM:
    case REG:
      e = cselib_lookup (x, GET_MODE (x), create, memmode);
      if (! e)
        return 0;

      return e->hash;

    case DEBUG_EXPR:
      hash += ((unsigned) DEBUG_EXPR << 7)
              + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_EXPR;

    case DEBUG_IMPLICIT_PTR:
      hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
              + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;

    case DEBUG_PARAMETER_REF:
      hash += ((unsigned) DEBUG_PARAMETER_REF << 7)
              + DECL_UID (DEBUG_PARAMETER_REF_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_PARAMETER_REF;

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, thus try to avoid
         recursing on argument if ENTRY_VALUE is one of the
         forms emitted by expand_debug_expr, otherwise
         ENTRY_VALUE hash would depend on the current value
         in some register or memory.  */
      if (REG_P (ENTRY_VALUE_EXP (x)))
        hash += (unsigned int) REG
                + (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
                + (unsigned int) REGNO (ENTRY_VALUE_EXP (x));
      else if (MEM_P (ENTRY_VALUE_EXP (x))
               && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
        hash += (unsigned int) MEM
                + (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
                + (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0));
      else
        hash += cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode);
      return hash ? hash : (unsigned int) ENTRY_VALUE;

    case CONST_INT:
      hash += ((unsigned) CONST_INT << 7) + UINTVAL (x);
      return hash ? hash : (unsigned int) CONST_INT;

    case CONST_WIDE_INT:
      for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
        hash += CONST_WIDE_INT_ELT (x, i);
      return hash;

    case CONST_POLY_INT:
      {
        inchash::hash h;
        h.add_int (hash);
        for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
          h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
        return h.end ();
      }

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
         the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
        hash += ((unsigned) CONST_DOUBLE_LOW (x)
                 + (unsigned) CONST_DOUBLE_HIGH (x));
      else
        hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      return hash ? hash : (unsigned int) CONST_DOUBLE;

    case CONST_FIXED:
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      hash += fixed_hash (CONST_FIXED_VALUE (x));
      return hash ? hash : (unsigned int) CONST_FIXED;

    case CONST_VECTOR:
      {
        int units;
        rtx elt;

        units = const_vector_encoded_nelts (x);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ENCODED_ELT (x, i);
            hash += cselib_hash_rtx (elt, 0, memmode);
          }

        return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
         differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
               + CODE_LABEL_NUMBER (label_ref_label (x)));
      return hash ? hash : (unsigned int) LABEL_REF;

    case SYMBOL_REF:
      {
        /* Don't hash on the symbol's address to avoid bootstrap differences.
           Different hash values may cause expressions to be recorded in
           different orders and thus different registers to be used in the
           final assembler.  This also avoids differences in the dump files
           between various stages.  */
        unsigned int h = 0;
        const unsigned char *p = (const unsigned char *) XSTR (x, 0);

        while (*p)
          h += (h << 7) + *p++; /* ??? revisit */

        hash += ((unsigned int) SYMBOL_REF << 7) + h;
        return hash ? hash : (unsigned int) SYMBOL_REF;
      }

    case PRE_DEC:
    case PRE_INC:
      /* We can't compute these without knowing the MEM mode.  */
      gcc_assert (memmode != VOIDmode);
      offset = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
        offset = -offset;
      /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
         like (mem:MEMMODE (plus (reg) (const_int I))).  */
      if (GET_MODE (x) == Pmode
          && (REG_P (XEXP (x, 0))
              || MEM_P (XEXP (x, 0))
              || GET_CODE (XEXP (x, 0)) == VALUE))
        {
          HOST_WIDE_INT c;
          if (offset.is_constant (&c))
            return cselib_hash_plus_const_int (XEXP (x, 0),
                                               trunc_int_for_mode (c, Pmode),
                                               create, memmode);
        }
      hash = ((unsigned) PLUS + (unsigned) GET_MODE (x)
              + cselib_hash_rtx (XEXP (x, 0), create, memmode)
              + cselib_hash_rtx (gen_int_mode (offset, GET_MODE (x)),
                                 create, memmode));
      return hash ? hash : 1 + (unsigned) PLUS;

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 1), create, memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 0), create, memmode);

    case PC:
    case CALL:
    case UNSPEC_VOLATILE:
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 0;

      break;

    case PLUS:
      if (GET_MODE (x) == Pmode
          && (REG_P (XEXP (x, 0))
              || MEM_P (XEXP (x, 0))
              || GET_CODE (XEXP (x, 0)) == VALUE)
          && CONST_INT_P (XEXP (x, 1)))
        return cselib_hash_plus_const_int (XEXP (x, 0), INTVAL (XEXP (x, 1)),
                                           create, memmode);
      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'e':
          {
            rtx tem = XEXP (x, i);
            unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);

            if (tem_hash == 0)
              return 0;

            hash += tem_hash;
          }
          break;
        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            {
              unsigned int tem_hash
                = cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);

              if (tem_hash == 0)
                return 0;

              hash += tem_hash;
            }
          break;

        case 's':
          {
            const unsigned char *p = (const unsigned char *) XSTR (x, i);

            if (p)
              while (*p)
                hash += *p++;
            break;
          }

        case 'i':
          hash += XINT (x, i);
          break;

        case 'p':
          hash += constant_lower_bound (SUBREG_BYTE (x));
          break;

        case '0':
        case 't':
          /* unused */
          break;

        default:
          gcc_unreachable ();
        }
    }

  return hash ? hash : 1 + (unsigned int) GET_CODE (x);
}

/* Create a new value structure for VALUE and initialize it.  The mode of the
   value is MODE.  */

static inline cselib_val *
new_cselib_val (unsigned int hash, machine_mode mode, rtx x)
{
  cselib_val *e = cselib_val_pool.allocate ();

  gcc_assert (hash);
  gcc_assert (next_uid);

  e->hash = hash;
  e->uid = next_uid++;
  /* We use an alloc pool to allocate this RTL construct because it
     accounts for about 8% of the overall memory usage.  We know
     precisely when we can have VALUE RTXen (when cselib is active)
     so we don't need to put them in garbage collected memory.
     ??? Why should a VALUE be an RTX in the first place?  */
  e->val_rtx = (rtx_def *) value_pool.allocate ();
  memset (e->val_rtx, 0, RTX_HDR_SIZE);
  PUT_CODE (e->val_rtx, VALUE);
  PUT_MODE (e->val_rtx, mode);
  CSELIB_VAL_PTR (e->val_rtx) = e;
  e->addr_list = 0;
  e->locs = 0;
  e->next_containing_mem = 0;

  scalar_int_mode int_mode;
  if (REG_P (x) && is_int_mode (mode, &int_mode)
      && GET_MODE_SIZE (int_mode) > 1
      && REG_VALUES (REGNO (x)) != NULL
      && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
    {
      rtx copy = shallow_copy_rtx (x);
      scalar_int_mode narrow_mode_iter;
      FOR_EACH_MODE_UNTIL (narrow_mode_iter, int_mode)
        {
          PUT_MODE_RAW (copy, narrow_mode_iter);
          cselib_val *v = cselib_lookup (copy, narrow_mode_iter, 0, VOIDmode);
          if (v)
            {
              rtx sub = lowpart_subreg (narrow_mode_iter, e->val_rtx, int_mode);
              if (sub)
                new_elt_loc_list (v, sub);
            }
        }
    }

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
      if (flag_dump_noaddr || flag_dump_unnumbered)
        fputs ("# ", dump_file);
      else
        fprintf (dump_file, "%p ", (void *) e);
      print_rtl_single (dump_file, x);
      fputc ('\n', dump_file);
    }

  return e;
}

/* ADDR_ELT is a value that is used as address.  MEM_ELT is the value that
   contains the data at this address.  X is a MEM that represents the
   value.  Update the two value structures to represent this situation.  */

static void
add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
{
  addr_elt = canonical_cselib_val (addr_elt);
  mem_elt = canonical_cselib_val (mem_elt);

  /* Avoid duplicates.  */
  addr_space_t as = MEM_ADDR_SPACE (x);
  for (elt_loc_list *l = mem_elt->locs; l; l = l->next)
    if (MEM_P (l->loc)
        && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt
        && MEM_ADDR_SPACE (l->loc) == as)
      {
        promote_debug_loc (l);
        return;
      }

  addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
  new_elt_loc_list (mem_elt,
                    replace_equiv_address_nv (x, addr_elt->val_rtx));
  if (mem_elt->next_containing_mem == NULL)
    {
      mem_elt->next_containing_mem = first_containing_mem;
      first_containing_mem = mem_elt;
    }
}

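/* A descriptive note on add_mem_for_addr above: afterwards the two values
   are cross-linked - ADDR_ELT lists MEM_ELT in its addr_list, and MEM_ELT
   gains a (mem (value ADDR_ELT)) loc - which is exactly what the MEM
   lookup path walks.  */
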
/* Subroutine of cselib_lookup.  Return a value for X, which is a MEM rtx.
   If CREATE, make a new one if we haven't seen it before.  */

static cselib_val *
cselib_lookup_mem (rtx x, int create)
{
  machine_mode mode = GET_MODE (x);
  machine_mode addr_mode;
  cselib_val **slot;
  cselib_val *addr;
  cselib_val *mem_elt;

  if (MEM_VOLATILE_P (x) || mode == BLKmode
      || !cselib_record_memory
      || (FLOAT_MODE_P (mode) && flag_float_store))
    return 0;

  addr_mode = GET_MODE (XEXP (x, 0));
  if (addr_mode == VOIDmode)
    addr_mode = Pmode;

  /* Look up the value for the address.  */
  addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
  if (! addr)
    return 0;
  addr = canonical_cselib_val (addr);

  /* Find a value that describes a value of our mode at that address.  */
  addr_space_t as = MEM_ADDR_SPACE (x);
  for (elt_list *l = addr->addr_list; l; l = l->next)
    if (GET_MODE (l->elt->val_rtx) == mode)
      {
        for (elt_loc_list *l2 = l->elt->locs; l2; l2 = l2->next)
          if (MEM_P (l2->loc) && MEM_ADDR_SPACE (l2->loc) == as)
            {
              promote_debug_loc (l->elt->locs);
              return l->elt;
            }
      }

  if (! create)
    return 0;

  mem_elt = new_cselib_val (next_uid, mode, x);
  add_mem_for_addr (addr, mem_elt, x);
  slot = cselib_find_slot (mode, x, mem_elt->hash, INSERT, VOIDmode);
  *slot = mem_elt;
  return mem_elt;
}

/* Search through the possible substitutions in P.  We prefer a non-reg
   substitution because this allows us to expand the tree further.  If
   we find just a reg, take the lowest regno.  There may be several
   non-reg results; we just take the first one because they will all
   expand to the same place.  */

static rtx
expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
            int max_depth)
{
  rtx reg_result = NULL;
  unsigned int regno = UINT_MAX;
  struct elt_loc_list *p_in = p;

  for (; p; p = p->next)
    {
      /* Return these right away to avoid returning stack pointer based
         expressions for frame pointer and vice versa, which is something
         that would confuse DSE.  See the comment in cselib_expand_value_rtx_1
         for more details.  */
      if (REG_P (p->loc)
          && (REGNO (p->loc) == STACK_POINTER_REGNUM
              || REGNO (p->loc) == FRAME_POINTER_REGNUM
              || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
              || REGNO (p->loc) == cfa_base_preserved_regno))
        return p->loc;
      /* Avoid infinite recursion trying to expand a reg into
         the same reg.  */
      if ((REG_P (p->loc))
          && (REGNO (p->loc) < regno)
          && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
        {
          reg_result = p->loc;
          regno = REGNO (p->loc);
        }
      /* Avoid infinite recursion and do not try to expand the
         value.  */
      else if (GET_CODE (p->loc) == VALUE
               && CSELIB_VAL_PTR (p->loc)->locs == p_in)
        continue;
      else if (!REG_P (p->loc))
        {
          rtx result, note;
          if (dump_file && (dump_flags & TDF_CSELIB))
            {
              print_inline_rtx (dump_file, p->loc, 0);
              fprintf (dump_file, "\n");
            }
          if (GET_CODE (p->loc) == LO_SUM
              && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
              && p->setting_insn
              && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
              && XEXP (note, 0) == XEXP (p->loc, 1))
            return XEXP (p->loc, 1);
          result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
          if (result)
            return result;
        }

    }

  if (regno != UINT_MAX)
    {
      rtx result;
      if (dump_file && (dump_flags & TDF_CSELIB))
        fprintf (dump_file, "r%d\n", regno);

      result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
      if (result)
        return result;
    }

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      if (reg_result)
        {
          print_inline_rtx (dump_file, reg_result, 0);
          fprintf (dump_file, "\n");
        }
      else
        fprintf (dump_file, "NULL\n");
    }
  return reg_result;
}


/* Forward substitute and expand an expression out to its roots.
   This is the opposite of common subexpression.  Because local value
   numbering is such a weak optimization, the expanded expression is
   pretty much unique (not from a pointer equality point of view but
   from a tree shape point of view).

   This function returns NULL if the expansion fails.  The expansion
   will fail if there is no value number for one of the operands or if
   one of the operands has been overwritten between the current insn
   and the beginning of the basic block.  For instance x has no
   expansion in:

   r1 <- r1 + 3
   x <- r1 + 8

   REGS_ACTIVE is a scratch bitmap that should be clear when passing in.
   It is clear on return.  */

rtx
cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = NULL;
  evd.callback_arg = NULL;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}

/* Same as cselib_expand_value_rtx, but using a callback to try to
   resolve some expressions.  The CB function should return ORIG if it
   can't or does not want to deal with a certain RTX.  Any other
   return value, including NULL, will be used as the expansion for
   VALUE, without any further changes.  */

rtx
cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
                            cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}

/* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
   or simplified.  Useful to find out whether cselib_expand_value_rtx_cb
   would return NULL or non-NULL, without allocating new rtx.  */

bool
cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
                                  cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = true;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
}

/* Internal implementation of cselib_expand_value_rtx and
   cselib_expand_value_rtx_cb.  */

static rtx
cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
                           int max_depth)
{
  rtx copy, scopy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;
  machine_mode mode;

  code = GET_CODE (orig);

  /* For the context of dse, if we end up expanding into a huge tree, we
     will not have a useful address, so we might as well just give up
     quickly.  */
  if (max_depth <= 0)
    return NULL;

  switch (code)
    {
    case REG:
      {
        struct elt_list *l = REG_VALUES (REGNO (orig));

        if (l && l->elt == NULL)
          l = l->next;
        for (; l; l = l->next)
          if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
            {
              rtx result;
              unsigned regno = REGNO (orig);

              /* The only thing that we are not willing to do (this
                 is a requirement of dse; if other potential uses
                 need this function we should add a parm to control
                 it) is that we will not substitute the
                 STACK_POINTER_REGNUM, FRAME_POINTER or the
                 HARD_FRAME_POINTER.

                 These expansions confuse the code that notices that
                 stores into the frame go dead at the end of the
                 function and that the frame is not affected by calls
                 to subroutines.  If you allow the
                 STACK_POINTER_REGNUM substitution, then dse will
                 think that parameter pushing also goes dead which is
                 wrong.  If you allow the FRAME_POINTER or the
                 HARD_FRAME_POINTER then you lose the opportunity to
                 make the frame assumptions.  */
              if (regno == STACK_POINTER_REGNUM
                  || regno == FRAME_POINTER_REGNUM
                  || regno == HARD_FRAME_POINTER_REGNUM
                  || regno == cfa_base_preserved_regno)
                return orig;

              bitmap_set_bit (evd->regs_active, regno);

              if (dump_file && (dump_flags & TDF_CSELIB))
                fprintf (dump_file, "expanding: r%d into: ", regno);

              result = expand_loc (l->elt->locs, evd, max_depth);
              bitmap_clear_bit (evd->regs_active, regno);

              if (result)
                return result;
              else
                return orig;
            }
        return orig;
      }

    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case SCRATCH:
      /* SCRATCH must be shared because each one represents a distinct
         value.  */
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
        return orig;
      break;

    case CONST:
      if (shared_const_p (orig))
        return orig;
      break;

    case SUBREG:
      {
        rtx subreg;

        if (evd->callback)
          {
            subreg = evd->callback (orig, evd->regs_active, max_depth,
                                    evd->callback_arg);
            if (subreg != orig)
              return subreg;
          }

        subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
                                            max_depth - 1);
        if (!subreg)
          return NULL;
        scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
                                     GET_MODE (SUBREG_REG (orig)),
                                     SUBREG_BYTE (orig));
        if (scopy == NULL
            || (GET_CODE (scopy) == SUBREG
                && !REG_P (SUBREG_REG (scopy))
                && !MEM_P (SUBREG_REG (scopy))))
          return NULL;

        return scopy;
      }

    case VALUE:
      {
        rtx result;

        if (dump_file && (dump_flags & TDF_CSELIB))
          {
            fputs ("\nexpanding ", dump_file);
            print_rtl_single (dump_file, orig);
            fputs (" into...", dump_file);
          }

        if (evd->callback)
          {
            result = evd->callback (orig, evd->regs_active, max_depth,
                                    evd->callback_arg);

            if (result != orig)
              return result;
          }

        result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
        return result;
      }

    case DEBUG_EXPR:
      if (evd->callback)
        return evd->callback (orig, evd->regs_active, max_depth,
                              evd->callback_arg);
      return orig;

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  if (evd->dummy)
    copy = NULL;
  else
    copy = shallow_copy_rtx (orig);

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
        if (XEXP (orig, i) != NULL)
          {
            rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
                                                    max_depth - 1);
            if (!result)
              return NULL;
            if (copy)
              XEXP (copy, i) = result;
          }
        break;

      case 'E':
      case 'V':
        if (XVEC (orig, i) != NULL)
          {
            if (copy)
              XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
            for (j = 0; j < XVECLEN (orig, i); j++)
              {
                rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
                                                        evd, max_depth - 1);
                if (!result)
                  return NULL;
                if (copy)
                  XVECEXP (copy, i, j) = result;
              }
          }
        break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'T':
      case 'u':
      case 'B':
      case '0':
        /* These are left unchanged.  */
        break;

      default:
        gcc_unreachable ();
      }

  if (evd->dummy)
    return orig;

  mode = GET_MODE (copy);
  /* If an operand has been simplified into CONST_INT, which doesn't
     have a mode and the mode isn't derivable from the whole rtx's mode,
     try simplify_*_operation first with the mode of the original
     operand, and as a fallback wrap the CONST_INT into gen_rtx_CONST.  */
  scopy = copy;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONST_INT_P (XEXP (copy, 0))
          && GET_MODE (XEXP (orig, 0)) != VOIDmode)
        {
          scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
                                            GET_MODE (XEXP (orig, 0)));
          if (scopy)
            return scopy;
        }
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      /* These expressions can derive operand modes from the whole rtx's
         mode.  */
      break;
    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      if (CONST_INT_P (XEXP (copy, 0))
          && GET_MODE (XEXP (orig, 0)) != VOIDmode)
        {
          scopy = simplify_ternary_operation (code, mode,
                                              GET_MODE (XEXP (orig, 0)),
                                              XEXP (copy, 0), XEXP (copy, 1),
                                              XEXP (copy, 2));
          if (scopy)
            return scopy;
        }
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONST_INT_P (XEXP (copy, 0))
          && GET_MODE (XEXP (copy, 1)) == VOIDmode
          && (GET_MODE (XEXP (orig, 0)) != VOIDmode
              || GET_MODE (XEXP (orig, 1)) != VOIDmode))
        {
          scopy = simplify_relational_operation (code, mode,
                                                 (GET_MODE (XEXP (orig, 0))
                                                  != VOIDmode)
                                                 ? GET_MODE (XEXP (orig, 0))
                                                 : GET_MODE (XEXP (orig, 1)),
                                                 XEXP (copy, 0),
                                                 XEXP (copy, 1));
          if (scopy)
            return scopy;
        }
      break;
    default:
      break;
    }
  scopy = simplify_rtx (copy);
  if (scopy)
    return scopy;
  return copy;
}

/* Walk rtx X and replace all occurrences of REG and MEM subexpressions
   with VALUE expressions.  This way, it becomes independent of changes
   to registers and memory.
   X isn't actually modified; if modifications are needed, new rtl is
   allocated.  However, the return value can share rtl with X.
   If X is within a MEM, MEMMODE must be the mode of the MEM.  */

rtx
cselib_subst_to_values (rtx x, machine_mode memmode)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  cselib_val *e;
  struct elt_list *l;
  rtx copy = x;
  int i;
  poly_int64 offset;

  switch (code)
    {
    case REG:
      l = REG_VALUES (REGNO (x));
      if (l && l->elt == NULL)
        l = l->next;
      for (; l; l = l->next)
        if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
          return l->elt->val_rtx;

      gcc_unreachable ();

    case MEM:
      e = cselib_lookup_mem (x, 0);
      /* This used to happen for autoincrements, but we deal with them
         properly now.  Remove the if stmt for the next release.  */
      if (! e)
        {
          /* Assign a value that doesn't match any other.  */
          e = new_cselib_val (next_uid, GET_MODE (x), x);
        }
      return e->val_rtx;

    case ENTRY_VALUE:
      e = cselib_lookup (x, GET_MODE (x), 0, memmode);
      if (! e)
        break;
      return e->val_rtx;

    CASE_CONST_ANY:
      return x;

    case PRE_DEC:
    case PRE_INC:
      gcc_assert (memmode != VOIDmode);
      offset = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
        offset = -offset;
      return cselib_subst_to_values (plus_constant (GET_MODE (x),
                                                    XEXP (x, 0), offset),
                                     memmode);

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 1), memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 0), memmode);

    case PLUS:
      if (GET_MODE (x) == Pmode && CONST_INT_P (XEXP (x, 1)))
        {
          rtx t = cselib_subst_to_values (XEXP (x, 0), memmode);
          if (GET_CODE (t) == VALUE)
            {
              if (SP_DERIVED_VALUE_P (t) && XEXP (x, 1) == const0_rtx)
                return t;
              for (struct elt_loc_list *l = CSELIB_VAL_PTR (t)->locs;
                   l; l = l->next)
                if (GET_CODE (l->loc) == PLUS
                    && GET_CODE (XEXP (l->loc, 0)) == VALUE
                    && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
                    && CONST_INT_P (XEXP (l->loc, 1)))
                  return plus_constant (Pmode, l->loc, INTVAL (XEXP (x, 1)));
            }
          if (t != XEXP (x, 0))
            {
              copy = shallow_copy_rtx (x);
              XEXP (copy, 0) = t;
            }
          return copy;
        }

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          rtx t = cselib_subst_to_values (XEXP (x, i), memmode);

          if (t != XEXP (x, i))
            {
              if (x == copy)
                copy = shallow_copy_rtx (x);
              XEXP (copy, i) = t;
            }
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = 0; j < XVECLEN (x, i); j++)
            {
              rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);

              if (t != XVECEXP (x, i, j))
                {
                  if (XVEC (x, i) == XVEC (copy, i))
                    {
                      if (x == copy)
                        copy = shallow_copy_rtx (x);
                      XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
                    }
                  XVECEXP (copy, i, j) = t;
                }
            }
        }
    }

  return copy;
}
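
/* An illustrative example (not part of this file) of the autoinc
   handling above: for a push through (mem:SI (pre_dec:P (reg sp)))
   the PRE_DEC case rewrites the address as sp plus -4 (assuming
   4-byte SImode) before recursing, so the returned VALUE describes
   the address actually accessed.

     rtx addr = XEXP (mem, 0);           // (pre_dec:P (reg sp))
     rtx v = cselib_subst_to_values (addr, GET_MODE (mem));
     // v is the VALUE recorded for sp - 4.  */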

/* Wrapper for cselib_subst_to_values, that indicates X is in INSN.  */

rtx
cselib_subst_to_values_from_insn (rtx x, machine_mode memmode, rtx_insn *insn)
{
  rtx ret;
  gcc_assert (!cselib_current_insn);
  cselib_current_insn = insn;
  ret = cselib_subst_to_values (x, memmode);
  cselib_current_insn = NULL;
  return ret;
}

/* Look up the rtl expression X in our tables and return the value it
   has.  If CREATE is zero, we return NULL if we don't know the value.
   Otherwise, we create a new one if possible, using mode MODE if X
   doesn't have a mode (i.e. because it's a constant).  When X is part
   of an address, MEMMODE should be the mode of the enclosing MEM if
   we're tracking autoinc expressions.  */

static cselib_val *
cselib_lookup_1 (rtx x, machine_mode mode,
                 int create, machine_mode memmode)
{
  cselib_val **slot;
  cselib_val *e;
  unsigned int hashval;

  if (GET_MODE (x) != VOIDmode)
    mode = GET_MODE (x);

  if (GET_CODE (x) == VALUE)
    return CSELIB_VAL_PTR (x);

  if (REG_P (x))
    {
      struct elt_list *l;
      unsigned int i = REGNO (x);

      l = REG_VALUES (i);
      if (l && l->elt == NULL)
        l = l->next;
      for (; l; l = l->next)
        if (mode == GET_MODE (l->elt->val_rtx))
          {
            promote_debug_loc (l->elt->locs);
            return l->elt;
          }

      if (! create)
        return 0;

      if (i < FIRST_PSEUDO_REGISTER)
        {
          unsigned int n = hard_regno_nregs (i, mode);

          if (n > max_value_regs)
            max_value_regs = n;
        }

      e = new_cselib_val (next_uid, GET_MODE (x), x);
      if (GET_MODE (x) == Pmode && x == stack_pointer_rtx)
        SP_DERIVED_VALUE_P (e->val_rtx) = 1;
      new_elt_loc_list (e, x);

      scalar_int_mode int_mode;
      if (REG_VALUES (i) == 0)
        {
          /* Maintain the invariant that the first entry of
             REG_VALUES, if present, must be the value used to set the
             register, or NULL.  */
          used_regs[n_used_regs++] = i;
          REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
        }
      else if (cselib_preserve_constants
               && is_int_mode (mode, &int_mode))
        {
          /* During var-tracking, try harder to find equivalences
             for SUBREGs.  If a setter sets, say, a DImode register
             and the register is used only in SImode, add a lowpart
             subreg location.  */
          struct elt_list *lwider = NULL;
          scalar_int_mode lmode;
          l = REG_VALUES (i);
          if (l && l->elt == NULL)
            l = l->next;
          for (; l; l = l->next)
            if (is_int_mode (GET_MODE (l->elt->val_rtx), &lmode)
                && GET_MODE_SIZE (lmode) > GET_MODE_SIZE (int_mode)
                && (lwider == NULL
                    || partial_subreg_p (lmode,
                                         GET_MODE (lwider->elt->val_rtx))))
              {
                struct elt_loc_list *el;
                if (i < FIRST_PSEUDO_REGISTER
                    && hard_regno_nregs (i, lmode) != 1)
                  continue;
                for (el = l->elt->locs; el; el = el->next)
                  if (!REG_P (el->loc))
                    break;
                if (el)
                  lwider = l;
              }
          if (lwider)
            {
              rtx sub = lowpart_subreg (int_mode, lwider->elt->val_rtx,
                                        GET_MODE (lwider->elt->val_rtx));
              if (sub)
                new_elt_loc_list (e, sub);
            }
        }
      REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
      slot = cselib_find_slot (mode, x, e->hash, INSERT, memmode);
      *slot = e;
      return e;
    }

  if (MEM_P (x))
    return cselib_lookup_mem (x, create);

  hashval = cselib_hash_rtx (x, create, memmode);
  /* Can't even create if hashing is not possible.  */
  if (! hashval)
    return 0;

  slot = cselib_find_slot (mode, x, hashval,
                           create ? INSERT : NO_INSERT, memmode);
  if (slot == 0)
    return 0;

  e = (cselib_val *) *slot;
  if (e)
    return e;

  e = new_cselib_val (hashval, mode, x);

  /* We have to fill the slot before calling cselib_subst_to_values:
     the hash table is inconsistent until we do so, and
     cselib_subst_to_values will need to do lookups.  */
  *slot = e;
  rtx v = cselib_subst_to_values (x, memmode);

  /* If cselib_preserve_constants, we might get a SP_DERIVED_VALUE_P
     VALUE that isn't in the hash tables anymore.  */
  if (GET_CODE (v) == VALUE && SP_DERIVED_VALUE_P (v) && PRESERVED_VALUE_P (v))
    PRESERVED_VALUE_P (e->val_rtx) = 1;

  new_elt_loc_list (e, v);
  return e;
}

/* Wrapper for cselib_lookup, that indicates X is in INSN.  */

cselib_val *
cselib_lookup_from_insn (rtx x, machine_mode mode,
                         int create, machine_mode memmode, rtx_insn *insn)
{
  cselib_val *ret;

  gcc_assert (!cselib_current_insn);
  cselib_current_insn = insn;

  ret = cselib_lookup (x, mode, create, memmode);

  cselib_current_insn = NULL;

  return ret;
}

/* Wrapper for cselib_lookup_1, that logs the lookup result and
   maintains invariants related to debug insns.  */

cselib_val *
cselib_lookup (rtx x, machine_mode mode,
               int create, machine_mode memmode)
{
  cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);

  /* ??? Should we return NULL if we're not to create an entry, the
     found loc is a debug loc and cselib_current_insn is not DEBUG?
     If so, we should also avoid converting val to non-DEBUG; probably
     easiest setting cselib_current_insn to NULL before the call
     above.  */

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fputs ("cselib lookup ", dump_file);
      print_inline_rtx (dump_file, x, 2);
      fprintf (dump_file, " => %u:%u\n",
               ret ? ret->uid : 0,
               ret ? ret->hash : 0);
    }

  return ret;
}
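
/* An illustrative sketch (not part of this file): with CREATE == 0,
   cselib_lookup only queries the tables, so two rtxs can be tested
   for a known equivalence without growing the tables.  x and y are
   hypothetical.

     cselib_val *vx = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
     cselib_val *vy = cselib_lookup (y, GET_MODE (y), 0, VOIDmode);
     if (vx && vx == vy)
       ;  // x and y are known to hold the same value here.  */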

/* Invalidate the value at *L, which is part of REG_VALUES (REGNO).  */

static void
cselib_invalidate_regno_val (unsigned int regno, struct elt_list **l)
{
  cselib_val *v = (*l)->elt;
  if (*l == REG_VALUES (regno))
    {
      /* Maintain the invariant that the first entry of
         REG_VALUES, if present, must be the value used to set
         the register, or NULL.  This is also nice because
         then we won't push the same regno onto used_regs
         multiple times.  */
      (*l)->elt = NULL;
      l = &(*l)->next;
    }
  else
    unchain_one_elt_list (l);

  v = canonical_cselib_val (v);

  bool had_locs = v->locs != NULL;
  rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;

  /* Now, we clear the mapping from value to reg.  It must exist, so
     this code will crash intentionally if it doesn't.  */
  for (elt_loc_list **p = &v->locs; ; p = &(*p)->next)
    {
      rtx x = (*p)->loc;

      if (REG_P (x) && REGNO (x) == regno)
        {
          unchain_one_elt_loc_list (p);
          break;
        }
    }

  if (had_locs && cselib_useless_value_p (v))
    {
      if (setting_insn && DEBUG_INSN_P (setting_insn))
        n_useless_debug_values++;
      else
        n_useless_values++;
    }
}

/* Invalidate any entries in reg_values that overlap REGNO.  This is called
   if REGNO is changing.  MODE is the mode of the assignment to REGNO, which
   is used to determine how many hard registers are being changed.  If MODE
   is VOIDmode, then only REGNO is being changed; this is used when
   invalidating call clobbered registers across a call.  */

static void
cselib_invalidate_regno (unsigned int regno, machine_mode mode)
{
  unsigned int endregno;
  unsigned int i;

  /* If we see pseudos after reload, something is _wrong_.  */
  gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
              || reg_renumber[regno] < 0);

  /* Determine the range of registers that must be invalidated.  For
     pseudos, only REGNO is affected.  For hard regs, we must take MODE
     into account, and we must also invalidate lower register numbers
     if they contain values that overlap REGNO.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      gcc_assert (mode != VOIDmode);

      if (regno < max_value_regs)
        i = 0;
      else
        i = regno - max_value_regs;

      endregno = end_hard_regno (mode, regno);
    }
  else
    {
      i = regno;
      endregno = regno + 1;
    }

  for (; i < endregno; i++)
    {
      struct elt_list **l = &REG_VALUES (i);

      /* Go through all known values for this reg; if it overlaps the range
         we're invalidating, remove the value.  */
      while (*l)
        {
          cselib_val *v = (*l)->elt;
          unsigned int this_last = i;

          if (i < FIRST_PSEUDO_REGISTER && v != NULL)
            this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;

          if (this_last < regno || v == NULL
              || (v == cfa_base_preserved_val
                  && i == cfa_base_preserved_regno))
            {
              l = &(*l)->next;
              continue;
            }

          /* We have an overlap.  */
          cselib_invalidate_regno_val (i, l);
        }
    }
}
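
/* An illustrative example (not part of this file) of the overlap
   handling above: on a target where (reg:DI 0) occupies hard
   registers 0 and 1, after a DImode value has been recorded for
   register 0,

     cselib_invalidate_regno (1, SImode);

   must also remove that DImode value.  That is why the scan starts
   up to max_value_regs below REGNO and recomputes each value's last
   hard register from its mode.  */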

/* Invalidate any locations in the table which are changed because of a
   store to MEM_RTX.  If this is called because of a non-const call
   instruction, MEM_RTX is (mem:BLK const0_rtx).  */

static void
cselib_invalidate_mem (rtx mem_rtx)
{
  cselib_val **vp, *v, *next;
  int num_mems = 0;
  rtx mem_addr;

  mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
  mem_rtx = canon_rtx (mem_rtx);

  vp = &first_containing_mem;
  for (v = *vp; v != &dummy_val; v = next)
    {
      bool has_mem = false;
      struct elt_loc_list **p = &v->locs;
      bool had_locs = v->locs != NULL;
      rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;

      while (*p)
        {
          rtx x = (*p)->loc;
          cselib_val *addr;
          struct elt_list **mem_chain;

          /* MEMs may occur in locations only at the top level; below
             that every MEM or REG is substituted by its VALUE.  */
          if (!MEM_P (x))
            {
              p = &(*p)->next;
              continue;
            }
          if (num_mems < param_max_cselib_memory_locations
              && ! canon_anti_dependence (x, false, mem_rtx,
                                          GET_MODE (mem_rtx), mem_addr))
            {
              has_mem = true;
              num_mems++;
              p = &(*p)->next;
              continue;
            }

          /* This one overlaps.  */
          /* We must have a mapping from this MEM's address to the
             value (E).  Remove that, too.  */
          addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
          addr = canonical_cselib_val (addr);
          gcc_checking_assert (v == canonical_cselib_val (v));
          mem_chain = &addr->addr_list;
          for (;;)
            {
              cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);

              if (canon == v)
                {
                  unchain_one_elt_list (mem_chain);
                  break;
                }

              /* Record canonicalized elt.  */
              (*mem_chain)->elt = canon;

              mem_chain = &(*mem_chain)->next;
            }

          unchain_one_elt_loc_list (p);
        }

      if (had_locs && cselib_useless_value_p (v))
        {
          if (setting_insn && DEBUG_INSN_P (setting_insn))
            n_useless_debug_values++;
          else
            n_useless_values++;
        }

      next = v->next_containing_mem;
      if (has_mem)
        {
          *vp = v;
          vp = &(*vp)->next_containing_mem;
        }
      else
        v->next_containing_mem = NULL;
    }
  *vp = &dummy_val;
}

/* Invalidate DEST.  */

void
cselib_invalidate_rtx (rtx dest)
{
  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (REG_P (dest))
    cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
  else if (MEM_P (dest))
    cselib_invalidate_mem (dest);
}

/* A wrapper for cselib_invalidate_rtx to be called via note_stores.  */

static void
cselib_invalidate_rtx_note_stores (rtx dest, const_rtx,
                                   void *data ATTRIBUTE_UNUSED)
{
  cselib_invalidate_rtx (dest);
}

/* Record the result of a SET instruction.  DEST is being set; the source
   contains the value described by SRC_ELT.  If DEST is a MEM, DEST_ADDR_ELT
   describes its address.  */

static void
cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
{
  if (src_elt == 0 || side_effects_p (dest))
    return;

  if (REG_P (dest))
    {
      unsigned int dreg = REGNO (dest);
      if (dreg < FIRST_PSEUDO_REGISTER)
        {
          unsigned int n = REG_NREGS (dest);

          if (n > max_value_regs)
            max_value_regs = n;
        }

      if (REG_VALUES (dreg) == 0)
        {
          used_regs[n_used_regs++] = dreg;
          REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
        }
      else
        {
          /* The register should have been invalidated.  */
          gcc_assert (REG_VALUES (dreg)->elt == 0);
          REG_VALUES (dreg)->elt = src_elt;
        }

      if (cselib_useless_value_p (src_elt))
        n_useless_values--;
      new_elt_loc_list (src_elt, dest);
    }
  else if (MEM_P (dest) && dest_addr_elt != 0
           && cselib_record_memory)
    {
      if (cselib_useless_value_p (src_elt))
        n_useless_values--;
      add_mem_for_addr (dest_addr_elt, src_elt, dest);
    }
}

/* Make ELT and X's VALUE equivalent to each other at INSN.  */

void
cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx_insn *insn)
{
  cselib_val *nelt;
  rtx_insn *save_cselib_current_insn = cselib_current_insn;

  gcc_checking_assert (elt);
  gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
  gcc_checking_assert (!side_effects_p (x));

  cselib_current_insn = insn;

  nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);

  if (nelt != elt)
    {
      cselib_any_perm_equivs = true;

      if (!PRESERVED_VALUE_P (nelt->val_rtx))
        cselib_preserve_value (nelt);

      new_elt_loc_list (nelt, elt->val_rtx);
    }

  cselib_current_insn = save_cselib_current_insn;
}
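
/* An illustrative sketch (not part of this file): recording that a
   preserved value V is permanently equal to a zero_extension of SRC,
   as cselib_record_sets does below for STRICT_LOW_PART stores.  v,
   mode, src and insn are assumptions of the example.

     cselib_preserve_value (v);
     rtx ext = gen_rtx_ZERO_EXTEND (mode, src);
     cselib_add_permanent_equiv (v, ext, insn);  */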

/* Return TRUE if any permanent equivalences have been recorded since
   the table was last initialized.  */
bool
cselib_have_permanent_equivalences (void)
{
  return cselib_any_perm_equivs;
}

/* Record stack_pointer_rtx to be equal to
   (plus:P cfa_base_preserved_val offset).  Used by var-tracking
   at the start of basic blocks for !frame_pointer_needed functions.  */

void
cselib_record_sp_cfa_base_equiv (HOST_WIDE_INT offset, rtx_insn *insn)
{
  rtx sp_derived_value = NULL_RTX;
  for (struct elt_loc_list *l = cfa_base_preserved_val->locs; l; l = l->next)
    if (GET_CODE (l->loc) == VALUE
        && SP_DERIVED_VALUE_P (l->loc))
      {
        sp_derived_value = l->loc;
        break;
      }
    else if (GET_CODE (l->loc) == PLUS
             && GET_CODE (XEXP (l->loc, 0)) == VALUE
             && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
             && CONST_INT_P (XEXP (l->loc, 1)))
      {
        sp_derived_value = XEXP (l->loc, 0);
        offset = offset + UINTVAL (XEXP (l->loc, 1));
        break;
      }
  if (sp_derived_value == NULL_RTX)
    return;
  cselib_val *val
    = cselib_lookup_from_insn (plus_constant (Pmode, sp_derived_value, offset),
                               Pmode, 1, VOIDmode, insn);
  if (val != NULL)
    {
      PRESERVED_VALUE_P (val->val_rtx) = 1;
      cselib_record_set (stack_pointer_rtx, val, NULL);
    }
}

/* Return true if V is SP_DERIVED_VALUE_P (or SP_DERIVED_VALUE_P + CONST_INT)
   that can be expressed using cfa_base_preserved_val + CONST_INT.  */

bool
cselib_sp_derived_value_p (cselib_val *v)
{
  if (!SP_DERIVED_VALUE_P (v->val_rtx))
    for (struct elt_loc_list *l = v->locs; l; l = l->next)
      if (GET_CODE (l->loc) == PLUS
          && GET_CODE (XEXP (l->loc, 0)) == VALUE
          && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
          && CONST_INT_P (XEXP (l->loc, 1)))
        v = CSELIB_VAL_PTR (XEXP (l->loc, 0));
  if (!SP_DERIVED_VALUE_P (v->val_rtx))
    return false;
  for (struct elt_loc_list *l = v->locs; l; l = l->next)
    if (l->loc == cfa_base_preserved_val->val_rtx)
      return true;
    else if (GET_CODE (l->loc) == PLUS
             && XEXP (l->loc, 0) == cfa_base_preserved_val->val_rtx
             && CONST_INT_P (XEXP (l->loc, 1)))
      return true;
  return false;
}
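
/* An illustrative example (not part of this file): with V_SP an
   SP_DERIVED_VALUE_P value whose locations include
   cfa_base_preserved_val, both of the location shapes

     (value V_SP)
     (plus (value V_SP) (const_int -16))

   satisfy the predicate above, since either can be rewritten as
   cfa_base_preserved_val + CONST_INT; a value based on a pseudo
   cannot.  */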

/* There is no good way to determine how many elements there can be
   in a PARALLEL.  Since it's fairly cheap, use a really large number.  */
#define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)

struct cselib_record_autoinc_data
{
  struct cselib_set *sets;
  int n_sets;
};

/* Callback for for_each_inc_dec.  Records in ARG the SETs implied by
   autoinc RTXs: SRC plus SRCOFF if non-NULL is stored in DEST.  */

static int
cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
                          rtx dest, rtx src, rtx srcoff, void *arg)
{
  struct cselib_record_autoinc_data *data;
  data = (struct cselib_record_autoinc_data *)arg;

  data->sets[data->n_sets].dest = dest;

  if (srcoff)
    data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
  else
    data->sets[data->n_sets].src = src;

  data->n_sets++;

  return 0;
}

/* Record the effects of any sets and autoincs in INSN.  */
static void
cselib_record_sets (rtx_insn *insn)
{
  int n_sets = 0;
  int i;
  struct cselib_set sets[MAX_SETS];
  rtx cond = 0;
  int n_sets_before_autoinc;
  int n_strict_low_parts = 0;
  struct cselib_record_autoinc_data data;

  rtx body = PATTERN (insn);
  if (GET_CODE (body) == COND_EXEC)
    {
      cond = COND_EXEC_TEST (body);
      body = COND_EXEC_CODE (body);
    }

  /* Find all sets.  */
  if (GET_CODE (body) == SET)
    {
      sets[0].src = SET_SRC (body);
      sets[0].dest = SET_DEST (body);
      n_sets = 1;
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      /* Look through the PARALLEL and record the values being
         set, if possible.  Also handle any CLOBBERs.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
        {
          rtx x = XVECEXP (body, 0, i);

          if (GET_CODE (x) == SET)
            {
              sets[n_sets].src = SET_SRC (x);
              sets[n_sets].dest = SET_DEST (x);
              n_sets++;
            }
        }
    }

  if (n_sets == 1
      && MEM_P (sets[0].src)
      && !cselib_record_memory
      && MEM_READONLY_P (sets[0].src))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
        sets[0].src = XEXP (note, 0);
    }

  data.sets = sets;
  data.n_sets = n_sets_before_autoinc = n_sets;
  for_each_inc_dec (PATTERN (insn), cselib_record_autoinc_cb, &data);
  n_sets = data.n_sets;

  /* Look up the values that are read.  Do this before invalidating the
     locations that are written.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      rtx orig = dest;

      /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
         the low part after invalidating any knowledge about larger modes.  */
      if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
        sets[i].dest = dest = XEXP (dest, 0);

      /* We don't know how to record anything but REG or MEM.  */
      if (REG_P (dest)
          || (MEM_P (dest) && cselib_record_memory))
        {
          rtx src = sets[i].src;
          if (cond)
            src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
          sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
          if (MEM_P (dest))
            {
              machine_mode address_mode = get_address_mode (dest);

              sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
                                                     address_mode, 1,
                                                     GET_MODE (dest));
            }
          else
            sets[i].dest_addr_elt = 0;
        }

      /* Improve handling of STRICT_LOW_PART: if the current value is
         known to be const0_rtx, then the store sets the low bits from
         DEST and the higher bits remain zero.  Used in code like:

         {di:SI=0;clobber flags:CC;}
         flags:CCNO=cmp(bx:SI,0)
         strict_low_part(di:QI)=flags:CCNO<=0

         where we can note both that di:QI=flags:CCNO<=0 and also,
         because di:SI is known to be 0 and strict_low_part(di:QI)
         preserves the upper bits, that di:SI=zero_extend(flags:CCNO<=0).  */
      scalar_int_mode mode;
      if (dest != orig
          && cselib_record_sets_hook
          && REG_P (dest)
          && HARD_REGISTER_P (dest)
          && sets[i].src_elt
          && is_a <scalar_int_mode> (GET_MODE (dest), &mode)
          && n_sets + n_strict_low_parts < MAX_SETS)
        {
          opt_scalar_int_mode wider_mode_iter;
          FOR_EACH_WIDER_MODE (wider_mode_iter, mode)
            {
              scalar_int_mode wider_mode = wider_mode_iter.require ();
              if (GET_MODE_PRECISION (wider_mode) > BITS_PER_WORD)
                break;

              rtx reg = gen_lowpart (wider_mode, dest);
              if (!REG_P (reg))
                break;

              cselib_val *v = cselib_lookup (reg, wider_mode, 0, VOIDmode);
              if (!v)
                continue;

              struct elt_loc_list *l;
              for (l = v->locs; l; l = l->next)
                if (l->loc == const0_rtx)
                  break;

              if (!l)
                continue;

              sets[n_sets + n_strict_low_parts].dest = reg;
              sets[n_sets + n_strict_low_parts].src = dest;
              sets[n_sets + n_strict_low_parts++].src_elt = sets[i].src_elt;
              break;
            }
        }
    }

  if (cselib_record_sets_hook)
    cselib_record_sets_hook (insn, sets, n_sets);

  /* Invalidate all locations written by this insn.  Note that the elts we
     looked up in the previous loop aren't affected, just some of their
     locations may go away.  */
  note_pattern_stores (body, cselib_invalidate_rtx_note_stores, NULL);

  for (i = n_sets_before_autoinc; i < n_sets; i++)
    cselib_invalidate_rtx (sets[i].dest);

  /* If this is an asm, look for duplicate sets.  This can happen when the
     user uses the same value as an output multiple times.  This is valid
     if the outputs are not actually used thereafter.  Treat this case as
     if the value isn't actually set.  We do this by smashing the destination
     to pc_rtx, so that we won't record the value later.  */
  if (n_sets >= 2 && asm_noperands (body) >= 0)
    {
      for (i = 0; i < n_sets; i++)
        {
          rtx dest = sets[i].dest;
          if (REG_P (dest) || MEM_P (dest))
            {
              int j;
              for (j = i + 1; j < n_sets; j++)
                if (rtx_equal_p (dest, sets[j].dest))
                  {
                    sets[i].dest = pc_rtx;
                    sets[j].dest = pc_rtx;
                  }
            }
        }
    }

  /* Now enter the equivalences in our tables.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      if (REG_P (dest)
          || (MEM_P (dest) && cselib_record_memory))
        cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
    }

  /* And deal with STRICT_LOW_PART.  */
  for (i = 0; i < n_strict_low_parts; i++)
    {
      if (! PRESERVED_VALUE_P (sets[n_sets + i].src_elt->val_rtx))
        continue;
      machine_mode dest_mode = GET_MODE (sets[n_sets + i].dest);
      cselib_val *v
        = cselib_lookup (sets[n_sets + i].dest, dest_mode, 1, VOIDmode);
      cselib_preserve_value (v);
      rtx r = gen_rtx_ZERO_EXTEND (dest_mode,
                                   sets[n_sets + i].src_elt->val_rtx);
      cselib_add_permanent_equiv (v, r, insn);
    }
}

/* Return true if INSN in the prologue initializes hard_frame_pointer_rtx.  */

bool
fp_setter_insn (rtx_insn *insn)
{
  rtx expr, pat = NULL_RTX;

  if (!RTX_FRAME_RELATED_P (insn))
    return false;

  expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
  if (expr)
    pat = XEXP (expr, 0);
  if (!modified_in_p (hard_frame_pointer_rtx, pat ? pat : insn))
    return false;

  /* Don't return true for frame pointer restores in the epilogue.  */
  if (find_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx))
    return false;
  return true;
}

/* V is one of the values in REG_VALUES (REGNO).  Return true if it
   would be invalidated by CALLEE_ABI.  */

static bool
cselib_invalidated_by_call_p (const function_abi &callee_abi,
                              unsigned int regno, cselib_val *v)
{
  machine_mode mode = GET_MODE (v->val_rtx);
  if (mode == VOIDmode)
    {
      v = REG_VALUES (regno)->elt;
      if (!v)
        /* If we don't know what the mode of the constant value is, and we
           don't know what mode the register was set in, conservatively
           assume that the register is clobbered.  The value's going to be
           essentially useless in this case anyway.  */
        return true;
      mode = GET_MODE (v->val_rtx);
    }
  return callee_abi.clobbers_reg_p (mode, regno);
}

/* Record the effects of INSN.  */

void
cselib_process_insn (rtx_insn *insn)
{
  int i;
  rtx x;

  cselib_current_insn = insn;

  /* Forget everything at a CODE_LABEL or a setjmp.  */
  if ((LABEL_P (insn)
       || (CALL_P (insn)
           && find_reg_note (insn, REG_SETJMP, NULL)))
      && !cselib_preserve_constants)
    {
      cselib_reset_table (next_uid);
      cselib_current_insn = NULL;
      return;
    }

  if (! INSN_P (insn))
    {
      cselib_current_insn = NULL;
      return;
    }

  /* If this is a call instruction, forget anything stored in a
     call clobbered register, or, if this is not a const call, in
     memory.  */
  if (CALL_P (insn))
    {
      function_abi callee_abi = insn_callee_abi (insn);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        {
          elt_list **l = &REG_VALUES (i);
          while (*l)
            {
              cselib_val *v = (*l)->elt;
              if (v && cselib_invalidated_by_call_p (callee_abi, i, v))
                cselib_invalidate_regno_val (i, l);
              else
                l = &(*l)->next;
            }
        }

      /* Since it is not clear how cselib is going to be used, be
         conservative here and treat looping pure or const functions
         as if they were regular functions.  */
      if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
          || !(RTL_CONST_OR_PURE_CALL_P (insn)))
        cselib_invalidate_mem (callmem);
      else
        /* For const/pure calls, invalidate any argument slots because
           they are owned by the callee.  */
        for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
          if (GET_CODE (XEXP (x, 0)) == USE
              && MEM_P (XEXP (XEXP (x, 0), 0)))
            cselib_invalidate_mem (XEXP (XEXP (x, 0), 0));
    }

  cselib_record_sets (insn);

  /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
     after we have processed the insn.  */
  if (CALL_P (insn))
    {
      for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
        if (GET_CODE (XEXP (x, 0)) == CLOBBER)
          cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));

      /* Flush everything on setjmp.  */
      if (cselib_preserve_constants
          && find_reg_note (insn, REG_SETJMP, NULL))
        {
          cselib_preserve_only_values ();
          cselib_reset_table (next_uid);
        }
    }

  /* When an insn sets the hard frame pointer and frame_pointer_needed,
     invalidate stack_pointer_rtx, so that sp and {,h}fp based
     VALUEs are distinct.  */
  if (reload_completed
      && frame_pointer_needed
      && fp_setter_insn (insn))
    cselib_invalidate_rtx (stack_pointer_rtx);

  cselib_current_insn = NULL;

  if (n_useless_values > MAX_USELESS_VALUES
      /* remove_useless_values is linear in the hash table size.  Avoid
         quadratic behavior for very large hashtables with very few
         useless elements.  */
      && ((unsigned int)n_useless_values
          > (cselib_hash_table->elements () - n_debug_values) / 4))
    remove_useless_values ();
}
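
/* An illustrative driver sketch (not part of this file); actual
   clients such as dse.cc and var-tracking.cc differ in details.
   cselib gives block-local value numbers, so a client that wants no
   equivalences to survive across blocks can clear the tables at each
   block boundary.

     init_alias_analysis ();
     cselib_init (CSELIB_RECORD_MEMORY);
     basic_block bb;
     FOR_EACH_BB_FN (bb, cfun)
       {
         rtx_insn *insn;
         FOR_BB_INSNS (bb, insn)
           if (INSN_P (insn))
             cselib_process_insn (insn);
         cselib_clear_table ();  // Forget block-local equivalences.
       }
     cselib_finish ();
     end_alias_analysis ();  */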

/* Initialize cselib for one pass.  The caller must also call
   init_alias_analysis.  */

void
cselib_init (int record_what)
{
  cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
  cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
  cselib_any_perm_equivs = false;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
     see canon_true_dependence.  This is only created once.  */
  if (! callmem)
    callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));

  cselib_nregs = max_reg_num ();

  /* We preserve reg_values to allow expensive clearing of the whole thing.
     Reallocate it, however, if it happens to be too large.  */
  if (!reg_values || reg_values_size < cselib_nregs
      || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
    {
      free (reg_values);
      /* Some space for newly emitted instructions, so we don't end up
         reallocating in between passes.  */
      reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
      reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
    }
  used_regs = XNEWVEC (unsigned int, cselib_nregs);
  n_used_regs = 0;
  /* FIXME: enable sanitization (PR87845) */
  cselib_hash_table
    = new hash_table<cselib_hasher> (31, /* ggc */ false,
                                     /* sanitize_eq_and_hash */ false);
  if (cselib_preserve_constants)
    cselib_preserved_hash_table
      = new hash_table<cselib_hasher> (31, /* ggc */ false,
                                       /* sanitize_eq_and_hash */ false);
  next_uid = 1;
}

/* Called when the current user is done with cselib.  */

void
cselib_finish (void)
{
  bool preserved = cselib_preserve_constants;
  cselib_discard_hook = NULL;
  cselib_preserve_constants = false;
  cselib_any_perm_equivs = false;
  cfa_base_preserved_val = NULL;
  cfa_base_preserved_regno = INVALID_REGNUM;
  elt_list_pool.release ();
  elt_loc_list_pool.release ();
  cselib_val_pool.release ();
  value_pool.release ();
  cselib_clear_table ();
  delete cselib_hash_table;
  cselib_hash_table = NULL;
  if (preserved)
    delete cselib_preserved_hash_table;
  cselib_preserved_hash_table = NULL;
  free (used_regs);
  used_regs = 0;
  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;
  next_uid = 0;
}

/* Dump the cselib_val *X to FILE *OUT.  */

int
dump_cselib_val (cselib_val **x, FILE *out)
{
  cselib_val *v = *x;
  bool need_lf = true;

  print_inline_rtx (out, v->val_rtx, 0);

  if (v->locs)
    {
      struct elt_loc_list *l = v->locs;
      if (need_lf)
        {
          fputc ('\n', out);
          need_lf = false;
        }
      fputs (" locs:", out);
      do
        {
          if (l->setting_insn)
            fprintf (out, "\n  from insn %i ",
                     INSN_UID (l->setting_insn));
          else
            fprintf (out, "\n  ");
          print_inline_rtx (out, l->loc, 4);
        }
      while ((l = l->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no locs", out);
      need_lf = true;
    }

  if (v->addr_list)
    {
      struct elt_list *e = v->addr_list;
      if (need_lf)
        {
          fputc ('\n', out);
          need_lf = false;
        }
      fputs (" addr list:", out);
      do
        {
          fputs ("\n  ", out);
          print_inline_rtx (out, e->elt->val_rtx, 2);
        }
      while ((e = e->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no addrs", out);
      need_lf = true;
    }

  if (v->next_containing_mem == &dummy_val)
    fputs (" last mem\n", out);
  else if (v->next_containing_mem)
    {
      fputs (" next mem ", out);
      print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  else if (need_lf)
    fputc ('\n', out);

  return 1;
}

/* Dump to OUT everything in the CSELIB table.  */

void
dump_cselib_table (FILE *out)
{
  fprintf (out, "cselib hash table:\n");
  cselib_hash_table->traverse <FILE *, dump_cselib_val> (out);
  fprintf (out, "cselib preserved hash table:\n");
  cselib_preserved_hash_table->traverse <FILE *, dump_cselib_val> (out);
  if (first_containing_mem != &dummy_val)
    {
      fputs ("first mem ", out);
      print_inline_rtx (out, first_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  fprintf (out, "next uid %i\n", next_uid);
}

#include "gt-cselib.h"