1 | /* Variable tracking routines for the GNU compiler. |
2 | Copyright (C) 2002-2023 Free Software Foundation, Inc. |
3 | |
4 | This file is part of GCC. |
5 | |
6 | GCC is free software; you can redistribute it and/or modify it |
7 | under the terms of the GNU General Public License as published by |
8 | the Free Software Foundation; either version 3, or (at your option) |
9 | any later version. |
10 | |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT |
12 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY |
13 | or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public |
14 | License for more details. |
15 | |
16 | You should have received a copy of the GNU General Public License |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ |
19 | |
20 | /* This file contains the variable tracking pass. It computes where |
21 | variables are located (which registers or where in memory) at each position |
22 | in instruction stream and emits notes describing the locations. |
23 | Debug information (DWARF2 location lists) is finally generated from |
24 | these notes. |
25 | With this debug information, it is possible to show variables |
26 | even when debugging optimized code. |
27 | |
28 | How does the variable tracking pass work? |
29 | |
30 | First, it scans RTL code for uses, stores and clobbers (register/memory |
31 | references in instructions), for call insns and for stack adjustments |
32 | separately for each basic block and saves them to an array of micro |
33 | operations. |
34 | The micro operations of one instruction are ordered so that |
35 | pre-modifying stack adjustment < use < use with no var < call insn < |
36 | < clobber < set < post-modifying stack adjustment |
37 | |
38 | Then, a forward dataflow analysis is performed to find out how locations |
39 | of variables change through code and to propagate the variable locations |
40 | along control flow graph. |
41 | The IN set for basic block BB is computed as a union of OUT sets of BB's |
42 | predecessors, the OUT set for BB is copied from the IN set for BB and |
43 | is changed according to micro operations in BB. |
44 | |
45 | The IN and OUT sets for basic blocks consist of a current stack adjustment |
46 | (used for adjusting offset of variables addressed using stack pointer), |
   the table of structures describing the locations of parts of a variable
   and, for each physical register, a linked list describing its contents.
49 | The linked list is a list of variable parts stored in the register, |
50 | i.e. it is a list of triplets (reg, decl, offset) where decl is |
51 | REG_EXPR (reg) and offset is REG_OFFSET (reg). The linked list is used for |
   effectively deleting the appropriate variable parts when we set or clobber
   the register.
54 | |
55 | There may be more than one variable part in a register. The linked lists |
56 | should be pretty short so it is a good data structure here. |
57 | For example in the following code, register allocator may assign same |
58 | register to variables A and B, and both of them are stored in the same |
59 | register in CODE: |
60 | |
61 | if (cond) |
62 | set A; |
63 | else |
64 | set B; |
65 | CODE; |
66 | if (cond) |
67 | use A; |
68 | else |
69 | use B; |
70 | |
71 | Finally, the NOTE_INSN_VAR_LOCATION notes describing the variable locations |
   are emitted to appropriate positions in RTL code.  Each such note describes
73 | the location of one variable at the point in instruction stream where the |
74 | note is. There is no need to emit a note for each variable before each |
75 | instruction, we only emit these notes where the location of variable changes |
76 | (this means that we also emit notes for changes between the OUT set of the |
77 | previous block and the IN set of the current block). |
78 | |
79 | The notes consist of two parts: |
80 | 1. the declaration (from REG_EXPR or MEM_EXPR) |
81 | 2. the location of a variable - it is either a simple register/memory |
82 | reference (for simple variables, for example int), |
      or a parallel of register/memory references (for large variables
84 | which consist of several parts, for example long long). |
85 | |
86 | */ |
87 | |
88 | #include "config.h" |
89 | #include "system.h" |
90 | #include "coretypes.h" |
91 | #include "backend.h" |
92 | #include "target.h" |
93 | #include "rtl.h" |
94 | #include "tree.h" |
95 | #include "cfghooks.h" |
96 | #include "alloc-pool.h" |
97 | #include "tree-pass.h" |
98 | #include "memmodel.h" |
99 | #include "tm_p.h" |
100 | #include "insn-config.h" |
101 | #include "regs.h" |
102 | #include "emit-rtl.h" |
103 | #include "recog.h" |
104 | #include "diagnostic.h" |
105 | #include "varasm.h" |
106 | #include "stor-layout.h" |
107 | #include "cfgrtl.h" |
108 | #include "cfganal.h" |
109 | #include "reload.h" |
110 | #include "ira.h" |
111 | #include "lra.h" |
112 | #include "calls.h" |
113 | #include "tree-dfa.h" |
114 | #include "tree-ssa.h" |
115 | #include "cselib.h" |
116 | #include "tree-pretty-print.h" |
117 | #include "rtl-iter.h" |
118 | #include "fibonacci_heap.h" |
119 | #include "print-rtl.h" |
120 | #include "function-abi.h" |
121 | #include "mux-utils.h" |
122 | |
123 | typedef fibonacci_heap <long, basic_block_def> bb_heap_t; |
124 | |
125 | /* var-tracking.cc assumes that tree code with the same value as VALUE rtx code |
126 | has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl. |
127 | Currently the value is the same as IDENTIFIER_NODE, which has such |
128 | a property. If this compile time assertion ever fails, make sure that |
129 | the new tree code that equals (int) VALUE has the same property. */ |
130 | extern char check_value_val[(int) VALUE == (int) IDENTIFIER_NODE ? 1 : -1]; |
131 | |
/* Type of micro operation.  The enumerator order must be kept in sync
   with micro_operation_type_name below; the relative ordering of micro
   operations within one instruction is described in the overview
   comment at the top of this file.  */
enum micro_operation_type
{
  MO_USE,	/* Use location (REG or MEM).  */
  MO_USE_NO_VAR,/* Use location which is not associated with a variable
		   or the variable is not trackable.  */
  MO_VAL_USE,	/* Use location which is associated with a value.  */
  MO_VAL_LOC,	/* Use location which appears in a debug insn.  */
  MO_VAL_SET,	/* Set location associated with a value.  */
  MO_SET,	/* Set location.  */
  MO_COPY,	/* Copy the same portion of a variable from one
		   location to another.  */
  MO_CLOBBER,	/* Clobber location.  */
  MO_CALL,	/* Call insn.  */
  MO_ADJUST	/* Adjust stack pointer.  */

};
149 | |
/* Printable names of the micro operation types, for dumps.  Must be
   kept in sync with enum micro_operation_type above.  */
static const char * const ATTRIBUTE_UNUSED
micro_operation_type_name[] = {
  "MO_USE",
  "MO_USE_NO_VAR",
  "MO_VAL_USE",
  "MO_VAL_LOC",
  "MO_VAL_SET",
  "MO_SET",
  "MO_COPY",
  "MO_CLOBBER",
  "MO_CALL",
  "MO_ADJUST"
};
163 | |
/* Where shall the note be emitted?  BEFORE or AFTER the instruction.
   Notes emitted as AFTER_CALL are to take effect during the call,
   rather than after the call.  */
enum emit_note_where
{
  /* Emit the note before the instruction.  */
  EMIT_NOTE_BEFORE_INSN,
  /* Emit the note after the instruction.  */
  EMIT_NOTE_AFTER_INSN,
  /* Emit the note after a call instruction, taking effect during the
     call itself rather than after it.  */
  EMIT_NOTE_AFTER_CALL_INSN
};
173 | |
/* Structure holding information about micro operation.  */
struct micro_operation
{
  /* Type of micro operation.  Discriminates the union U below.  */
  enum micro_operation_type type;

  /* The instruction which the micro operation is in, for MO_USE,
     MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent
     instruction or note in the original flow (before any var-tracking
     notes are inserted, to simplify emission of notes), for MO_SET
     and MO_CLOBBER.  */
  rtx_insn *insn;

  union {
    /* Location.  For MO_SET and MO_COPY, this is the SET that
       performs the assignment, if known, otherwise it is the target
       of the assignment.  For MO_VAL_USE and MO_VAL_SET, it is a
       CONCAT of the VALUE and the LOC associated with it.  For
       MO_VAL_LOC, it is a CONCAT of the VALUE and the VAR_LOCATION
       associated with it.  */
    rtx loc;

    /* Stack adjustment, used only for MO_ADJUST.  */
    HOST_WIDE_INT adjust;
  } u;
};
200 | |
201 | |
202 | /* A declaration of a variable, or an RTL value being handled like a |
203 | declaration by pointer_mux. */ |
204 | typedef pointer_mux<tree_node, rtx_def> decl_or_value; |
205 | |
206 | /* Return true if a decl_or_value DV is a DECL or NULL. */ |
207 | static inline bool |
208 | dv_is_decl_p (decl_or_value dv) |
209 | { |
210 | return dv.is_first (); |
211 | } |
212 | |
213 | /* Return true if a decl_or_value is a VALUE rtl. */ |
214 | static inline bool |
215 | dv_is_value_p (decl_or_value dv) |
216 | { |
217 | return dv && !dv_is_decl_p (dv); |
218 | } |
219 | |
220 | /* Return the decl in the decl_or_value. */ |
221 | static inline tree |
222 | dv_as_decl (decl_or_value dv) |
223 | { |
224 | gcc_checking_assert (dv_is_decl_p (dv)); |
225 | return dv.known_first (); |
226 | } |
227 | |
228 | /* Return the value in the decl_or_value. */ |
229 | static inline rtx |
230 | dv_as_value (decl_or_value dv) |
231 | { |
232 | gcc_checking_assert (dv_is_value_p (dv)); |
233 | return dv.known_second (); |
234 | } |
235 | |
236 | |
/* Description of location of a part of a variable.  The content of a physical
   register is described by a chain of these structures.
   The chains are pretty short (usually 1 or 2 elements) and thus
   chain is the best data structure.  */
struct attrs
{
  /* Pointer to next member of the list.  */
  attrs *next;

  /* The rtx of register.  */
  rtx loc;

  /* The declaration (or VALUE) corresponding to LOC.  */
  decl_or_value dv;

  /* Offset from start of DECL.  */
  HOST_WIDE_INT offset;
};
255 | |
/* Structure for chaining the locations of a variable part.  */
struct location_chain
{
  /* Next element in the chain.  */
  location_chain *next;

  /* The location (REG, MEM or VALUE).  */
  rtx loc;

  /* The "value" stored in this location.  */
  rtx set_src;

  /* Initialization status of LOC.  */
  enum var_init_status init;
};
271 | |
/* A vector of loc_exp_dep holds the active dependencies of a one-part
   DV on VALUEs, i.e., the VALUEs expanded so as to form the current
   location of DV.  Each entry is also part of VALUE's linked-list of
   backlinks back to DV; the vector itself lives in the onepart_aux of
   the dependent DV (see onepart_aux::deps below).  */
struct loc_exp_dep
{
  /* The dependent DV.  */
  decl_or_value dv;
  /* The dependency VALUE or DECL_DEBUG.  */
  rtx value;
  /* The next entry in VALUE's backlinks list.  */
  struct loc_exp_dep *next;
  /* A pointer to the pointer to this entry (head or prev's next) in
     the doubly-linked list, so the entry can unlink itself.  */
  struct loc_exp_dep **pprev;
};
288 | |
289 | |
/* This data structure holds information about the depth of a variable
   expansion (see also VAR_LOC_DEPTH below).  */
struct expand_depth
{
  /* This measures the complexity of the expanded expression.  It
     grows by one for each level of expansion that adds more than one
     operand.  */
  int complexity;
  /* This counts the number of ENTRY_VALUE expressions in an
     expansion.  We want to minimize their use.  */
  int entryvals;
};
302 | |
303 | /* Type for dependencies actively used when expand FROM into cur_loc. */ |
304 | typedef vec<loc_exp_dep, va_heap, vl_embed> deps_vec; |
305 | |
/* This data structure is allocated for one-part variables at the time
   of emitting notes.  */
struct onepart_aux
{
  /* Doubly-linked list of dependent DVs.  These are DVs whose cur_loc
     computation used the expansion of this variable, and that ought
     to be notified should this variable change.  If the DV's cur_loc
     expanded to NULL, all components of the loc list are regarded as
     active, so that any changes in them give us a chance to get a
     location.  Otherwise, only components of the loc that expanded to
     non-NULL are regarded as active dependencies.  */
  loc_exp_dep *backlinks;
  /* This holds the LOC that was expanded into cur_loc.  We need only
     mark a one-part variable as changed if the FROM loc is removed,
     or if it has no known location and a loc is added, or if it gets
     a change notification from any of its active dependencies.  */
  rtx from;
  /* The depth of the cur_loc expression.  */
  expand_depth depth;
  /* Dependencies actively used when FROM is expanded into cur_loc.  */
  deps_vec deps;
};
328 | |
/* Structure describing one part of variable.  */
struct variable_part
{
  /* Chain of locations of the part.  */
  location_chain *loc_chain;

  /* Location which was last emitted to location list.  */
  rtx cur_loc;

  /* Access AUX through the VAR_PART_OFFSET / VAR_LOC_1PAUX accessors
     below, which verify the onepart discriminant in checking builds.  */
  union variable_aux
  {
    /* The offset in the variable, if !var->onepart.  */
    HOST_WIDE_INT offset;

    /* Pointer to auxiliary data, if var->onepart and emit_notes.  */
    struct onepart_aux *onepaux;
  } aux;
};
347 | |
348 | /* Maximum number of location parts. */ |
349 | #define MAX_VAR_PARTS 16 |
350 | |
/* Enumeration type used to discriminate various types of one-part
   variables; stored in variable::onepart below.  */
enum onepart_enum
{
  /* Not a one-part variable.  */
  NOT_ONEPART = 0,
  /* A one-part DECL that is not a DEBUG_EXPR_DECL.  */
  ONEPART_VDECL = 1,
  /* A DEBUG_EXPR_DECL.  */
  ONEPART_DEXPR = 2,
  /* A VALUE.  */
  ONEPART_VALUE = 3
};
364 | |
/* Structure describing where the variable is located.  */
struct variable
{
  /* The declaration of the variable, or an RTL value being handled
     like a declaration.  */
  decl_or_value dv;

  /* Reference count.  */
  int refcount;

  /* Number of variable parts.  */
  char n_var_parts;

  /* What type of DV this is, according to enum onepart_enum.  */
  ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT;

  /* True if this variable struct is currently in the
     changed_variables hash table.  */
  bool in_changed_variables;

  /* The variable parts.  Allocations from var_pool reserve room for up
     to MAX_VAR_PARTS entries, so this acts as a trailing array.  */
  variable_part var_part[1];
};
388 | |
389 | /* Pointer to the BB's information specific to variable tracking pass. */ |
390 | #define VTI(BB) ((variable_tracking_info *) (BB)->aux) |
391 | |
392 | /* Return MEM_OFFSET (MEM) as a HOST_WIDE_INT, or 0 if we can't. */ |
393 | |
394 | static inline HOST_WIDE_INT |
395 | int_mem_offset (const_rtx mem) |
396 | { |
397 | HOST_WIDE_INT offset; |
398 | if (MEM_OFFSET_KNOWN_P (mem) && MEM_OFFSET (mem).is_constant (const_value: &offset)) |
399 | return offset; |
400 | return 0; |
401 | } |
402 | |
403 | #if CHECKING_P && (GCC_VERSION >= 2007) |
404 | |
405 | /* Access VAR's Ith part's offset, checking that it's not a one-part |
406 | variable. */ |
407 | #define VAR_PART_OFFSET(var, i) __extension__ \ |
408 | (*({ variable *const __v = (var); \ |
409 | gcc_checking_assert (!__v->onepart); \ |
410 | &__v->var_part[(i)].aux.offset; })) |
411 | |
412 | /* Access VAR's one-part auxiliary data, checking that it is a |
413 | one-part variable. */ |
414 | #define VAR_LOC_1PAUX(var) __extension__ \ |
415 | (*({ variable *const __v = (var); \ |
416 | gcc_checking_assert (__v->onepart); \ |
417 | &__v->var_part[0].aux.onepaux; })) |
418 | |
419 | #else |
420 | #define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset) |
421 | #define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux) |
422 | #endif |
423 | |
424 | /* These are accessor macros for the one-part auxiliary data. When |
425 | convenient for users, they're guarded by tests that the data was |
426 | allocated. */ |
427 | #define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var) \ |
428 | ? VAR_LOC_1PAUX (var)->backlinks \ |
429 | : NULL) |
430 | #define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var) \ |
431 | ? &VAR_LOC_1PAUX (var)->backlinks \ |
432 | : NULL) |
433 | #define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from) |
434 | #define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth) |
435 | #define VAR_LOC_DEP_VEC(var) var_loc_dep_vec (var) |
436 | |
437 | /* Implements the VAR_LOC_DEP_VEC above as a function to work around |
438 | a bogus -Wnonnull (PR c/95554). */ |
439 | |
440 | static inline deps_vec* |
441 | var_loc_dep_vec (variable *var) |
442 | { |
443 | return VAR_LOC_1PAUX (var) ? &VAR_LOC_1PAUX (var)->deps : NULL; |
444 | } |
445 | |
446 | |
447 | typedef unsigned int dvuid; |
448 | |
449 | /* Return the uid of DV. */ |
450 | |
451 | static inline dvuid |
452 | dv_uid (decl_or_value dv) |
453 | { |
454 | if (dv_is_value_p (dv)) |
455 | return CSELIB_VAL_PTR (dv_as_value (dv))->uid; |
456 | else |
457 | return DECL_UID (dv_as_decl (dv)); |
458 | } |
459 | |
460 | /* Compute the hash from the uid. */ |
461 | |
462 | static inline hashval_t |
463 | dv_uid2hash (dvuid uid) |
464 | { |
465 | return uid; |
466 | } |
467 | |
468 | /* The hash function for a mask table in a shared_htab chain. */ |
469 | |
470 | static inline hashval_t |
471 | dv_htab_hash (decl_or_value dv) |
472 | { |
473 | return dv_uid2hash (uid: dv_uid (dv)); |
474 | } |
475 | |
476 | static void variable_htab_free (void *); |
477 | |
/* Variable hashtable helpers.  Hashing and comparison go through the
   uid of the variable's decl_or_value (see dv_htab_hash).  */

struct variable_hasher : pointer_hash <variable>
{
  typedef decl_or_value compare_type;
  static inline hashval_t hash (const variable *);
  static inline bool equal (const variable *, const decl_or_value);
  static inline void remove (variable *);
};
487 | |
488 | /* The hash function for variable_htab, computes the hash value |
489 | from the declaration of variable X. */ |
490 | |
491 | inline hashval_t |
492 | variable_hasher::hash (const variable *v) |
493 | { |
494 | return dv_htab_hash (dv: v->dv); |
495 | } |
496 | |
497 | /* Compare the declaration of variable X with declaration Y. */ |
498 | |
499 | inline bool |
500 | variable_hasher::equal (const variable *v, const decl_or_value y) |
501 | { |
502 | return v->dv == y; |
503 | } |
504 | |
/* Free the element VAR of VARIABLE_HTAB (its type is struct variable).  */

inline void
variable_hasher::remove (variable *var)
{
  variable_htab_free (var);
}
512 | |
513 | typedef hash_table<variable_hasher> variable_table_type; |
514 | typedef variable_table_type::iterator variable_iterator_type; |
515 | |
/* Structure for passing some other parameters to function
   emit_note_insn_var_location.  */
struct emit_note_data
{
  /* The instruction which the note will be emitted before/after.  */
  rtx_insn *insn;

  /* Where the note will be emitted, relative to INSN (see enum
     emit_note_where).  */
  enum emit_note_where where;

  /* The variables and values active at this point.  */
  variable_table_type *vars;
};
529 | |
/* Structure holding a refcounted hash table.  If refcount > 1,
   it must be first unshared before modified (copy-on-write).  */
struct shared_hash
{
  /* Reference count.  */
  int refcount;

  /* Actual hash table.  */
  variable_table_type *htab;
};
540 | |
/* Structure holding the IN or OUT set for a basic block.  */
struct dataflow_set
{
  /* Adjustment of stack offset.  */
  HOST_WIDE_INT stack_adjust;

  /* Attributes for registers (lists of attrs).  */
  attrs *regs[FIRST_PSEUDO_REGISTER];

  /* Variable locations.  */
  shared_hash *vars;

  /* The variables that are currently being traversed.  */
  shared_hash *traversed_vars;
};
556 | |
/* The structure (one for each basic block) containing the information
   needed for variable tracking.  Accessed through the VTI macro via
   the block's AUX field.  */
struct variable_tracking_info
{
  /* The vector of micro operations.  */
  vec<micro_operation> mos;

  /* The IN and OUT set for dataflow analysis.  */
  dataflow_set in;
  dataflow_set out;

  /* The permanent-in dataflow set for this block.  This is used to
     hold values for which we had to compute entry values.  ??? This
     should probably be dynamically allocated, to avoid using more
     memory in non-debug builds.  */
  dataflow_set *permp;

  /* Has the block been visited in DFS?  */
  bool visited;

  /* Has the block been flooded in VTA?  */
  bool flooded;

};
581 | |
/* Alloc pool for struct attrs.  */
583 | object_allocator<attrs> attrs_pool ("attrs pool" ); |
584 | |
/* Alloc pool for struct variable with MAX_VAR_PARTS entries.  */
586 | |
587 | static pool_allocator var_pool |
588 | ("variable_def pool" , sizeof (variable) + |
589 | (MAX_VAR_PARTS - 1) * sizeof (((variable *)NULL)->var_part[0])); |
590 | |
/* Alloc pool for struct variable with a single var_part entry.  */
592 | static pool_allocator valvar_pool |
593 | ("small variable_def pool" , sizeof (variable)); |
594 | |
595 | /* Alloc pool for struct location_chain. */ |
596 | static object_allocator<location_chain> location_chain_pool |
597 | ("location_chain pool" ); |
598 | |
599 | /* Alloc pool for struct shared_hash. */ |
600 | static object_allocator<shared_hash> shared_hash_pool ("shared_hash pool" ); |
601 | |
/* Alloc pool for struct loc_exp_dep for NOT_ONEPART variables.  */
603 | object_allocator<loc_exp_dep> loc_exp_dep_pool ("loc_exp_dep pool" ); |
604 | |
605 | /* Changed variables, notes will be emitted for them. */ |
606 | static variable_table_type *changed_variables; |
607 | |
608 | /* Shall notes be emitted? */ |
609 | static bool emit_notes; |
610 | |
611 | /* Values whose dynamic location lists have gone empty, but whose |
612 | cselib location lists are still usable. Use this to hold the |
613 | current location, the backlinks, etc, during emit_notes. */ |
614 | static variable_table_type *dropped_values; |
615 | |
616 | /* Empty shared hashtable. */ |
617 | static shared_hash *empty_shared_hash; |
618 | |
619 | /* Scratch register bitmap used by cselib_expand_value_rtx. */ |
620 | static bitmap scratch_regs = NULL; |
621 | |
622 | #ifdef HAVE_window_save |
623 | struct GTY(()) parm_reg { |
624 | rtx outgoing; |
625 | rtx incoming; |
626 | }; |
627 | |
628 | |
629 | /* Vector of windowed parameter registers, if any. */ |
630 | static vec<parm_reg, va_gc> *windowed_parm_regs = NULL; |
631 | #endif |
632 | |
633 | /* Variable used to tell whether cselib_process_insn called our hook. */ |
634 | static bool cselib_hook_called; |
635 | |
636 | /* Local function prototypes. */ |
637 | static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *, |
638 | HOST_WIDE_INT *); |
639 | static void insn_stack_adjust_offset_pre_post (rtx_insn *, HOST_WIDE_INT *, |
640 | HOST_WIDE_INT *); |
641 | static bool vt_stack_adjustments (void); |
642 | |
643 | static void init_attrs_list_set (attrs **); |
644 | static void attrs_list_clear (attrs **); |
645 | static attrs *attrs_list_member (attrs *, decl_or_value, HOST_WIDE_INT); |
646 | static void attrs_list_insert (attrs **, decl_or_value, HOST_WIDE_INT, rtx); |
647 | static void attrs_list_copy (attrs **, attrs *); |
648 | static void attrs_list_union (attrs **, attrs *); |
649 | |
650 | static variable **unshare_variable (dataflow_set *set, variable **slot, |
651 | variable *var, enum var_init_status); |
652 | static void vars_copy (variable_table_type *, variable_table_type *); |
653 | static tree var_debug_decl (tree); |
654 | static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx); |
655 | static void var_reg_delete_and_set (dataflow_set *, rtx, bool, |
656 | enum var_init_status, rtx); |
657 | static void var_reg_delete (dataflow_set *, rtx, bool); |
658 | static void var_regno_delete (dataflow_set *, int); |
659 | static void var_mem_set (dataflow_set *, rtx, enum var_init_status, rtx); |
660 | static void var_mem_delete_and_set (dataflow_set *, rtx, bool, |
661 | enum var_init_status, rtx); |
662 | static void var_mem_delete (dataflow_set *, rtx, bool); |
663 | |
664 | static void dataflow_set_init (dataflow_set *); |
665 | static void dataflow_set_clear (dataflow_set *); |
666 | static void dataflow_set_copy (dataflow_set *, dataflow_set *); |
667 | static int variable_union_info_cmp_pos (const void *, const void *); |
668 | static void dataflow_set_union (dataflow_set *, dataflow_set *); |
669 | static location_chain *find_loc_in_1pdv (rtx, variable *, |
670 | variable_table_type *); |
671 | static bool canon_value_cmp (rtx, rtx); |
672 | static int loc_cmp (rtx, rtx); |
673 | static bool variable_part_different_p (variable_part *, variable_part *); |
674 | static bool onepart_variable_different_p (variable *, variable *); |
675 | static bool variable_different_p (variable *, variable *); |
676 | static bool dataflow_set_different (dataflow_set *, dataflow_set *); |
677 | static void dataflow_set_destroy (dataflow_set *); |
678 | |
679 | static bool track_expr_p (tree, bool); |
680 | static void add_uses_1 (rtx *, void *); |
681 | static void add_stores (rtx, const_rtx, void *); |
682 | static bool compute_bb_dataflow (basic_block); |
683 | static bool vt_find_locations (void); |
684 | |
685 | static void dump_attrs_list (attrs *); |
686 | static void dump_var (variable *); |
687 | static void dump_vars (variable_table_type *); |
688 | static void dump_dataflow_set (dataflow_set *); |
689 | static void dump_dataflow_sets (void); |
690 | |
691 | static void set_dv_changed (decl_or_value, bool); |
692 | static void variable_was_changed (variable *, dataflow_set *); |
693 | static variable **set_slot_part (dataflow_set *, rtx, variable **, |
694 | decl_or_value, HOST_WIDE_INT, |
695 | enum var_init_status, rtx); |
696 | static void set_variable_part (dataflow_set *, rtx, |
697 | decl_or_value, HOST_WIDE_INT, |
698 | enum var_init_status, rtx, enum insert_option); |
699 | static variable **clobber_slot_part (dataflow_set *, rtx, |
700 | variable **, HOST_WIDE_INT, rtx); |
701 | static void clobber_variable_part (dataflow_set *, rtx, |
702 | decl_or_value, HOST_WIDE_INT, rtx); |
703 | static variable **delete_slot_part (dataflow_set *, rtx, variable **, |
704 | HOST_WIDE_INT); |
705 | static void delete_variable_part (dataflow_set *, rtx, |
706 | decl_or_value, HOST_WIDE_INT); |
707 | static void emit_notes_in_bb (basic_block, dataflow_set *); |
708 | static void vt_emit_notes (void); |
709 | |
710 | static void vt_add_function_parameters (void); |
711 | static bool vt_initialize (void); |
712 | static void vt_finalize (void); |
713 | |
714 | /* Callback for stack_adjust_offset_pre_post, called via for_each_inc_dec. */ |
715 | |
716 | static int |
717 | stack_adjust_offset_pre_post_cb (rtx, rtx op, rtx dest, rtx src, rtx srcoff, |
718 | void *arg) |
719 | { |
720 | if (dest != stack_pointer_rtx) |
721 | return 0; |
722 | |
723 | switch (GET_CODE (op)) |
724 | { |
725 | case PRE_INC: |
726 | case PRE_DEC: |
727 | ((HOST_WIDE_INT *)arg)[0] -= INTVAL (srcoff); |
728 | return 0; |
729 | case POST_INC: |
730 | case POST_DEC: |
731 | ((HOST_WIDE_INT *)arg)[1] -= INTVAL (srcoff); |
732 | return 0; |
733 | case PRE_MODIFY: |
734 | case POST_MODIFY: |
735 | /* We handle only adjustments by constant amount. */ |
736 | gcc_assert (GET_CODE (src) == PLUS |
737 | && CONST_INT_P (XEXP (src, 1)) |
738 | && XEXP (src, 0) == stack_pointer_rtx); |
739 | ((HOST_WIDE_INT *)arg)[GET_CODE (op) == POST_MODIFY] |
740 | -= INTVAL (XEXP (src, 1)); |
741 | return 0; |
742 | default: |
743 | gcc_unreachable (); |
744 | } |
745 | } |
746 | |
747 | /* Given a SET, calculate the amount of stack adjustment it contains |
748 | PRE- and POST-modifying stack pointer. |
749 | This function is similar to stack_adjust_offset. */ |
750 | |
751 | static void |
752 | stack_adjust_offset_pre_post (rtx pattern, HOST_WIDE_INT *pre, |
753 | HOST_WIDE_INT *post) |
754 | { |
755 | rtx src = SET_SRC (pattern); |
756 | rtx dest = SET_DEST (pattern); |
757 | enum rtx_code code; |
758 | |
759 | if (dest == stack_pointer_rtx) |
760 | { |
761 | /* (set (reg sp) (plus (reg sp) (const_int))) */ |
762 | code = GET_CODE (src); |
763 | if (! (code == PLUS || code == MINUS) |
764 | || XEXP (src, 0) != stack_pointer_rtx |
765 | || !CONST_INT_P (XEXP (src, 1))) |
766 | return; |
767 | |
768 | if (code == MINUS) |
769 | *post += INTVAL (XEXP (src, 1)); |
770 | else |
771 | *post -= INTVAL (XEXP (src, 1)); |
772 | return; |
773 | } |
774 | HOST_WIDE_INT res[2] = { 0, 0 }; |
775 | for_each_inc_dec (pattern, stack_adjust_offset_pre_post_cb, arg: res); |
776 | *pre += res[0]; |
777 | *post += res[1]; |
778 | } |
779 | |
780 | /* Given an INSN, calculate the amount of stack adjustment it contains |
781 | PRE- and POST-modifying stack pointer. */ |
782 | |
783 | static void |
784 | insn_stack_adjust_offset_pre_post (rtx_insn *insn, HOST_WIDE_INT *pre, |
785 | HOST_WIDE_INT *post) |
786 | { |
787 | rtx pattern; |
788 | |
789 | *pre = 0; |
790 | *post = 0; |
791 | |
792 | pattern = PATTERN (insn); |
793 | if (RTX_FRAME_RELATED_P (insn)) |
794 | { |
795 | rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX); |
796 | if (expr) |
797 | pattern = XEXP (expr, 0); |
798 | } |
799 | |
800 | if (GET_CODE (pattern) == SET) |
801 | stack_adjust_offset_pre_post (pattern, pre, post); |
802 | else if (GET_CODE (pattern) == PARALLEL |
803 | || GET_CODE (pattern) == SEQUENCE) |
804 | { |
805 | int i; |
806 | |
807 | /* There may be stack adjustments inside compound insns. Search |
808 | for them. */ |
809 | for ( i = XVECLEN (pattern, 0) - 1; i >= 0; i--) |
810 | if (GET_CODE (XVECEXP (pattern, 0, i)) == SET) |
811 | stack_adjust_offset_pre_post (XVECEXP (pattern, 0, i), pre, post); |
812 | } |
813 | } |
814 | |
/* Compute stack adjustments for all blocks by traversing DFS tree.
   Return true when the adjustments on all incoming edges are consistent.
   Heavily borrowed from pre_and_rev_post_order_compute.  */

static bool
vt_stack_adjustments (void)
{
  edge_iterator *stack;
  int sp;

  /* Initialize entry block.  */
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust
    = INCOMING_FRAME_SP_OFFSET;

  /* Allocate stack for back-tracking up CFG.  */
  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1);
  sp = 0;

  /* Push the first edge on to the stack.  */
  stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs);

  while (sp)
    {
      edge_iterator ei;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      ei = stack[sp - 1];
      src = ei_edge (i: ei)->src;
      dest = ei_edge (i: ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (!VTI (dest)->visited)
	{
	  rtx_insn *insn;
	  HOST_WIDE_INT pre, post, offset;
	  VTI (dest)->visited = true;
	  /* DEST's in-offset is SRC's out-offset along this first
	     visited incoming edge; other incoming edges are checked
	     for consistency against it below.  */
	  VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust;

	  /* Accumulate the pre- and post-adjustments of every insn in
	     DEST to obtain DEST's out-offset.  */
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
	    for (insn = BB_HEAD (dest);
		 insn != NEXT_INSN (BB_END (dest));
		 insn = NEXT_INSN (insn))
	      if (INSN_P (insn))
		{
		  insn_stack_adjust_offset_pre_post (insn, pre: &pre, post: &post);
		  offset += pre + post;
		}

	  VTI (dest)->out.stack_adjust = offset;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = ei_start (dest->succs);
	}
      else
	{
	  /* We can end up with different stack adjustments for the exit block
	     of a shrink-wrapped function if stack_adjust_offset_pre_post
	     doesn't understand the rtx pattern used to restore the stack
	     pointer in the epilogue.  For example, on s390(x), the stack
	     pointer is often restored via a load-multiple instruction
	     and so no stack_adjust offset is recorded for it.  This means
	     that the stack offset at the end of the epilogue block is the
	     same as the offset before the epilogue, whereas other paths
	     to the exit block will have the correct stack_adjust.

	     It is safe to ignore these differences because (a) we never
	     use the stack_adjust for the exit block in this pass and
	     (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped
	     function are correct.

	     We must check whether the adjustments on other edges are
	     the same though.  */
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	      && VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust)
	    {
	      /* Inconsistent incoming adjustments: give up.  */
	      free (ptr: stack);
	      return false;
	    }

	  if (! ei_one_before_end_p (i: ei))
	    /* Go to the next edge.  */
	    ei_next (i: &stack[sp - 1]);
	  else
	    /* Return to previous level if there are no more edges.  */
	    sp--;
	}
    }

  free (ptr: stack);
  return true;
}
913 | |
/* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or
   hard_frame_pointer_rtx is being mapped to it and offset for it.
   adjust_mems checks cfa_base_rtx for non-NULL before using it.  */
static rtx cfa_base_rtx;
static HOST_WIDE_INT cfa_base_offset;
918 | |
919 | /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx |
920 | or hard_frame_pointer_rtx. */ |
921 | |
922 | static inline rtx |
923 | compute_cfa_pointer (poly_int64 adjustment) |
924 | { |
925 | return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset); |
926 | } |
927 | |
/* Adjustment for hard_frame_pointer_rtx to cfa base reg,
   or -1 if the replacement shouldn't be done (adjust_mems tests
   this sentinel with maybe_ne).  */
static poly_int64 hard_frame_pointer_adjustment = -1;
931 | |
/* Data for adjust_mems callback.  */

class adjust_mem_data
{
public:
  /* True while processing the destination of a SET (a store).  */
  bool store;
  /* Mode of the enclosing MEM while its address is being processed;
     VOIDmode outside of a MEM address.  */
  machine_mode mem_mode;
  /* Adjustment applied when rewriting stack_pointer_rtx to a
     CFA-based expression.  */
  HOST_WIDE_INT stack_adjust;
  /* SETs recording the side effects of replaced auto-inc/dec/modify
     addresses; adjust_insn appends them to the insn's pattern.  */
  auto_vec<rtx> side_effects;
};
942 | |
/* Helper for adjust_mems.  Return true if X is suitable for
   transformation of wider mode arithmetics to narrower mode.
   SUBREG is the lowpart SUBREG wrapping X; its mode is the target
   narrow mode.  */

static bool
use_narrower_mode_test (rtx x, const_rtx subreg)
{
  subrtx_var_iterator::array_type array;
  FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
    {
      rtx x = *iter;
      /* Constants can always be represented in the narrower mode.  */
      if (CONSTANT_P (x))
	iter.skip_subrtxes ();
      else
	switch (GET_CODE (x))
	  {
	  case REG:
	    /* Don't narrow a REG that cselib tracks in the wide mode,
	       and require that the lowpart subreg of it is valid.  */
	    if (cselib_lookup (x, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode))
	      return false;
	    if (!validate_subreg (GET_MODE (subreg), GET_MODE (x), x,
				  subreg_lowpart_offset (GET_MODE (subreg),
							 GET_MODE (x))))
	      return false;
	    break;
	  case PLUS:
	  case MINUS:
	  case MULT:
	    /* These distribute over lowpart truncation; recurse into
	       both operands.  */
	    break;
	  case ASHIFT:
	    if (GET_MODE (XEXP (x, 1)) != VOIDmode)
	      {
		enum machine_mode mode = GET_MODE (subreg);
		rtx op1 = XEXP (x, 1);
		enum machine_mode op1_mode = GET_MODE (op1);
		if (GET_MODE_PRECISION (mode: as_a <scalar_int_mode> (m: mode))
		    < GET_MODE_PRECISION (mode: as_a <scalar_int_mode> (m: op1_mode)))
		  {
		    poly_uint64 byte = subreg_lowpart_offset (outermode: mode, innermode: op1_mode);
		    if (GET_CODE (op1) == SUBREG || GET_CODE (op1) == CONCAT)
		      {
			if (!simplify_subreg (outermode: mode, op: op1, innermode: op1_mode, byte))
			  return false;
		      }
		    else if (!validate_subreg (mode, op1_mode, op1, byte))
		      return false;
		  }
	      }
	    /* Only the shifted operand is narrowed; the shift count
	       was checked above.  */
	    iter.substitute (XEXP (x, 0));
	    break;
	  default:
	    return false;
	  }
    }
  return true;
}
997 | |
/* Transform X into narrower mode MODE from wider mode WMODE.
   X must already have been vetted by use_narrower_mode_test.  */

static rtx
use_narrower_mode (rtx x, scalar_int_mode mode, scalar_int_mode wmode)
{
  rtx op0, op1;
  /* Constants are simply truncated to the narrower mode.  */
  if (CONSTANT_P (x))
    return lowpart_subreg (outermode: mode, op: x, innermode: wmode);
  switch (GET_CODE (x))
    {
    case REG:
      return lowpart_subreg (outermode: mode, op: x, innermode: wmode);
    case PLUS:
    case MINUS:
    case MULT:
      /* Narrow both operands recursively and rebuild the operation.  */
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = use_narrower_mode (XEXP (x, 1), mode, wmode);
      return simplify_gen_binary (GET_CODE (x), mode, op0, op1);
    case ASHIFT:
      op0 = use_narrower_mode (XEXP (x, 0), mode, wmode);
      op1 = XEXP (x, 1);
      /* Ensure shift amount is not wider than mode.  */
      if (GET_MODE (op1) == VOIDmode)
	op1 = lowpart_subreg (outermode: mode, op: op1, innermode: wmode);
      else if (GET_MODE_PRECISION (mode)
	       < GET_MODE_PRECISION (mode: as_a <scalar_int_mode> (GET_MODE (op1))))
	op1 = lowpart_subreg (outermode: mode, op: op1, GET_MODE (op1));
      return simplify_gen_binary (code: ASHIFT, mode, op0, op1);
    default:
      /* use_narrower_mode_test rejects all other codes.  */
      gcc_unreachable ();
    }
}
1030 | |
/* Helper function for adjusting used MEMs.  Callback for
   simplify_replace_fn_rtx: return a replacement rtx for LOC, or
   NULL_RTX to let the walk descend into LOC's operands.  DATA is the
   adjust_mem_data context.  */

static rtx
adjust_mems (rtx loc, const_rtx old_rtx, void *data)
{
  class adjust_mem_data *amd = (class adjust_mem_data *) data;
  rtx mem, addr = loc, tem;
  machine_mode mem_mode_save;
  bool store_save;
  scalar_int_mode tem_mode, tem_subreg_mode;
  poly_int64 size;
  switch (GET_CODE (loc))
    {
    case REG:
      /* Don't do any sp or fp replacements outside of MEM addresses
	 on the LHS.  */
      if (amd->mem_mode == VOIDmode && amd->store)
	return loc;
      /* Rewrite sp/fp references as CFA-based expressions.  */
      if (loc == stack_pointer_rtx
	  && !frame_pointer_needed
	  && cfa_base_rtx)
	return compute_cfa_pointer (adjustment: amd->stack_adjust);
      else if (loc == hard_frame_pointer_rtx
	       && frame_pointer_needed
	       && maybe_ne (a: hard_frame_pointer_adjustment, b: -1)
	       && cfa_base_rtx)
	return compute_cfa_pointer (adjustment: hard_frame_pointer_adjustment);
      gcc_checking_assert (loc != virtual_incoming_args_rtx);
      return loc;
    case MEM:
      mem = loc;
      if (!amd->store)
	{
	  /* For uses, try to delegitimize the whole MEM first.  */
	  mem = targetm.delegitimize_address (mem);
	  if (mem != loc && !MEM_P (mem))
	    return simplify_replace_fn_rtx (mem, old_rtx, fn: adjust_mems, data);
	}

      addr = XEXP (mem, 0);
      /* Process the address with mem_mode set so that the REG and
	 auto-modify cases know they are inside a MEM address.  */
      mem_mode_save = amd->mem_mode;
      amd->mem_mode = GET_MODE (mem);
      store_save = amd->store;
      amd->store = false;
      addr = simplify_replace_fn_rtx (addr, old_rtx, fn: adjust_mems, data);
      amd->store = store_save;
      amd->mem_mode = mem_mode_save;
      if (mem == loc)
	addr = targetm.delegitimize_address (addr);
      if (addr != XEXP (mem, 0))
	mem = replace_equiv_address_nv (mem, addr);
      if (!amd->store)
	mem = avoid_constant_pool_reference (mem);
      return mem;
    case PRE_INC:
    case PRE_DEC:
      /* Pre-modification: the adjusted address is the effective
	 address of the access.  */
      size = GET_MODE_SIZE (mode: amd->mem_mode);
      addr = plus_constant (GET_MODE (loc), XEXP (loc, 0),
			    GET_CODE (loc) == PRE_INC ? size : -size);
      /* FALLTHRU */
    case POST_INC:
    case POST_DEC:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, fn: adjust_mems, data);
      /* Record the increment/decrement of the base register as an
	 extra SET for adjust_insn to append to the pattern.  */
      size = GET_MODE_SIZE (mode: amd->mem_mode);
      tem = plus_constant (GET_MODE (loc), XEXP (loc, 0),
			   (GET_CODE (loc) == PRE_INC
			    || GET_CODE (loc) == POST_INC) ? size : -size);
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (tem, old_rtx, fn: adjust_mems, data);
      amd->store = store_save;
      amd->side_effects.safe_push (gen_rtx_SET (XEXP (loc, 0), tem));
      return addr;
    case PRE_MODIFY:
      /* Pre-modify: the new value is the effective address.  */
      addr = XEXP (loc, 1);
      /* FALLTHRU */
    case POST_MODIFY:
      if (addr == loc)
	addr = XEXP (loc, 0);
      gcc_assert (amd->mem_mode != VOIDmode);
      addr = simplify_replace_fn_rtx (addr, old_rtx, fn: adjust_mems, data);
      store_save = amd->store;
      amd->store = false;
      tem = simplify_replace_fn_rtx (XEXP (loc, 1), old_rtx,
				     fn: adjust_mems, data);
      amd->store = store_save;
      /* Likewise record the modification as a separate SET.  */
      amd->side_effects.safe_push (gen_rtx_SET (XEXP (loc, 0), tem));
      return addr;
    case SUBREG:
      /* First try without delegitimization of whole MEMs and
	 avoid_constant_pool_reference, which is more likely to succeed.  */
      store_save = amd->store;
      amd->store = true;
      addr = simplify_replace_fn_rtx (SUBREG_REG (loc), old_rtx, fn: adjust_mems,
				      data);
      amd->store = store_save;
      mem = simplify_replace_fn_rtx (addr, old_rtx, fn: adjust_mems, data);
      if (mem == SUBREG_REG (loc))
	{
	  tem = loc;
	  goto finish_subreg;
	}
      tem = simplify_gen_subreg (GET_MODE (loc), op: mem,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem)
	goto finish_subreg;
      tem = simplify_gen_subreg (GET_MODE (loc), op: addr,
				 GET_MODE (SUBREG_REG (loc)),
				 SUBREG_BYTE (loc));
      if (tem == NULL_RTX)
	tem = gen_rtx_raw_SUBREG (GET_MODE (loc), addr, SUBREG_BYTE (loc));
    finish_subreg:
      /* Where possible, express a lowpart SUBREG of wider arithmetic
	 as the same arithmetic performed directly in the narrower
	 mode.  */
      if (MAY_HAVE_DEBUG_BIND_INSNS
	  && GET_CODE (tem) == SUBREG
	  && (GET_CODE (SUBREG_REG (tem)) == PLUS
	      || GET_CODE (SUBREG_REG (tem)) == MINUS
	      || GET_CODE (SUBREG_REG (tem)) == MULT
	      || GET_CODE (SUBREG_REG (tem)) == ASHIFT)
	  && is_a <scalar_int_mode> (GET_MODE (tem), result: &tem_mode)
	  && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (tem)),
				     result: &tem_subreg_mode)
	  && (GET_MODE_PRECISION (mode: tem_mode)
	      < GET_MODE_PRECISION (mode: tem_subreg_mode))
	  && subreg_lowpart_p (tem)
	  && use_narrower_mode_test (SUBREG_REG (tem), subreg: tem))
	return use_narrower_mode (SUBREG_REG (tem), mode: tem_mode, wmode: tem_subreg_mode);
      return tem;
    case ASM_OPERANDS:
      /* Don't do any replacements in second and following
	 ASM_OPERANDS of inline-asm with multiple sets.
	 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC
	 and ASM_OPERANDS_LABEL_VEC need to be equal between
	 all the ASM_OPERANDs in the insn and adjust_insn will
	 fix this up.  */
      if (ASM_OPERANDS_OUTPUT_IDX (loc) != 0)
	return loc;
      break;
    default:
      break;
    }
  return NULL_RTX;
}
1176 | |
1177 | /* Helper function for replacement of uses. */ |
1178 | |
1179 | static void |
1180 | adjust_mem_uses (rtx *x, void *data) |
1181 | { |
1182 | rtx new_x = simplify_replace_fn_rtx (*x, NULL_RTX, fn: adjust_mems, data); |
1183 | if (new_x != *x) |
1184 | validate_change (NULL_RTX, x, new_x, true); |
1185 | } |
1186 | |
1187 | /* Helper function for replacement of stores. */ |
1188 | |
1189 | static void |
1190 | adjust_mem_stores (rtx loc, const_rtx expr, void *data) |
1191 | { |
1192 | if (MEM_P (loc)) |
1193 | { |
1194 | rtx new_dest = simplify_replace_fn_rtx (SET_DEST (expr), NULL_RTX, |
1195 | fn: adjust_mems, data); |
1196 | if (new_dest != SET_DEST (expr)) |
1197 | { |
1198 | rtx xexpr = CONST_CAST_RTX (expr); |
1199 | validate_change (NULL_RTX, &SET_DEST (xexpr), new_dest, true); |
1200 | } |
1201 | } |
1202 | } |
1203 | |
/* Simplify INSN.  Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes,
   replace them with their value in the insn and add the side-effects
   as other sets to the insn.  BB provides the stack adjustment in
   effect after the block.  */

static void
adjust_insn (basic_block bb, rtx_insn *insn)
{
  rtx set;

#ifdef HAVE_window_save
  /* If the target machine has an explicit window save instruction, the
     transformation OUTGOING_REGNO -> INCOMING_REGNO is done there.  */
  if (RTX_FRAME_RELATED_P (insn)
      && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX))
    {
      unsigned int i, nregs = vec_safe_length (windowed_parm_regs);
      rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2));
      parm_reg *p;

      /* Build a SET plus a CLOBBER for each windowed parameter reg.  */
      FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p)
	{
	  XVECEXP (rtl, 0, i * 2)
	    = gen_rtx_SET (p->incoming, p->outgoing);
	  /* Do not clobber the attached DECL, but only the REG.  */
	  XVECEXP (rtl, 0, i * 2 + 1)
	    = gen_rtx_CLOBBER (GET_MODE (p->outgoing),
			       gen_raw_REG (GET_MODE (p->outgoing),
					    REGNO (p->outgoing)));
	}

      validate_change (NULL_RTX, &PATTERN (insn), rtl, true);
      return;
    }
#endif

  adjust_mem_data amd;
  amd.mem_mode = VOIDmode;
  amd.stack_adjust = -VTI (bb)->out.stack_adjust;

  /* First rewrite the store destinations...  */
  amd.store = true;
  note_stores (insn, adjust_mem_stores, &amd);

  /* ...then the uses.  */
  amd.store = false;
  if (GET_CODE (PATTERN (insn)) == PARALLEL
      && asm_noperands (PATTERN (insn)) > 0
      && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      rtx body, set0;
      int i;

      /* inline-asm with multiple sets is tiny bit more complicated,
	 because the 3 vectors in ASM_OPERANDS need to be shared between
	 all ASM_OPERANDS in the instruction.  adjust_mems will
	 not touch ASM_OPERANDS other than the first one, asm_noperands
	 test above needs to be called before that (otherwise it would fail)
	 and afterwards this code fixes it up.  */
      note_uses (&PATTERN (insn), adjust_mem_uses, &amd);
      body = PATTERN (insn);
      set0 = XVECEXP (body, 0, 0);
      gcc_checking_assert (GET_CODE (set0) == SET
			   && GET_CODE (SET_SRC (set0)) == ASM_OPERANDS
			   && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set0)) == 0);
      for (i = 1; i < XVECLEN (body, 0); i++)
	if (GET_CODE (XVECEXP (body, 0, i)) != SET)
	  break;
	else
	  {
	    set = XVECEXP (body, 0, i);
	    gcc_checking_assert (GET_CODE (SET_SRC (set)) == ASM_OPERANDS
				 && ASM_OPERANDS_OUTPUT_IDX (SET_SRC (set))
				    == i);
	    if (ASM_OPERANDS_INPUT_VEC (SET_SRC (set))
		!= ASM_OPERANDS_INPUT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set))
		   != ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0))
		|| ASM_OPERANDS_LABEL_VEC (SET_SRC (set))
		   != ASM_OPERANDS_LABEL_VEC (SET_SRC (set0)))
	      {
		/* Re-share the three vectors from the first
		   ASM_OPERANDS in a shallow copy of this one.  */
		rtx newsrc = shallow_copy_rtx (SET_SRC (set));
		ASM_OPERANDS_INPUT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_VEC (SET_SRC (set0));
		ASM_OPERANDS_INPUT_CONSTRAINT_VEC (newsrc)
		  = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (SET_SRC (set0));
		ASM_OPERANDS_LABEL_VEC (newsrc)
		  = ASM_OPERANDS_LABEL_VEC (SET_SRC (set0));
		validate_change (NULL_RTX, &SET_SRC (set), newsrc, true);
	      }
	  }
    }
  else
    note_uses (&PATTERN (insn), adjust_mem_uses, &amd);

  /* For read-only MEMs containing some constant, prefer those
     constants.  */
  set = single_set (insn);
  if (set && MEM_P (SET_SRC (set)) && MEM_READONLY_P (SET_SRC (set)))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true);
    }

  if (!amd.side_effects.is_empty ())
    {
      /* Append the SETs recorded for replaced auto-inc/dec/modify
	 addresses as additional elements of a PARALLEL pattern.  */
      rtx *pat, new_pat;
      int i, oldn;

      pat = &PATTERN (insn);
      if (GET_CODE (*pat) == COND_EXEC)
	pat = &COND_EXEC_CODE (*pat);
      if (GET_CODE (*pat) == PARALLEL)
	oldn = XVECLEN (*pat, 0);
      else
	oldn = 1;
      unsigned int newn = amd.side_effects.length ();
      new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn));
      if (GET_CODE (*pat) == PARALLEL)
	for (i = 0; i < oldn; i++)
	  XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i);
      else
	XVECEXP (new_pat, 0, 0) = *pat;

      rtx effect;
      unsigned int j;
      FOR_EACH_VEC_ELT_REVERSE (amd.side_effects, j, effect)
	XVECEXP (new_pat, 0, j + oldn) = effect;
      validate_change (NULL_RTX, pat, new_pat, true);
    }
}
1334 | |
1335 | /* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV. */ |
1336 | static inline rtx |
1337 | dv_as_rtx (decl_or_value dv) |
1338 | { |
1339 | tree decl; |
1340 | |
1341 | if (dv_is_value_p (dv)) |
1342 | return dv_as_value (dv); |
1343 | |
1344 | decl = dv_as_decl (dv); |
1345 | |
1346 | gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL); |
1347 | return DECL_RTL_KNOWN_SET (decl); |
1348 | } |
1349 | |
/* Return nonzero if a decl_or_value must not have more than one
   variable part.  The returned value discriminates among various
   kinds of one-part DVs according to enum onepart_enum.  */
static inline onepart_enum
dv_onepart_p (decl_or_value dv)
{
  tree decl;

  /* Without debug bind insns nothing is tracked as one-part.  */
  if (!MAY_HAVE_DEBUG_BIND_INSNS)
    return NOT_ONEPART;

  if (dv_is_value_p (dv))
    return ONEPART_VALUE;

  decl = dv_as_decl (dv);

  if (TREE_CODE (decl) == DEBUG_EXPR_DECL)
    return ONEPART_DEXPR;

  if (target_for_debug_bind (decl) != NULL_TREE)
    return ONEPART_VDECL;

  return NOT_ONEPART;
}
1374 | |
1375 | /* Return the variable pool to be used for a dv of type ONEPART. */ |
1376 | static inline pool_allocator & |
1377 | onepart_pool (onepart_enum onepart) |
1378 | { |
1379 | return onepart ? valvar_pool : var_pool; |
1380 | } |
1381 | |
1382 | /* Allocate a variable_def from the corresponding variable pool. */ |
1383 | static inline variable * |
1384 | onepart_pool_allocate (onepart_enum onepart) |
1385 | { |
1386 | return (variable*) onepart_pool (onepart).allocate (); |
1387 | } |
1388 | |
1389 | /* Build a decl_or_value out of a decl. */ |
1390 | static inline decl_or_value |
1391 | dv_from_decl (tree decl) |
1392 | { |
1393 | decl_or_value dv = decl; |
1394 | gcc_checking_assert (dv_is_decl_p (dv)); |
1395 | return dv; |
1396 | } |
1397 | |
1398 | /* Build a decl_or_value out of a value. */ |
1399 | static inline decl_or_value |
1400 | dv_from_value (rtx value) |
1401 | { |
1402 | decl_or_value dv = value; |
1403 | gcc_checking_assert (dv_is_value_p (dv)); |
1404 | return dv; |
1405 | } |
1406 | |
1407 | /* Return a value or the decl of a debug_expr as a decl_or_value. */ |
1408 | static inline decl_or_value |
1409 | dv_from_rtx (rtx x) |
1410 | { |
1411 | decl_or_value dv; |
1412 | |
1413 | switch (GET_CODE (x)) |
1414 | { |
1415 | case DEBUG_EXPR: |
1416 | dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x)); |
1417 | gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x); |
1418 | break; |
1419 | |
1420 | case VALUE: |
1421 | dv = dv_from_value (value: x); |
1422 | break; |
1423 | |
1424 | default: |
1425 | gcc_unreachable (); |
1426 | } |
1427 | |
1428 | return dv; |
1429 | } |
1430 | |
1431 | extern void debug_dv (decl_or_value dv); |
1432 | |
1433 | DEBUG_FUNCTION void |
1434 | debug_dv (decl_or_value dv) |
1435 | { |
1436 | if (dv_is_value_p (dv)) |
1437 | debug_rtx (dv_as_value (dv)); |
1438 | else |
1439 | debug_generic_stmt (dv_as_decl (dv)); |
1440 | } |
1441 | |
static void loc_exp_dep_clear (variable *var);

/* Free the element of VARIABLE_HTAB (its type is struct variable_def).
   Drops one reference; the element is actually freed only when the
   last reference goes away.  */

static void
variable_htab_free (void *elem)
{
  int i;
  variable *var = (variable *) elem;
  location_chain *node, *next;

  gcc_checking_assert (var->refcount > 0);

  var->refcount--;
  if (var->refcount > 0)
    return;

  /* Free the location chains of all variable parts.  */
  for (i = 0; i < var->n_var_parts; i++)
    {
      for (node = var->var_part[i].loc_chain; node; node = next)
	{
	  next = node->next;
	  delete node;
	}
      var->var_part[i].loc_chain = NULL;
    }
  /* Release the one-part auxiliary data, if any.  */
  if (var->onepart && VAR_LOC_1PAUX (var))
    {
      loc_exp_dep_clear (var);
      if (VAR_LOC_DEP_LST (var))
	VAR_LOC_DEP_LST (var)->pprev = NULL;
      XDELETE (VAR_LOC_1PAUX (var));
      /* These may be reused across functions, so reset
	 e.g. NO_LOC_P.  */
      if (var->onepart == ONEPART_DEXPR)
	set_dv_changed (var->dv, true);
    }
  onepart_pool (onepart: var->onepart).remove (object: var);
}
1481 | |
1482 | /* Initialize the set (array) SET of attrs to empty lists. */ |
1483 | |
1484 | static void |
1485 | init_attrs_list_set (attrs **set) |
1486 | { |
1487 | int i; |
1488 | |
1489 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
1490 | set[i] = NULL; |
1491 | } |
1492 | |
1493 | /* Make the list *LISTP empty. */ |
1494 | |
1495 | static void |
1496 | attrs_list_clear (attrs **listp) |
1497 | { |
1498 | attrs *list, *next; |
1499 | |
1500 | for (list = *listp; list; list = next) |
1501 | { |
1502 | next = list->next; |
1503 | delete list; |
1504 | } |
1505 | *listp = NULL; |
1506 | } |
1507 | |
1508 | /* Return true if the pair of DECL and OFFSET is the member of the LIST. */ |
1509 | |
1510 | static attrs * |
1511 | attrs_list_member (attrs *list, decl_or_value dv, HOST_WIDE_INT offset) |
1512 | { |
1513 | for (; list; list = list->next) |
1514 | if (list->dv == dv && list->offset == offset) |
1515 | return list; |
1516 | return NULL; |
1517 | } |
1518 | |
1519 | /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */ |
1520 | |
1521 | static void |
1522 | attrs_list_insert (attrs **listp, decl_or_value dv, |
1523 | HOST_WIDE_INT offset, rtx loc) |
1524 | { |
1525 | attrs *list = new attrs; |
1526 | list->loc = loc; |
1527 | list->dv = dv; |
1528 | list->offset = offset; |
1529 | list->next = *listp; |
1530 | *listp = list; |
1531 | } |
1532 | |
1533 | /* Copy all nodes from SRC and create a list *DSTP of the copies. */ |
1534 | |
1535 | static void |
1536 | (attrs **dstp, attrs *src) |
1537 | { |
1538 | attrs_list_clear (listp: dstp); |
1539 | for (; src; src = src->next) |
1540 | { |
1541 | attrs *n = new attrs; |
1542 | n->loc = src->loc; |
1543 | n->dv = src->dv; |
1544 | n->offset = src->offset; |
1545 | n->next = *dstp; |
1546 | *dstp = n; |
1547 | } |
1548 | } |
1549 | |
1550 | /* Add all nodes from SRC which are not in *DSTP to *DSTP. */ |
1551 | |
1552 | static void |
1553 | (attrs **dstp, attrs *src) |
1554 | { |
1555 | for (; src; src = src->next) |
1556 | { |
1557 | if (!attrs_list_member (list: *dstp, dv: src->dv, offset: src->offset)) |
1558 | attrs_list_insert (listp: dstp, dv: src->dv, offset: src->offset, loc: src->loc); |
1559 | } |
1560 | } |
1561 | |
1562 | /* Combine nodes that are not onepart nodes from SRC and SRC2 into |
1563 | *DSTP. */ |
1564 | |
1565 | static void |
1566 | (attrs **dstp, attrs *src, attrs *src2) |
1567 | { |
1568 | gcc_assert (!*dstp); |
1569 | for (; src; src = src->next) |
1570 | { |
1571 | if (!dv_onepart_p (dv: src->dv)) |
1572 | attrs_list_insert (listp: dstp, dv: src->dv, offset: src->offset, loc: src->loc); |
1573 | } |
1574 | for (src = src2; src; src = src->next) |
1575 | { |
1576 | if (!dv_onepart_p (dv: src->dv) |
1577 | && !attrs_list_member (list: *dstp, dv: src->dv, offset: src->offset)) |
1578 | attrs_list_insert (listp: dstp, dv: src->dv, offset: src->offset, loc: src->loc); |
1579 | } |
1580 | } |
1581 | |
/* Shared hashtable support.  */

/* Return true if VARS is shared, i.e. held by more than one owner.  */

static inline bool
shared_hash_shared (shared_hash *vars)
{
  return vars->refcount > 1;
}
1591 | |
/* Return the underlying hash table of VARS.  */

static inline variable_table_type *
shared_hash_htab (shared_hash *vars)
{
  return vars->htab;
}
1599 | |
1600 | /* Return true if VAR is shared, or maybe because VARS is shared. */ |
1601 | |
1602 | static inline bool |
1603 | shared_var_p (variable *var, shared_hash *vars) |
1604 | { |
1605 | /* Don't count an entry in the changed_variables table as a duplicate. */ |
1606 | return ((var->refcount > 1 + (int) var->in_changed_variables) |
1607 | || shared_hash_shared (vars)); |
1608 | } |
1609 | |
1610 | /* Copy variables into a new hash table. */ |
1611 | |
1612 | static shared_hash * |
1613 | shared_hash_unshare (shared_hash *vars) |
1614 | { |
1615 | shared_hash *new_vars = new shared_hash; |
1616 | gcc_assert (vars->refcount > 1); |
1617 | new_vars->refcount = 1; |
1618 | new_vars->htab = new variable_table_type (vars->htab->elements () + 3); |
1619 | vars_copy (new_vars->htab, vars->htab); |
1620 | vars->refcount--; |
1621 | return new_vars; |
1622 | } |
1623 | |
1624 | /* Increment reference counter on VARS and return it. */ |
1625 | |
1626 | static inline shared_hash * |
1627 | shared_hash_copy (shared_hash *vars) |
1628 | { |
1629 | vars->refcount++; |
1630 | return vars; |
1631 | } |
1632 | |
1633 | /* Decrement reference counter and destroy hash table if not shared |
1634 | anymore. */ |
1635 | |
1636 | static void |
1637 | shared_hash_destroy (shared_hash *vars) |
1638 | { |
1639 | gcc_checking_assert (vars->refcount > 0); |
1640 | if (--vars->refcount == 0) |
1641 | { |
1642 | delete vars->htab; |
1643 | delete vars; |
1644 | } |
1645 | } |
1646 | |
/* Unshare *PVARS if shared and return slot for DV.  If INS is
   INSERT, insert it if not already present.  Copy-on-write: the
   caller gets a writable slot in an unshared table.  */

static inline variable **
shared_hash_find_slot_unshare_1 (shared_hash **pvars, decl_or_value dv,
				 hashval_t dvhash, enum insert_option ins)
{
  if (shared_hash_shared (vars: *pvars))
    *pvars = shared_hash_unshare (vars: *pvars);
  return shared_hash_htab (vars: *pvars)->find_slot_with_hash (comparable: dv, hash: dvhash, insert: ins);
}

/* Likewise, but compute DV's hash code here.  */

static inline variable **
shared_hash_find_slot_unshare (shared_hash **pvars, decl_or_value dv,
			       enum insert_option ins)
{
  return shared_hash_find_slot_unshare_1 (pvars, dv, dvhash: dv_htab_hash (dv), ins);
}
1665 | |
/* Return slot for DV, if it is already present in the hash table.
   If it is not present, insert it only if VARS is not shared, otherwise
   return NULL.  */

static inline variable **
shared_hash_find_slot_1 (shared_hash *vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_slot_with_hash (comparable: dv, hash: dvhash,
						       insert: shared_hash_shared (vars)
						       ? NO_INSERT : INSERT);
}

/* Likewise, but compute DV's hash code here.  */

static inline variable **
shared_hash_find_slot (shared_hash *vars, decl_or_value dv)
{
  return shared_hash_find_slot_1 (vars, dv, dvhash: dv_htab_hash (dv));
}
1683 | |
/* Return slot for DV only if it is already present in the hash table.  */

static inline variable **
shared_hash_find_slot_noinsert_1 (shared_hash *vars, decl_or_value dv,
				  hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_slot_with_hash (comparable: dv, hash: dvhash, insert: NO_INSERT);
}

/* Likewise, but compute DV's hash code here.  */

static inline variable **
shared_hash_find_slot_noinsert (shared_hash *vars, decl_or_value dv)
{
  return shared_hash_find_slot_noinsert_1 (vars, dv, dvhash: dv_htab_hash (dv));
}
1698 | |
/* Return variable for DV or NULL if not already present in the hash
   table.  */

static inline variable *
shared_hash_find_1 (shared_hash *vars, decl_or_value dv, hashval_t dvhash)
{
  return shared_hash_htab (vars)->find_with_hash (comparable: dv, hash: dvhash);
}

/* Likewise, but compute DV's hash code here.  */

static inline variable *
shared_hash_find (shared_hash *vars, decl_or_value dv)
{
  return shared_hash_find_1 (vars, dv, dvhash: dv_htab_hash (dv));
}
1713 | |
/* Return true if TVAL is better than CVAL as a canonical value.  We
   choose lowest-numbered VALUEs, using the RTX address as a
   tie-breaker.  The idea is to arrange them into a star topology,
   such that all of them are at most one step away from the canonical
   value, and the canonical value has backlinks to all of them, in
   addition to all the actual locations.  We don't enforce this
   topology throughout the entire dataflow analysis, though.
 */

static inline bool
canon_value_cmp (rtx tval, rtx cval)
{
  /* Any value beats no value; otherwise prefer the lower uid.  */
  return !cval
	 || CSELIB_VAL_PTR (tval)->uid < CSELIB_VAL_PTR (cval)->uid;
}
1729 | |
/* Cleared by unshare_variable whenever it has to make a copy.
   NOTE(review): the consumers of this flag are outside this chunk;
   presumably the dataflow merge code — confirm there.  */
static bool dst_can_be_shared;

/* Return a copy of a variable VAR and insert it to dataflow set SET.  */

static variable **
unshare_variable (dataflow_set *set, variable **slot, variable *var,
		  enum var_init_status initialized)
{
  variable *new_var;
  int i;

  /* The new copy takes one reference; VAR loses one.  */
  new_var = onepart_pool_allocate (onepart: var->onepart);
  new_var->dv = var->dv;
  new_var->refcount = 1;
  var->refcount--;
  new_var->n_var_parts = var->n_var_parts;
  new_var->onepart = var->onepart;
  new_var->in_changed_variables = false;

  if (! flag_var_tracking_uninit)
    initialized = VAR_INIT_STATUS_INITIALIZED;

  for (i = 0; i < var->n_var_parts; i++)
    {
      location_chain *node;
      location_chain **nextp;

      if (i == 0 && var->onepart)
	{
	  /* One-part auxiliary data is only used while emitting
	     notes, so propagate it to the new variable in the active
	     dataflow set.  If we're not emitting notes, this will be
	     a no-op.  */
	  gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes);
	  VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var);
	  VAR_LOC_1PAUX (var) = NULL;
	}
      else
	VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i);
      /* Deep-copy the location chain of this part, capping the init
	 status at INITIALIZED.  */
      nextp = &new_var->var_part[i].loc_chain;
      for (node = var->var_part[i].loc_chain; node; node = node->next)
	{
	  location_chain *new_lc;

	  new_lc = new location_chain;
	  new_lc->next = NULL;
	  if (node->init > initialized)
	    new_lc->init = node->init;
	  else
	    new_lc->init = initialized;
	  if (node->set_src && !(MEM_P (node->set_src)))
	    new_lc->set_src = node->set_src;
	  else
	    new_lc->set_src = NULL;
	  new_lc->loc = node->loc;

	  *nextp = new_lc;
	  nextp = &new_lc->next;
	}

      new_var->var_part[i].cur_loc = var->var_part[i].cur_loc;
    }

  dst_can_be_shared = false;
  /* Find the slot the new copy should occupy in SET's table.  */
  if (shared_hash_shared (vars: set->vars))
    slot = shared_hash_find_slot_unshare (pvars: &set->vars, dv: var->dv, ins: NO_INSERT);
  else if (set->traversed_vars && set->vars != set->traversed_vars)
    slot = shared_hash_find_slot_noinsert (vars: set->vars, dv: var->dv);
  *slot = new_var;
  /* If VAR was queued in changed_variables, replace it there too.  */
  if (var->in_changed_variables)
    {
      variable **cslot
	= changed_variables->find_slot_with_hash (comparable: var->dv,
						  hash: dv_htab_hash (dv: var->dv),
						  insert: NO_INSERT);
      gcc_assert (*cslot == (void *) var);
      var->in_changed_variables = false;
      variable_htab_free (elem: var);
      *cslot = new_var;
      new_var->in_changed_variables = true;
    }
  return slot;
}
1813 | |
1814 | /* Copy all variables from hash table SRC to hash table DST. */ |
1815 | |
1816 | static void |
1817 | vars_copy (variable_table_type *dst, variable_table_type *src) |
1818 | { |
1819 | variable_iterator_type hi; |
1820 | variable *var; |
1821 | |
1822 | FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi) |
1823 | { |
1824 | variable **dstp; |
1825 | var->refcount++; |
1826 | dstp = dst->find_slot_with_hash (comparable: var->dv, hash: dv_htab_hash (dv: var->dv), insert: INSERT); |
1827 | *dstp = var; |
1828 | } |
1829 | } |
1830 | |
1831 | /* Map a decl to its main debug decl. */ |
1832 | |
1833 | static inline tree |
1834 | var_debug_decl (tree decl) |
1835 | { |
1836 | if (decl && VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl)) |
1837 | { |
1838 | tree debugdecl = DECL_DEBUG_EXPR (decl); |
1839 | if (DECL_P (debugdecl)) |
1840 | decl = debugdecl; |
1841 | } |
1842 | |
1843 | return decl; |
1844 | } |
1845 | |
1846 | /* Set the register LOC to contain DV, OFFSET. */ |
1847 | |
1848 | static void |
1849 | var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized, |
1850 | decl_or_value dv, HOST_WIDE_INT offset, rtx set_src, |
1851 | enum insert_option iopt) |
1852 | { |
1853 | attrs *node; |
1854 | bool decl_p = dv_is_decl_p (dv); |
1855 | |
1856 | if (decl_p) |
1857 | dv = dv_from_decl (decl: var_debug_decl (decl: dv_as_decl (dv))); |
1858 | |
1859 | for (node = set->regs[REGNO (loc)]; node; node = node->next) |
1860 | if (node->dv == dv && node->offset == offset) |
1861 | break; |
1862 | if (!node) |
1863 | attrs_list_insert (listp: &set->regs[REGNO (loc)], dv, offset, loc); |
1864 | set_variable_part (set, loc, dv, offset, initialized, set_src, iopt); |
1865 | } |
1866 | |
1867 | /* Return true if we should track a location that is OFFSET bytes from |
1868 | a variable. Store the constant offset in *OFFSET_OUT if so. */ |
1869 | |
1870 | static bool |
1871 | track_offset_p (poly_int64 offset, HOST_WIDE_INT *offset_out) |
1872 | { |
1873 | HOST_WIDE_INT const_offset; |
1874 | if (!offset.is_constant (const_value: &const_offset) |
1875 | || !IN_RANGE (const_offset, 0, MAX_VAR_PARTS - 1)) |
1876 | return false; |
1877 | *offset_out = const_offset; |
1878 | return true; |
1879 | } |
1880 | |
1881 | /* Return the offset of a register that track_offset_p says we |
1882 | should track. */ |
1883 | |
1884 | static HOST_WIDE_INT |
1885 | get_tracked_reg_offset (rtx loc) |
1886 | { |
1887 | HOST_WIDE_INT offset; |
1888 | if (!track_offset_p (REG_OFFSET (loc), offset_out: &offset)) |
1889 | gcc_unreachable (); |
1890 | return offset; |
1891 | } |
1892 | |
1893 | /* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */ |
1894 | |
1895 | static void |
1896 | var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized, |
1897 | rtx set_src) |
1898 | { |
1899 | tree decl = REG_EXPR (loc); |
1900 | HOST_WIDE_INT offset = get_tracked_reg_offset (loc); |
1901 | |
1902 | var_reg_decl_set (set, loc, initialized, |
1903 | dv: dv_from_decl (decl), offset, set_src, iopt: INSERT); |
1904 | } |
1905 | |
1906 | static enum var_init_status |
1907 | get_init_value (dataflow_set *set, rtx loc, decl_or_value dv) |
1908 | { |
1909 | variable *var; |
1910 | int i; |
1911 | enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN; |
1912 | |
1913 | if (! flag_var_tracking_uninit) |
1914 | return VAR_INIT_STATUS_INITIALIZED; |
1915 | |
1916 | var = shared_hash_find (vars: set->vars, dv); |
1917 | if (var) |
1918 | { |
1919 | for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++) |
1920 | { |
1921 | location_chain *nextp; |
1922 | for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next) |
1923 | if (rtx_equal_p (nextp->loc, loc)) |
1924 | { |
1925 | ret_val = nextp->init; |
1926 | break; |
1927 | } |
1928 | } |
1929 | } |
1930 | |
1931 | return ret_val; |
1932 | } |
1933 | |
1934 | /* Delete current content of register LOC in dataflow set SET and set |
1935 | the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). If |
1936 | MODIFY is true, any other live copies of the same variable part are |
1937 | also deleted from the dataflow set, otherwise the variable part is |
1938 | assumed to be copied from another location holding the same |
1939 | part. */ |
1940 | |
static void
var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify,
			enum var_init_status initialized, rtx set_src)
{
  tree decl = REG_EXPR (loc);
  HOST_WIDE_INT offset = get_tracked_reg_offset (loc);
  attrs *node, *next;
  attrs **nextp;

  decl = var_debug_decl (decl);

  if (initialized == VAR_INIT_STATUS_UNKNOWN)
    initialized = get_init_value (set, loc, dv: dv_from_decl (decl));

  /* Walk the register's attribute list, unlinking and deleting every
     binding other than DECL/OFFSET; the matching node (if any) is
     kept and pointed at LOC.  */
  nextp = &set->regs[REGNO (loc)];
  for (node = *nextp; node; node = next)
    {
      next = node->next;
      if (node->dv != decl || node->offset != offset)
	{
	  delete_variable_part (set, node->loc, node->dv, node->offset);
	  delete node;
	  *nextp = next;
	}
      else
	{
	  node->loc = loc;
	  nextp = &node->next;
	}
    }
  /* For a modifying store, also remove other live copies of the same
     variable part from the set.  */
  if (modify)
    clobber_variable_part (set, loc, dv_from_decl (decl), offset, set_src);
  var_reg_set (set, loc, initialized, set_src);
}
1975 | |
1976 | /* Delete the association of register LOC in dataflow set SET with any |
1977 | variables that aren't onepart. If CLOBBER is true, also delete any |
1978 | other live copies of the same variable part, and delete the |
1979 | association with onepart dvs too. */ |
1980 | |
static void
var_reg_delete (dataflow_set *set, rtx loc, bool clobber)
{
  attrs **nextp = &set->regs[REGNO (loc)];
  attrs *node, *next;

  /* When clobbering, first drop the register's own decl binding at
     its tracked offset, including other live copies of the part.  */
  HOST_WIDE_INT offset;
  if (clobber && track_offset_p (REG_OFFSET (loc), offset_out: &offset))
    {
      tree decl = REG_EXPR (loc);

      decl = var_debug_decl (decl);

      clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL);
    }

  /* Unlink attribute nodes in place: all of them when clobbering,
     otherwise only those whose dv is not onepart.  */
  for (node = *nextp; node; node = next)
    {
      next = node->next;
      if (clobber || !dv_onepart_p (dv: node->dv))
	{
	  delete_variable_part (set, node->loc, node->dv, node->offset);
	  delete node;
	  *nextp = next;
	}
      else
	nextp = &node->next;
    }
}
2010 | |
2011 | /* Delete content of register with number REGNO in dataflow set SET. */ |
2012 | |
2013 | static void |
2014 | var_regno_delete (dataflow_set *set, int regno) |
2015 | { |
2016 | attrs **reg = &set->regs[regno]; |
2017 | attrs *node, *next; |
2018 | |
2019 | for (node = *reg; node; node = next) |
2020 | { |
2021 | next = node->next; |
2022 | delete_variable_part (set, node->loc, node->dv, node->offset); |
2023 | delete node; |
2024 | } |
2025 | *reg = NULL; |
2026 | } |
2027 | |
2028 | /* Return true if I is the negated value of a power of two. */ |
2029 | static bool |
2030 | negative_power_of_two_p (HOST_WIDE_INT i) |
2031 | { |
2032 | unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i; |
2033 | return pow2_or_zerop (x); |
2034 | } |
2035 | |
2036 | /* Strip constant offsets and alignments off of LOC. Return the base |
2037 | expression. */ |
2038 | |
2039 | static rtx |
2040 | vt_get_canonicalize_base (rtx loc) |
2041 | { |
2042 | while ((GET_CODE (loc) == PLUS |
2043 | || GET_CODE (loc) == AND) |
2044 | && GET_CODE (XEXP (loc, 1)) == CONST_INT |
2045 | && (GET_CODE (loc) != AND |
2046 | || negative_power_of_two_p (INTVAL (XEXP (loc, 1))))) |
2047 | loc = XEXP (loc, 0); |
2048 | |
2049 | return loc; |
2050 | } |
2051 | |
/* This caches canonicalized addresses for VALUEs, computed using
   information in the global cselib table.  Keys are VALUE rtxs.  */
static hash_map<rtx, rtx> *global_get_addr_cache;

/* This caches canonicalized addresses for VALUEs, computed using
   information from the global cache and information pertaining to a
   basic block being analyzed.  Keys are VALUE rtxs.  */
static hash_map<rtx, rtx> *local_get_addr_cache;

/* Forward declaration; used recursively by both cache fillers below.  */
static rtx vt_canonicalize_addr (dataflow_set *, rtx);
2062 | |
2063 | /* Return the canonical address for LOC, that must be a VALUE, using a |
2064 | cached global equivalence or computing it and storing it in the |
2065 | global cache. */ |
2066 | |
static rtx
get_addr_from_global_cache (rtx const loc)
{
  rtx x;

  gcc_checking_assert (GET_CODE (loc) == VALUE);

  bool existed;
  rtx *slot = &global_get_addr_cache->get_or_insert (k: loc, existed: &existed);
  if (existed)
    return *slot;

  x = canon_rtx (get_addr (loc));

  /* Tentative, avoiding infinite recursion.  */
  *slot = x;

  if (x != loc)
    {
      /* Canonicalize further using global equivalences only (the
	 dataflow set argument is NULL).  */
      rtx nx = vt_canonicalize_addr (NULL, x);
      if (nx != x)
	{
	  /* The table may have moved during recursion, recompute
	     SLOT.  */
	  *global_get_addr_cache->get (k: loc) = x = nx;
	}
    }

  return x;
}
2097 | |
2098 | /* Return the canonical address for LOC, that must be a VALUE, using a |
2099 | cached local equivalence or computing it and storing it in the |
2100 | local cache. */ |
2101 | |
static rtx
get_addr_from_local_cache (dataflow_set *set, rtx const loc)
{
  rtx x;
  decl_or_value dv;
  variable *var;
  location_chain *l;

  gcc_checking_assert (GET_CODE (loc) == VALUE);

  bool existed;
  rtx *slot = &local_get_addr_cache->get_or_insert (k: loc, existed: &existed);
  if (existed)
    return *slot;

  /* Start from the global canonicalization.  */
  x = get_addr_from_global_cache (loc);

  /* Tentative, avoiding infinite recursion.  */
  *slot = x;

  /* Recurse to cache local expansion of X, or if we need to search
     for a VALUE in the expansion.  */
  if (x != loc)
    {
      rtx nx = vt_canonicalize_addr (set, x);
      if (nx != x)
	{
	  /* Re-fetch the slot: the map may have been resized during
	     the recursive calls.  */
	  slot = local_get_addr_cache->get (k: loc);
	  *slot = x = nx;
	}
      return x;
    }

  dv = dv_from_rtx (x);
  var = shared_hash_find (vars: set->vars, dv);
  if (!var)
    return x;

  /* Look for an improved equivalent expression: a location in SET
     whose canonicalized base is a VALUE preferred over LOC.  */
  for (l = var->var_part[0].loc_chain; l; l = l->next)
    {
      rtx base = vt_get_canonicalize_base (loc: l->loc);
      if (GET_CODE (base) == VALUE
	  && canon_value_cmp (tval: base, cval: loc))
	{
	  rtx nx = vt_canonicalize_addr (set, l->loc);
	  if (x != nx)
	    {
	      slot = local_get_addr_cache->get (k: loc);
	      *slot = x = nx;
	    }
	  break;
	}
    }

  return x;
}
2159 | |
2160 | /* Canonicalize LOC using equivalences from SET in addition to those |
2161 | in the cselib static table. It expects a VALUE-based expression, |
2162 | and it will only substitute VALUEs with other VALUEs or |
2163 | function-global equivalences, so that, if two addresses have base |
2164 | VALUEs that are locally or globally related in ways that |
2165 | memrefs_conflict_p cares about, they will both canonicalize to |
2166 | expressions that have the same base VALUE. |
2167 | |
2168 | The use of VALUEs as canonical base addresses enables the canonical |
2169 | RTXs to remain unchanged globally, if they resolve to a constant, |
2170 | or throughout a basic block otherwise, so that they can be cached |
2171 | and the cache needs not be invalidated when REGs, MEMs or such |
2172 | change. */ |
2173 | |
static rtx
vt_canonicalize_addr (dataflow_set *set, rtx oloc)
{
  poly_int64 ofst = 0, term;
  machine_mode mode = GET_MODE (oloc);
  rtx loc = oloc;
  rtx x;
  bool retry = true;

  /* Iterate until the base stabilizes, accumulating the stripped
     constant offsets in OFST.  */
  while (retry)
    {
      while (GET_CODE (loc) == PLUS
	     && poly_int_rtx_p (XEXP (loc, 1), res: &term))
	{
	  ofst += term;
	  loc = XEXP (loc, 0);
	}

      /* Alignment operations can't normally be combined, so just
	 canonicalize the base and we're done.  We'll normally have
	 only one stack alignment anyway.  */
      if (GET_CODE (loc) == AND
	  && GET_CODE (XEXP (loc, 1)) == CONST_INT
	  && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
	{
	  x = vt_canonicalize_addr (set, XEXP (loc, 0));
	  if (x != XEXP (loc, 0))
	    loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
	  retry = false;
	}

      if (GET_CODE (loc) == VALUE)
	{
	  /* Resolve the VALUE through the local cache when a dataflow
	     set is available, otherwise through the global one.  */
	  if (set)
	    loc = get_addr_from_local_cache (set, loc);
	  else
	    loc = get_addr_from_global_cache (loc);

	  /* Consolidate plus_constants.  */
	  while (maybe_ne (a: ofst, b: 0)
		 && GET_CODE (loc) == PLUS
		 && poly_int_rtx_p (XEXP (loc, 1), res: &term))
	    {
	      ofst += term;
	      loc = XEXP (loc, 0);
	    }

	  retry = false;
	}
      else
	{
	  /* Keep iterating only while canon_rtx makes progress (and
	     no AND above has ended the search).  */
	  x = canon_rtx (loc);
	  if (retry)
	    retry = (x != loc);
	  loc = x;
	}
    }

  /* Add OFST back in.  */
  if (maybe_ne (a: ofst, b: 0))
    {
      /* Don't build new RTL if we can help it.  */
      if (strip_offset (oloc, &term) == loc && known_eq (term, ofst))
	return oloc;

      loc = plus_constant (mode, loc, ofst);
    }

  return loc;
}
2244 | |
2245 | /* Return true iff there's a true dependence between MLOC and LOC. |
2246 | MADDR must be a canonicalized version of MLOC's address. */ |
2247 | |
2248 | static inline bool |
2249 | vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc) |
2250 | { |
2251 | if (GET_CODE (loc) != MEM) |
2252 | return false; |
2253 | |
2254 | rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0)); |
2255 | if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr)) |
2256 | return false; |
2257 | |
2258 | return true; |
2259 | } |
2260 | |
2261 | /* Hold parameters for the hashtab traversal function |
2262 | drop_overlapping_mem_locs, see below. */ |
2263 | |
struct overlapping_mems
{
  /* The dataflow set being traversed.  */
  dataflow_set *set;
  /* LOC is the canonicalized MEM being stored; ADDR is its
     canonicalized address.  */
  rtx loc, addr;
};
2269 | |
2270 | /* Remove all MEMs that overlap with COMS->LOC from the location list |
2271 | of a hash table entry for a onepart variable. COMS->ADDR must be a |
2272 | canonicalized form of COMS->LOC's address, and COMS->LOC must be |
2273 | canonicalized itself. */ |
2274 | |
int
drop_overlapping_mem_locs (variable **slot, overlapping_mems *coms)
{
  dataflow_set *set = coms->set;
  rtx mloc = coms->loc, addr = coms->addr;
  variable *var = *slot;

  /* Only onepart variables are processed; others are left alone.  */
  if (var->onepart != NOT_ONEPART)
    {
      location_chain *loc, **locp;
      bool changed = false;
      rtx cur_loc;

      gcc_assert (var->n_var_parts == 1);

      /* If VAR is shared, first check whether anything would be
	 dropped at all, and unshare only if so.  */
      if (shared_var_p (var, vars: set->vars))
	{
	  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
	    if (vt_canon_true_dep (set, mloc, maddr: addr, loc: loc->loc))
	      break;

	  if (!loc)
	    return 1;

	  slot = unshare_variable (set, slot, var, initialized: VAR_INIT_STATUS_UNKNOWN);
	  var = *slot;
	  gcc_assert (var->n_var_parts == 1);
	}

      if (VAR_LOC_1PAUX (var))
	cur_loc = VAR_LOC_FROM (var);
      else
	cur_loc = var->var_part[0].cur_loc;

      /* Unlink and delete every location node that truly overlaps
	 MLOC.  */
      for (locp = &var->var_part[0].loc_chain, loc = *locp;
	   loc; loc = *locp)
	{
	  if (!vt_canon_true_dep (set, mloc, maddr: addr, loc: loc->loc))
	    {
	      locp = &loc->next;
	      continue;
	    }

	  *locp = loc->next;
	  /* If we have deleted the location which was last emitted
	     we have to emit new location so add the variable to set
	     of changed variables.  */
	  if (cur_loc == loc->loc)
	    {
	      changed = true;
	      var->var_part[0].cur_loc = NULL;
	      if (VAR_LOC_1PAUX (var))
		VAR_LOC_FROM (var) = NULL;
	    }
	  delete loc;
	}

      /* An emptied chain removes the variable part entirely.  */
      if (!var->var_part[0].loc_chain)
	{
	  var->n_var_parts--;
	  changed = true;
	}
      if (changed)
	variable_was_changed (var, set);
    }

  /* Always continue the traversal.  */
  return 1;
}
2343 | |
2344 | /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */ |
2345 | |
2346 | static void |
2347 | clobber_overlapping_mems (dataflow_set *set, rtx loc) |
2348 | { |
2349 | struct overlapping_mems coms; |
2350 | |
2351 | gcc_checking_assert (GET_CODE (loc) == MEM); |
2352 | |
2353 | coms.set = set; |
2354 | coms.loc = canon_rtx (loc); |
2355 | coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0)); |
2356 | |
2357 | set->traversed_vars = set->vars; |
2358 | shared_hash_htab (vars: set->vars) |
2359 | ->traverse <overlapping_mems*, drop_overlapping_mem_locs> (argument: &coms); |
2360 | set->traversed_vars = NULL; |
2361 | } |
2362 | |
2363 | /* Set the location of DV, OFFSET as the MEM LOC. */ |
2364 | |
2365 | static void |
2366 | var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized, |
2367 | decl_or_value dv, HOST_WIDE_INT offset, rtx set_src, |
2368 | enum insert_option iopt) |
2369 | { |
2370 | if (dv_is_decl_p (dv)) |
2371 | dv = dv_from_decl (decl: var_debug_decl (decl: dv_as_decl (dv))); |
2372 | |
2373 | set_variable_part (set, loc, dv, offset, initialized, set_src, iopt); |
2374 | } |
2375 | |
2376 | /* Set the location part of variable MEM_EXPR (LOC) in dataflow set |
2377 | SET to LOC. |
2378 | Adjust the address first if it is stack pointer based. */ |
2379 | |
2380 | static void |
2381 | var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized, |
2382 | rtx set_src) |
2383 | { |
2384 | tree decl = MEM_EXPR (loc); |
2385 | HOST_WIDE_INT offset = int_mem_offset (mem: loc); |
2386 | |
2387 | var_mem_decl_set (set, loc, initialized, |
2388 | dv: dv_from_decl (decl), offset, set_src, iopt: INSERT); |
2389 | } |
2390 | |
2391 | /* Delete and set the location part of variable MEM_EXPR (LOC) in |
2392 | dataflow set SET to LOC. If MODIFY is true, any other live copies |
2393 | of the same variable part are also deleted from the dataflow set, |
2394 | otherwise the variable part is assumed to be copied from another |
2395 | location holding the same part. |
2396 | Adjust the address first if it is stack pointer based. */ |
2397 | |
2398 | static void |
2399 | var_mem_delete_and_set (dataflow_set *set, rtx loc, bool modify, |
2400 | enum var_init_status initialized, rtx set_src) |
2401 | { |
2402 | tree decl = MEM_EXPR (loc); |
2403 | HOST_WIDE_INT offset = int_mem_offset (mem: loc); |
2404 | |
2405 | clobber_overlapping_mems (set, loc); |
2406 | decl = var_debug_decl (decl); |
2407 | |
2408 | if (initialized == VAR_INIT_STATUS_UNKNOWN) |
2409 | initialized = get_init_value (set, loc, dv: dv_from_decl (decl)); |
2410 | |
2411 | if (modify) |
2412 | clobber_variable_part (set, NULL, dv_from_decl (decl), offset, set_src); |
2413 | var_mem_set (set, loc, initialized, set_src); |
2414 | } |
2415 | |
2416 | /* Delete the location part LOC from dataflow set SET. If CLOBBER is |
2417 | true, also delete any other live copies of the same variable part. |
2418 | Adjust the address first if it is stack pointer based. */ |
2419 | |
2420 | static void |
2421 | var_mem_delete (dataflow_set *set, rtx loc, bool clobber) |
2422 | { |
2423 | tree decl = MEM_EXPR (loc); |
2424 | HOST_WIDE_INT offset = int_mem_offset (mem: loc); |
2425 | |
2426 | clobber_overlapping_mems (set, loc); |
2427 | decl = var_debug_decl (decl); |
2428 | if (clobber) |
2429 | clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL); |
2430 | delete_variable_part (set, loc, dv_from_decl (decl), offset); |
2431 | } |
2432 | |
2433 | /* Return true if LOC should not be expanded for location expressions, |
2434 | or used in them. */ |
2435 | |
2436 | static inline bool |
2437 | unsuitable_loc (rtx loc) |
2438 | { |
2439 | switch (GET_CODE (loc)) |
2440 | { |
2441 | case PC: |
2442 | case SCRATCH: |
2443 | case ASM_INPUT: |
2444 | case ASM_OPERANDS: |
2445 | return true; |
2446 | |
2447 | default: |
2448 | return false; |
2449 | } |
2450 | } |
2451 | |
2452 | /* Bind VAL to LOC in SET. If MODIFIED, detach LOC from any values |
2453 | bound to it. */ |
2454 | |
static inline void
val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
{
  if (REG_P (loc))
    {
      /* A modified register first loses all its recorded contents.  */
      if (modified)
	var_regno_delete (set, REGNO (loc));
      var_reg_decl_set (set, loc, initialized: VAR_INIT_STATUS_INITIALIZED,
			dv: dv_from_value (value: val), offset: 0, NULL_RTX, iopt: INSERT);
    }
  else if (MEM_P (loc))
    {
      struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;

      if (modified)
	clobber_overlapping_mems (set, loc);

      /* Follow a leading VALUE location to its canonical value's
	 location list.  */
      if (l && GET_CODE (l->loc) == VALUE)
	l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;

      /* If this MEM is a global constant, we don't need it in the
	 dynamic tables.  ??? We should test this before emitting the
	 micro-op in the first place.  */
      while (l)
	if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
	  break;
	else
	  l = l->next;

      if (!l)
	var_mem_decl_set (set, loc, initialized: VAR_INIT_STATUS_INITIALIZED,
			  dv: dv_from_value (value: val), offset: 0, NULL_RTX, iopt: INSERT);
    }
  else
    {
      /* Other kinds of equivalences are necessarily static, at least
	 so long as we do not perform substitutions while merging
	 expressions.  */
      gcc_unreachable ();
      /* NOTE(review): the call below is dead code behind
	 gcc_unreachable; it documents the intended fallback only.  */
      set_variable_part (set, loc, dv_from_value (value: val), 0,
			 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
    }
}
2498 | |
2499 | /* Bind a value to a location it was just stored in. If MODIFIED |
2500 | holds, assume the location was modified, detaching it from any |
2501 | values bound to it. */ |
2502 | |
static void
val_store (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn,
	   bool modified)
{
  cselib_val *v = CSELIB_VAL_PTR (val);

  gcc_assert (cselib_preserved_value_p (v));

  /* Dump the binding being recorded, along with VAL's known cselib
     locations and their setting insns.  */
  if (dump_file)
    {
      fprintf (stream: dump_file, format: "%i: " , insn ? INSN_UID (insn) : 0);
      print_inline_rtx (dump_file, loc, 0);
      fprintf (stream: dump_file, format: " evaluates to " );
      print_inline_rtx (dump_file, val, 0);
      if (v->locs)
	{
	  struct elt_loc_list *l;
	  for (l = v->locs; l; l = l->next)
	    {
	      fprintf (stream: dump_file, format: "\n%i: " , INSN_UID (insn: l->setting_insn));
	      print_inline_rtx (dump_file, l->loc, 0);
	    }
	}
      fprintf (stream: dump_file, format: "\n" );
    }

  gcc_checking_assert (!unsuitable_loc (loc));

  val_bind (set, val, loc, modified);
}
2533 | |
2534 | /* Clear (canonical address) slots that reference X. */ |
2535 | |
2536 | bool |
2537 | local_get_addr_clear_given_value (rtx const &, rtx *slot, rtx x) |
2538 | { |
2539 | if (vt_get_canonicalize_base (loc: *slot) == x) |
2540 | *slot = NULL; |
2541 | return true; |
2542 | } |
2543 | |
/* Reset this node, detaching all its equivalences.  Does nothing if
   DV has no variable parts in SET.  */
2546 | |
static void
val_reset (dataflow_set *set, decl_or_value dv)
{
  variable *var = shared_hash_find (vars: set->vars, dv) ;
  location_chain *node;
  rtx cval;

  if (!var || !var->n_var_parts)
    return;

  gcc_assert (var->n_var_parts == 1);

  if (var->onepart == ONEPART_VALUE)
    {
      rtx x = dv_as_value (dv);

      /* Relationships in the global cache don't change, so reset the
	 local cache entry only.  */
      rtx *slot = local_get_addr_cache->get (k: x);
      if (slot)
	{
	  /* If the value resolved back to itself, odds are that other
	     values may have cached it too.  These entries now refer
	     to the old X, so detach them too.  Entries that used the
	     old X but resolved to something else remain ok as long as
	     that something else isn't also reset.  */
	  if (*slot == x)
	    local_get_addr_cache
	      ->traverse<rtx, local_get_addr_clear_given_value> (a: x);
	  *slot = NULL;
	}
    }

  /* Choose the preferred VALUE (per canon_value_cmp, the one with the
     lowest cselib uid) among DV's locations as the new canonical
     value.  */
  cval = NULL;
  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (GET_CODE (node->loc) == VALUE
	&& canon_value_cmp (tval: node->loc, cval))
      cval = node->loc;

  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (GET_CODE (node->loc) == VALUE && cval != node->loc)
      {
	/* Redirect the equivalence link to the new canonical
	   value, or simply remove it if it would point at
	   itself.  */
	if (cval)
	  set_variable_part (set, cval, dv_from_value (value: node->loc),
			     0, node->init, node->set_src, NO_INSERT);
	delete_variable_part (set, dv_as_value (dv),
			      dv_from_value (value: node->loc), 0);
      }

  if (cval)
    {
      decl_or_value cdv = dv_from_value (value: cval);

      /* Keep the remaining values connected, accumulating links
	 in the canonical value.  */
      for (node = var->var_part[0].loc_chain; node; node = node->next)
	{
	  if (node->loc == cval)
	    continue;
	  else if (GET_CODE (node->loc) == REG)
	    var_reg_decl_set (set, loc: node->loc, initialized: node->init, dv: cdv, offset: 0,
			      set_src: node->set_src, iopt: NO_INSERT);
	  else if (GET_CODE (node->loc) == MEM)
	    var_mem_decl_set (set, loc: node->loc, initialized: node->init, dv: cdv, offset: 0,
			      set_src: node->set_src, iopt: NO_INSERT);
	  else
	    set_variable_part (set, node->loc, cdv, 0,
			       node->init, node->set_src, NO_INSERT);
	}
    }

  /* We remove this last, to make sure that the canonical value is not
     removed to the point of requiring reinsertion.  */
  if (cval)
    delete_variable_part (set, dv_as_value (dv), dv_from_value (value: cval), 0);

  clobber_variable_part (set, NULL, dv, 0, NULL);
}
2628 | |
2629 | /* Find the values in a given location and map the val to another |
2630 | value, if it is unique, or add the location as one holding the |
2631 | value. */ |
2632 | |
static void
val_resolve (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn)
{
  decl_or_value dv = dv_from_value (value: val);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (insn)
	fprintf (stream: dump_file, format: "%i: " , INSN_UID (insn));
      else
	fprintf (stream: dump_file, format: "head: " );
      print_inline_rtx (dump_file, val, 0);
      fputs (s: " is at " , stream: dump_file);
      print_inline_rtx (dump_file, loc, 0);
      fputc (c: '\n', stream: dump_file);
    }

  /* Detach VAL's previous equivalences before recording new ones.  */
  val_reset (set, dv);

  gcc_checking_assert (!unsuitable_loc (loc));

  if (REG_P (loc))
    {
      attrs *node, *found = NULL;

      /* Cross-link VAL with every same-mode value already recorded
	 as living in the register.  */
      for (node = set->regs[REGNO (loc)]; node; node = node->next)
	if (dv_is_value_p (dv: node->dv)
	    && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc))
	  {
	    found = node;

	    /* Map incoming equivalences.  ??? Wouldn't it be nice if
	       we just started sharing the location lists?  Maybe a
	       circular list ending at the value itself or some
	       such.  */
	    set_variable_part (set, dv_as_value (dv: node->dv),
			       dv_from_value (value: val), node->offset,
			       VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
	    set_variable_part (set, val, node->dv, node->offset,
			       VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
	  }

      /* If we didn't find any equivalence, we need to remember that
	 this value is held in the named register.  */
      if (found)
	return;
    }
  /* ??? Attempt to find and merge equivalent MEMs or other
     expressions too.  */

  val_bind (set, val, loc, modified: false);
}
2685 | |
2686 | /* Initialize dataflow set SET to be empty. |
2687 | VARS_SIZE is the initial size of hash table VARS. */ |
2688 | |
2689 | static void |
2690 | dataflow_set_init (dataflow_set *set) |
2691 | { |
2692 | init_attrs_list_set (set: set->regs); |
2693 | set->vars = shared_hash_copy (vars: empty_shared_hash); |
2694 | set->stack_adjust = 0; |
2695 | set->traversed_vars = NULL; |
2696 | } |
2697 | |
2698 | /* Delete the contents of dataflow set SET. */ |
2699 | |
2700 | static void |
2701 | dataflow_set_clear (dataflow_set *set) |
2702 | { |
2703 | int i; |
2704 | |
2705 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
2706 | attrs_list_clear (listp: &set->regs[i]); |
2707 | |
2708 | shared_hash_destroy (vars: set->vars); |
2709 | set->vars = shared_hash_copy (vars: empty_shared_hash); |
2710 | } |
2711 | |
2712 | /* Copy the contents of dataflow set SRC to DST. */ |
2713 | |
2714 | static void |
2715 | dataflow_set_copy (dataflow_set *dst, dataflow_set *src) |
2716 | { |
2717 | int i; |
2718 | |
2719 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
2720 | attrs_list_copy (dstp: &dst->regs[i], src: src->regs[i]); |
2721 | |
2722 | shared_hash_destroy (vars: dst->vars); |
2723 | dst->vars = shared_hash_copy (vars: src->vars); |
2724 | dst->stack_adjust = src->stack_adjust; |
2725 | } |
2726 | |
/* Information for merging lists of locations for a given offset of variable.
 */
struct variable_union_info
{
  /* Node of the location chain.  */
  location_chain *lc;

  /* The sum of positions in the input chains.  Entries present in only
     one chain get a position past the end of both chains, so they sort
     after entries found in both.  */
  int pos;

  /* The position in the chain of DST dataflow set.  Used as the
     tie-breaker when POS values are equal.  */
  int pos_dst;
};

/* Buffer for location list sorting and its allocated size.  Grown on
   demand by variable_union (at least doubled each time) and never
   shrunk, so it is reused across merges.  */
static struct variable_union_info *vui_vec;
static int vui_allocated;
2744 | |
2745 | /* Compare function for qsort, order the structures by POS element. */ |
2746 | |
2747 | static int |
2748 | variable_union_info_cmp_pos (const void *n1, const void *n2) |
2749 | { |
2750 | const struct variable_union_info *const i1 = |
2751 | (const struct variable_union_info *) n1; |
2752 | const struct variable_union_info *const i2 = |
2753 | ( const struct variable_union_info *) n2; |
2754 | |
2755 | if (i1->pos != i2->pos) |
2756 | return i1->pos - i2->pos; |
2757 | |
2758 | return (i1->pos_dst - i2->pos_dst); |
2759 | } |
2760 | |
/* Compute union of location parts of variable *SLOT and the same variable
   from hash table DATA.  Compute "sorted" union of the location chains
   for common offsets, i.e. the locations of a variable part are sorted by
   a priority where the priority is the sum of the positions in the 2 chains
   (if a location is only in one list the position in the second list is
   defined to be larger than the length of the chains).
   When we are updating the location parts the newest location is in the
   beginning of the chain, so when we do the described "sorted" union
   we keep the newest locations in the beginning.  */

static int
variable_union (variable *src, dataflow_set *set)
{
  variable *dst;
  variable **dstp;
  int i, j, k;

  /* Find the slot for SRC's variable in SET's table.  */
  dstp = shared_hash_find_slot (vars: set->vars, dv: src->dv);
  if (!dstp || !*dstp)
    {
      /* SET has no entry for this variable yet: share SRC's entry
	 (bumping its refcount) rather than copying it.  */
      src->refcount++;

      dst_can_be_shared = false;
      if (!dstp)
	dstp = shared_hash_find_slot_unshare (pvars: &set->vars, dv: src->dv, ins: INSERT);

      *dstp = src;

      /* Continue traversing the hash table.  */
      return 1;
    }
  else
    dst = *dstp;

  gcc_assert (src->n_var_parts);
  gcc_checking_assert (src->onepart == dst->onepart);

  /* We can combine one-part variables very efficiently, because their
     entries are in canonical order.  */
  if (src->onepart)
    {
      location_chain **nodep, *dnode, *snode;

      gcc_assert (src->n_var_parts == 1
		  && dst->n_var_parts == 1);

      snode = src->var_part[0].loc_chain;
      gcc_assert (snode);

    restart_onepart_unshared:
      nodep = &dst->var_part[0].loc_chain;
      dnode = *nodep;
      gcc_assert (dnode);

      /* Merge the two loc_cmp-sorted chains in a single pass: a node
	 present only in SRC is copied in front of the first DST node
	 that compares greater than it.  */
      while (snode)
	{
	  int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1;

	  if (r > 0)
	    {
	      location_chain *nnode;

	      /* DST is about to be modified; unshare it first and
		 restart, since the chain pointers change.  */
	      if (shared_var_p (var: dst, vars: set->vars))
		{
		  dstp = unshare_variable (set, slot: dstp, var: dst,
					   initialized: VAR_INIT_STATUS_INITIALIZED);
		  dst = *dstp;
		  goto restart_onepart_unshared;
		}

	      *nodep = nnode = new location_chain;
	      nnode->loc = snode->loc;
	      nnode->init = snode->init;
	      /* MEM set_src values are not propagated to the union.  */
	      if (!snode->set_src || MEM_P (snode->set_src))
		nnode->set_src = NULL;
	      else
		nnode->set_src = snode->set_src;
	      nnode->next = dnode;
	      dnode = nnode;
	    }
	  else if (r == 0)
	    gcc_checking_assert (rtx_equal_p (dnode->loc, snode->loc));

	  if (r >= 0)
	    snode = snode->next;

	  nodep = &dnode->next;
	  dnode = *nodep;
	}

      return 1;
    }

  gcc_checking_assert (!src->onepart);

  /* Count the number of location parts, result is K.  */
  for (i = 0, j = 0, k = 0;
       i < src->n_var_parts && j < dst->n_var_parts; k++)
    {
      if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
	{
	  i++;
	  j++;
	}
      else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
	i++;
      else
	j++;
    }
  k += src->n_var_parts - i;
  k += dst->n_var_parts - j;

  /* We track only variables whose size is <= MAX_VAR_PARTS bytes
     thus there are at most MAX_VAR_PARTS different offsets.  */
  gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS);

  /* The number of parts will change, so DST must be unshared before
     it is resized.  */
  if (dst->n_var_parts != k && shared_var_p (var: dst, vars: set->vars))
    {
      dstp = unshare_variable (set, slot: dstp, var: dst, initialized: VAR_INIT_STATUS_UNKNOWN);
      dst = *dstp;
    }

  i = src->n_var_parts - 1;
  j = dst->n_var_parts - 1;
  dst->n_var_parts = k;

  /* Build the K parts from the highest offset down, so parts can be
     moved within DST without being overwritten first.  */
  for (k--; k >= 0; k--)
    {
      location_chain *node, *node2;

      if (i >= 0 && j >= 0
	  && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j))
	{
	  /* Compute the "sorted" union of the chains, i.e. the locations which
	     are in both chains go first, they are sorted by the sum of
	     positions in the chains.  */
	  int dst_l, src_l;
	  int ii, jj, n;
	  struct variable_union_info *vui;

	  /* If DST is shared compare the location chains.
	     If they are different we will modify the chain in DST with
	     high probability so make a copy of DST.  */
	  if (shared_var_p (var: dst, vars: set->vars))
	    {
	      for (node = src->var_part[i].loc_chain,
		   node2 = dst->var_part[j].loc_chain; node && node2;
		   node = node->next, node2 = node2->next)
		{
		  if (!((REG_P (node2->loc)
			 && REG_P (node->loc)
			 && REGNO (node2->loc) == REGNO (node->loc))
			|| rtx_equal_p (node2->loc, node->loc)))
		    {
		      if (node2->init < node->init)
			node2->init = node->init;
		      break;
		    }
		}
	      if (node || node2)
		{
		  dstp = unshare_variable (set, slot: dstp, var: dst,
					   initialized: VAR_INIT_STATUS_UNKNOWN);
		  dst = (variable *)*dstp;
		}
	    }

	  /* Measure both chains: SRC_L and DST_L bound the priority
	     values used below.  */
	  src_l = 0;
	  for (node = src->var_part[i].loc_chain; node; node = node->next)
	    src_l++;
	  dst_l = 0;
	  for (node = dst->var_part[j].loc_chain; node; node = node->next)
	    dst_l++;

	  if (dst_l == 1)
	    {
	      /* The most common case, much simpler, no qsort is needed.  */
	      location_chain *dstnode = dst->var_part[j].loc_chain;
	      dst->var_part[k].loc_chain = dstnode;
	      VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
	      node2 = dstnode;
	      /* Append every SRC location not equal to the single DST
		 location.  */
	      for (node = src->var_part[i].loc_chain; node; node = node->next)
		if (!((REG_P (dstnode->loc)
		       && REG_P (node->loc)
		       && REGNO (dstnode->loc) == REGNO (node->loc))
		      || rtx_equal_p (dstnode->loc, node->loc)))
		  {
		    location_chain *new_node;

		    /* Copy the location from SRC.  */
		    new_node = new location_chain;
		    new_node->loc = node->loc;
		    new_node->init = node->init;
		    if (!node->set_src || MEM_P (node->set_src))
		      new_node->set_src = NULL;
		    else
		      new_node->set_src = node->set_src;
		    node2->next = new_node;
		    node2 = new_node;
		  }
	      node2->next = NULL;
	    }
	  else
	    {
	      /* Grow the shared sorting buffer if needed.  */
	      if (src_l + dst_l > vui_allocated)
		{
		  vui_allocated = MAX (vui_allocated * 2, src_l + dst_l);
		  vui_vec = XRESIZEVEC (struct variable_union_info, vui_vec,
					vui_allocated);
		}
	      vui = vui_vec;

	      /* Fill in the locations from DST.  */
	      for (node = dst->var_part[j].loc_chain, jj = 0; node;
		   node = node->next, jj++)
		{
		  vui[jj].lc = node;
		  vui[jj].pos_dst = jj;

		  /* Pos plus value larger than a sum of 2 valid positions.  */
		  vui[jj].pos = jj + src_l + dst_l;
		}

	      /* Fill in the locations from SRC.  */
	      n = dst_l;
	      for (node = src->var_part[i].loc_chain, ii = 0; node;
		   node = node->next, ii++)
		{
		  /* Find location from NODE.  */
		  for (jj = 0; jj < dst_l; jj++)
		    {
		      if ((REG_P (vui[jj].lc->loc)
			   && REG_P (node->loc)
			   && REGNO (vui[jj].lc->loc) == REGNO (node->loc))
			  || rtx_equal_p (vui[jj].lc->loc, node->loc))
			{
			  /* Present in both chains: priority is the sum
			     of the positions.  */
			  vui[jj].pos = jj + ii;
			  break;
			}
		    }
		  if (jj >= dst_l)	/* The location has not been found.  */
		    {
		      location_chain *new_node;

		      /* Copy the location from SRC.  */
		      new_node = new location_chain;
		      new_node->loc = node->loc;
		      new_node->init = node->init;
		      if (!node->set_src || MEM_P (node->set_src))
			new_node->set_src = NULL;
		      else
			new_node->set_src = node->set_src;
		      vui[n].lc = new_node;
		      vui[n].pos_dst = src_l + dst_l;
		      vui[n].pos = ii + src_l + dst_l;
		      n++;
		    }
		}

	      if (dst_l == 2)
		{
		  /* Special case still very common case.  For dst_l == 2
		     all entries dst_l ... n-1 are sorted, with for i >= dst_l
		     vui[i].pos == i + src_l + dst_l.  */
		  if (vui[0].pos > vui[1].pos)
		    {
		      /* Order should be 1, 0, 2... */
		      dst->var_part[k].loc_chain = vui[1].lc;
		      vui[1].lc->next = vui[0].lc;
		      if (n >= 3)
			{
			  vui[0].lc->next = vui[2].lc;
			  vui[n - 1].lc->next = NULL;
			}
		      else
			vui[0].lc->next = NULL;
		      ii = 3;
		    }
		  else
		    {
		      dst->var_part[k].loc_chain = vui[0].lc;
		      if (n >= 3 && vui[2].pos < vui[1].pos)
			{
			  /* Order should be 0, 2, 1, 3... */
			  vui[0].lc->next = vui[2].lc;
			  vui[2].lc->next = vui[1].lc;
			  if (n >= 4)
			    {
			      vui[1].lc->next = vui[3].lc;
			      vui[n - 1].lc->next = NULL;
			    }
			  else
			    vui[1].lc->next = NULL;
			  ii = 4;
			}
		      else
			{
			  /* Order should be 0, 1, 2... */
			  ii = 1;
			  vui[n - 1].lc->next = NULL;
			}
		    }
		  /* Link the already-sorted tail entries.  */
		  for (; ii < n; ii++)
		    vui[ii - 1].lc->next = vui[ii].lc;
		}
	      else
		{
		  qsort (vui, n, sizeof (struct variable_union_info),
			 variable_union_info_cmp_pos);

		  /* Reconnect the nodes in sorted order.  */
		  for (ii = 1; ii < n; ii++)
		    vui[ii - 1].lc->next = vui[ii].lc;
		  vui[n - 1].lc->next = NULL;
		  dst->var_part[k].loc_chain = vui[0].lc;
		}

	      VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j);
	    }
	  i--;
	  j--;
	}
      else if ((i >= 0 && j >= 0
		&& VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j))
	       || i < 0)
	{
	  /* Offset only present in DST: move the part into place.  */
	  dst->var_part[k] = dst->var_part[j];
	  j--;
	}
      else if ((i >= 0 && j >= 0
		&& VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j))
	       || j < 0)
	{
	  location_chain **nextp;

	  /* Copy the chain from SRC.  */
	  nextp = &dst->var_part[k].loc_chain;
	  for (node = src->var_part[i].loc_chain; node; node = node->next)
	    {
	      location_chain *new_lc;

	      new_lc = new location_chain;
	      new_lc->next = NULL;
	      new_lc->init = node->init;
	      if (!node->set_src || MEM_P (node->set_src))
		new_lc->set_src = NULL;
	      else
		new_lc->set_src = node->set_src;
	      new_lc->loc = node->loc;

	      *nextp = new_lc;
	      nextp = &new_lc->next;
	    }

	  VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i);
	  i--;
	}
      dst->var_part[k].cur_loc = NULL;
    }

  /* When tracking uninitialized-use warnings, propagate the stronger
     initialization status from SRC onto matching DST locations.  */
  if (flag_var_tracking_uninit)
    for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++)
      {
	location_chain *node, *node2;
	for (node = src->var_part[i].loc_chain; node; node = node->next)
	  for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next)
	    if (rtx_equal_p (node->loc, node2->loc))
	      {
		if (node->init > node2->init)
		  node2->init = node->init;
	      }
      }

  /* Continue traversing the hash table.  */
  return 1;
}
3137 | |
3138 | /* Compute union of dataflow sets SRC and DST and store it to DST. */ |
3139 | |
3140 | static void |
3141 | dataflow_set_union (dataflow_set *dst, dataflow_set *src) |
3142 | { |
3143 | int i; |
3144 | |
3145 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
3146 | attrs_list_union (dstp: &dst->regs[i], src: src->regs[i]); |
3147 | |
3148 | if (dst->vars == empty_shared_hash) |
3149 | { |
3150 | shared_hash_destroy (vars: dst->vars); |
3151 | dst->vars = shared_hash_copy (vars: src->vars); |
3152 | } |
3153 | else |
3154 | { |
3155 | variable_iterator_type hi; |
3156 | variable *var; |
3157 | |
3158 | FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars), |
3159 | var, variable, hi) |
3160 | variable_union (src: var, set: dst); |
3161 | } |
3162 | } |
3163 | |
/* Whether the value is currently being expanded.  Reuses the rtx
   `used' flag bit of a VALUE or DEBUG_EXPR.  */
#define VALUE_RECURSED_INTO(x) \
  (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used)

/* Whether no expansion was found, saving useless lookups.
   It must only be set when VALUE_CHANGED is clear.  Reuses the rtx
   `return_val' flag bit.  */
#define NO_LOC_P(x) \
  (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val)

/* Whether cur_loc in the value needs to be (re)computed.  Reuses the
   rtx `frame_related' flag bit; only valid on VALUEs.  */
#define VALUE_CHANGED(x) \
  (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related)
/* Whether cur_loc in the decl needs to be (re)computed.  Reuses the
   tree's TREE_VISITED bit.  */
#define DECL_CHANGED(x) TREE_VISITED (x)
3178 | |
3179 | /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For |
3180 | user DECLs, this means they're in changed_variables. Values and |
3181 | debug exprs may be left with this flag set if no user variable |
3182 | requires them to be evaluated. */ |
3183 | |
3184 | static inline void |
3185 | set_dv_changed (decl_or_value dv, bool newv) |
3186 | { |
3187 | switch (dv_onepart_p (dv)) |
3188 | { |
3189 | case ONEPART_VALUE: |
3190 | if (newv) |
3191 | NO_LOC_P (dv_as_value (dv)) = false; |
3192 | VALUE_CHANGED (dv_as_value (dv)) = newv; |
3193 | break; |
3194 | |
3195 | case ONEPART_DEXPR: |
3196 | if (newv) |
3197 | NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false; |
3198 | /* Fall through. */ |
3199 | |
3200 | default: |
3201 | DECL_CHANGED (dv_as_decl (dv)) = newv; |
3202 | break; |
3203 | } |
3204 | } |
3205 | |
3206 | /* Return true if DV needs to have its cur_loc recomputed. */ |
3207 | |
3208 | static inline bool |
3209 | dv_changed_p (decl_or_value dv) |
3210 | { |
3211 | return (dv_is_value_p (dv) |
3212 | ? VALUE_CHANGED (dv_as_value (dv)) |
3213 | : DECL_CHANGED (dv_as_decl (dv))); |
3214 | } |
3215 | |
/* Return a location list node whose loc is rtx_equal to LOC, in the
   location list of a one-part variable or value VAR, or in that of
   any values recursively mentioned in the location lists.  VARS must
   be in star-canonical form.  */

static location_chain *
find_loc_in_1pdv (rtx loc, variable *var, variable_table_type *vars)
{
  location_chain *node;
  enum rtx_code loc_code;

  if (!var)
    return NULL;

  gcc_checking_assert (var->onepart);

  if (!var->n_var_parts)
    return NULL;

  gcc_checking_assert (var->dv != loc);

  loc_code = GET_CODE (loc);
  for (node = var->var_part[0].loc_chain; node; node = node->next)
    {
      decl_or_value dv;
      variable *rvar;

      /* A node with a different rtx code can never match LOC itself,
	 but a VALUE node may still lead to a match recursively.  */
      if (GET_CODE (node->loc) != loc_code)
	{
	  if (GET_CODE (node->loc) != VALUE)
	    continue;
	}
      else if (loc == node->loc)
	return node;
      else if (loc_code != VALUE)
	{
	  if (rtx_equal_p (loc, node->loc))
	    return node;
	  continue;
	}

      /* Since we're in star-canonical form, we don't need to visit
	 non-canonical nodes: one-part variables and non-canonical
	 values would only point back to the canonical node.  */
      if (dv_is_value_p (dv: var->dv)
	  && !canon_value_cmp (tval: node->loc, cval: dv_as_value (dv: var->dv)))
	{
	  /* Skip all subsequent VALUEs.  */
	  while (node->next && GET_CODE (node->next->loc) == VALUE)
	    {
	      node = node->next;
	      gcc_checking_assert (!canon_value_cmp (node->loc,
						     dv_as_value (var->dv)));
	      if (loc == node->loc)
		return node;
	    }
	  continue;
	}

      /* NODE is the canonical VALUE for this chain; it must be the
	 sole entry.  Recurse into that value's own location list.  */
      gcc_checking_assert (node == var->var_part[0].loc_chain);
      gcc_checking_assert (!node->next);

      dv = dv_from_value (value: node->loc);
      rvar = vars->find_with_hash (comparable: dv, hash: dv_htab_hash (dv));
      return find_loc_in_1pdv (loc, var: rvar, vars);
    }

  /* ??? Gotta look in cselib_val locations too.  */

  return NULL;
}
3287 | |
/* Hash table iteration argument passed to variable_merge.  */
struct dfset_merge
{
  /* The set in which the merge is to be inserted.  */
  dataflow_set *dst;
  /* The set that we're iterating in.  */
  dataflow_set *cur;
  /* The set that may contain the other dv we are to merge with.  */
  dataflow_set *src;
  /* Number of onepart dvs in src.  */
  int src_onepart_cnt;
};
3300 | |
3301 | /* Insert LOC in *DNODE, if it's not there yet. The list must be in |
3302 | loc_cmp order, and it is maintained as such. */ |
3303 | |
3304 | static void |
3305 | insert_into_intersection (location_chain **nodep, rtx loc, |
3306 | enum var_init_status status) |
3307 | { |
3308 | location_chain *node; |
3309 | int r; |
3310 | |
3311 | for (node = *nodep; node; nodep = &node->next, node = *nodep) |
3312 | if ((r = loc_cmp (node->loc, loc)) == 0) |
3313 | { |
3314 | node->init = MIN (node->init, status); |
3315 | return; |
3316 | } |
3317 | else if (r > 0) |
3318 | break; |
3319 | |
3320 | node = new location_chain; |
3321 | |
3322 | node->loc = loc; |
3323 | node->set_src = NULL; |
3324 | node->init = status; |
3325 | node->next = *nodep; |
3326 | *nodep = node; |
3327 | } |
3328 | |
/* Insert in DEST the intersection of the locations present in both
   S1NODE and S2VAR, directly or indirectly.  S1NODE is from a
   variable in DSM->cur, whereas S2VAR is from DSM->src.  dvar is in
   DSM->dst.  */

static void
intersect_loc_chains (rtx val, location_chain **dest, struct dfset_merge *dsm,
		      location_chain *s1node, variable *s2var)
{
  dataflow_set *s1set = dsm->cur;
  dataflow_set *s2set = dsm->src;
  location_chain *found;

  if (s2var)
    {
      location_chain *s2node;

      gcc_checking_assert (s2var->onepart);

      if (s2var->n_var_parts)
	{
	  s2node = s2var->var_part[0].loc_chain;

	  /* Fast path: walk the two chains in lockstep while the loc
	     rtxes are pointer-identical, inserting each common
	     location (other than VAL itself) with the weaker of the
	     two init statuses.  */
	  for (; s1node && s2node;
	       s1node = s1node->next, s2node = s2node->next)
	    if (s1node->loc != s2node->loc)
	      break;
	    else if (s1node->loc == val)
	      continue;
	    else
	      insert_into_intersection (nodep: dest, loc: s1node->loc,
					MIN (s1node->init, s2node->init));
	}
    }

  /* Slow path for the remainder of S1NODE's chain.  */
  for (; s1node; s1node = s1node->next)
    {
      if (s1node->loc == val)
	continue;

      /* Look for this location anywhere in S2VAR's list, including
	 through values it mentions recursively.  */
      if ((found = find_loc_in_1pdv (loc: s1node->loc, var: s2var,
				     vars: shared_hash_htab (vars: s2set->vars))))
	{
	  insert_into_intersection (nodep: dest, loc: s1node->loc,
				    MIN (s1node->init, found->init));
	  continue;
	}

      if (GET_CODE (s1node->loc) == VALUE
	  && !VALUE_RECURSED_INTO (s1node->loc))
	{
	  /* Recurse into the locations of this VALUE, using
	     VALUE_RECURSED_INTO to guard against cycles.  */
	  decl_or_value dv = dv_from_value (value: s1node->loc);
	  variable *svar = shared_hash_find (vars: s1set->vars, dv);
	  if (svar)
	    {
	      if (svar->n_var_parts == 1)
		{
		  VALUE_RECURSED_INTO (s1node->loc) = true;
		  intersect_loc_chains (val, dest, dsm,
					s1node: svar->var_part[0].loc_chain,
					s2var);
		  VALUE_RECURSED_INTO (s1node->loc) = false;
		}
	    }
	}

      /* ??? gotta look in cselib_val locations too.  */

      /* ??? if the location is equivalent to any location in src,
	 searched recursively

	 add to dst the values needed to represent the equivalence

	 telling whether locations S is equivalent to another dv's
	 location list:

	 for each location D in the list

	 if S and D satisfy rtx_equal_p, then it is present

	 else if D is a value, recurse without cycles

	 else if S and D have the same CODE and MODE

	 for each operand oS and the corresponding oD

	 if oS and oD are not equivalent, then S an D are not equivalent

	 else if they are RTX vectors

	 if any vector oS element is not equivalent to its respective oD,
	 then S and D are not equivalent

      */


    }
}
3427 | |
3428 | /* Return -1 if X should be before Y in a location list for a 1-part |
3429 | variable, 1 if Y should be before X, and 0 if they're equivalent |
3430 | and should not appear in the list. */ |
3431 | |
3432 | static int |
3433 | loc_cmp (rtx x, rtx y) |
3434 | { |
3435 | int i, j, r; |
3436 | RTX_CODE code = GET_CODE (x); |
3437 | const char *fmt; |
3438 | |
3439 | if (x == y) |
3440 | return 0; |
3441 | |
3442 | if (REG_P (x)) |
3443 | { |
3444 | if (!REG_P (y)) |
3445 | return -1; |
3446 | gcc_assert (GET_MODE (x) == GET_MODE (y)); |
3447 | if (REGNO (x) == REGNO (y)) |
3448 | return 0; |
3449 | else if (REGNO (x) < REGNO (y)) |
3450 | return -1; |
3451 | else |
3452 | return 1; |
3453 | } |
3454 | |
3455 | if (REG_P (y)) |
3456 | return 1; |
3457 | |
3458 | if (MEM_P (x)) |
3459 | { |
3460 | if (!MEM_P (y)) |
3461 | return -1; |
3462 | gcc_assert (GET_MODE (x) == GET_MODE (y)); |
3463 | return loc_cmp (XEXP (x, 0), XEXP (y, 0)); |
3464 | } |
3465 | |
3466 | if (MEM_P (y)) |
3467 | return 1; |
3468 | |
3469 | if (GET_CODE (x) == VALUE) |
3470 | { |
3471 | if (GET_CODE (y) != VALUE) |
3472 | return -1; |
3473 | /* Don't assert the modes are the same, that is true only |
3474 | when not recursing. (subreg:QI (value:SI 1:1) 0) |
3475 | and (subreg:QI (value:DI 2:2) 0) can be compared, |
3476 | even when the modes are different. */ |
3477 | if (canon_value_cmp (tval: x, cval: y)) |
3478 | return -1; |
3479 | else |
3480 | return 1; |
3481 | } |
3482 | |
3483 | if (GET_CODE (y) == VALUE) |
3484 | return 1; |
3485 | |
3486 | /* Entry value is the least preferable kind of expression. */ |
3487 | if (GET_CODE (x) == ENTRY_VALUE) |
3488 | { |
3489 | if (GET_CODE (y) != ENTRY_VALUE) |
3490 | return 1; |
3491 | gcc_assert (GET_MODE (x) == GET_MODE (y)); |
3492 | return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y)); |
3493 | } |
3494 | |
3495 | if (GET_CODE (y) == ENTRY_VALUE) |
3496 | return -1; |
3497 | |
3498 | if (GET_CODE (x) == GET_CODE (y)) |
3499 | /* Compare operands below. */; |
3500 | else if (GET_CODE (x) < GET_CODE (y)) |
3501 | return -1; |
3502 | else |
3503 | return 1; |
3504 | |
3505 | gcc_assert (GET_MODE (x) == GET_MODE (y)); |
3506 | |
3507 | if (GET_CODE (x) == DEBUG_EXPR) |
3508 | { |
3509 | if (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x)) |
3510 | < DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y))) |
3511 | return -1; |
3512 | gcc_checking_assert (DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x)) |
3513 | > DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (y))); |
3514 | return 1; |
3515 | } |
3516 | |
3517 | fmt = GET_RTX_FORMAT (code); |
3518 | for (i = 0; i < GET_RTX_LENGTH (code); i++) |
3519 | switch (fmt[i]) |
3520 | { |
3521 | case 'w': |
3522 | if (XWINT (x, i) == XWINT (y, i)) |
3523 | break; |
3524 | else if (XWINT (x, i) < XWINT (y, i)) |
3525 | return -1; |
3526 | else |
3527 | return 1; |
3528 | |
3529 | case 'n': |
3530 | case 'i': |
3531 | if (XINT (x, i) == XINT (y, i)) |
3532 | break; |
3533 | else if (XINT (x, i) < XINT (y, i)) |
3534 | return -1; |
3535 | else |
3536 | return 1; |
3537 | |
3538 | case 'p': |
3539 | r = compare_sizes_for_sort (SUBREG_BYTE (x), SUBREG_BYTE (y)); |
3540 | if (r != 0) |
3541 | return r; |
3542 | break; |
3543 | |
3544 | case 'V': |
3545 | case 'E': |
3546 | /* Compare the vector length first. */ |
3547 | if (XVECLEN (x, i) == XVECLEN (y, i)) |
3548 | /* Compare the vectors elements. */; |
3549 | else if (XVECLEN (x, i) < XVECLEN (y, i)) |
3550 | return -1; |
3551 | else |
3552 | return 1; |
3553 | |
3554 | for (j = 0; j < XVECLEN (x, i); j++) |
3555 | if ((r = loc_cmp (XVECEXP (x, i, j), |
3556 | XVECEXP (y, i, j)))) |
3557 | return r; |
3558 | break; |
3559 | |
3560 | case 'e': |
3561 | if ((r = loc_cmp (XEXP (x, i), XEXP (y, i)))) |
3562 | return r; |
3563 | break; |
3564 | |
3565 | case 'S': |
3566 | case 's': |
3567 | if (XSTR (x, i) == XSTR (y, i)) |
3568 | break; |
3569 | if (!XSTR (x, i)) |
3570 | return -1; |
3571 | if (!XSTR (y, i)) |
3572 | return 1; |
3573 | if ((r = strcmp (XSTR (x, i), XSTR (y, i))) == 0) |
3574 | break; |
3575 | else if (r < 0) |
3576 | return -1; |
3577 | else |
3578 | return 1; |
3579 | |
3580 | case 'u': |
3581 | /* These are just backpointers, so they don't matter. */ |
3582 | break; |
3583 | |
3584 | case '0': |
3585 | case 't': |
3586 | break; |
3587 | |
3588 | /* It is believed that rtx's at this level will never |
3589 | contain anything but integers and other rtx's, |
3590 | except for within LABEL_REFs and SYMBOL_REFs. */ |
3591 | default: |
3592 | gcc_unreachable (); |
3593 | } |
3594 | if (CONST_WIDE_INT_P (x)) |
3595 | { |
3596 | /* Compare the vector length first. */ |
3597 | if (CONST_WIDE_INT_NUNITS (x) >= CONST_WIDE_INT_NUNITS (y)) |
3598 | return 1; |
3599 | else if (CONST_WIDE_INT_NUNITS (x) < CONST_WIDE_INT_NUNITS (y)) |
3600 | return -1; |
3601 | |
3602 | /* Compare the vectors elements. */; |
3603 | for (j = CONST_WIDE_INT_NUNITS (x) - 1; j >= 0 ; j--) |
3604 | { |
3605 | if (CONST_WIDE_INT_ELT (x, j) < CONST_WIDE_INT_ELT (y, j)) |
3606 | return -1; |
3607 | if (CONST_WIDE_INT_ELT (x, j) > CONST_WIDE_INT_ELT (y, j)) |
3608 | return 1; |
3609 | } |
3610 | } |
3611 | |
3612 | return 0; |
3613 | } |
3614 | |
3615 | /* Check the order of entries in one-part variables. */ |
3616 | |
3617 | int |
3618 | canonicalize_loc_order_check (variable **slot, |
3619 | dataflow_set *data ATTRIBUTE_UNUSED) |
3620 | { |
3621 | variable *var = *slot; |
3622 | location_chain *node, *next; |
3623 | |
3624 | #ifdef ENABLE_RTL_CHECKING |
3625 | int i; |
3626 | for (i = 0; i < var->n_var_parts; i++) |
3627 | gcc_assert (var->var_part[0].cur_loc == NULL); |
3628 | gcc_assert (!var->in_changed_variables); |
3629 | #endif |
3630 | |
3631 | if (!var->onepart) |
3632 | return 1; |
3633 | |
3634 | gcc_assert (var->n_var_parts == 1); |
3635 | node = var->var_part[0].loc_chain; |
3636 | gcc_assert (node); |
3637 | |
3638 | while ((next = node->next)) |
3639 | { |
3640 | gcc_assert (loc_cmp (node->loc, next->loc) < 0); |
3641 | node = next; |
3642 | } |
3643 | |
3644 | return 1; |
3645 | } |
3646 | |
/* Mark with VALUE_RECURSED_INTO values that have neighbors that are
   more likely to be chosen as canonical for an equivalence set.
   Ensure less likely values can reach more likely neighbors, making
   the connections bidirectional.  Hash table traversal callback;
   always returns 1 to keep traversing.  */

int
canonicalize_values_mark (variable **slot, dataflow_set *set)
{
  variable *var = *slot;
  decl_or_value dv = var->dv;
  rtx val;
  location_chain *node;

  /* Only VALUE entries participate in canonicalization.  */
  if (!dv_is_value_p (dv))
    return 1;

  gcc_checking_assert (var->n_var_parts == 1);

  val = dv_as_value (dv);

  for (node = var->var_part[0].loc_chain; node; node = node->next)
    if (GET_CODE (node->loc) == VALUE)
      {
	if (canon_value_cmp (tval: node->loc, cval: val))
	  /* NODE->loc is preferred over VAL as canonical: mark VAL so
	     it gets revisited.  */
	  VALUE_RECURSED_INTO (val) = true;
	else
	  {
	    /* VAL is preferred: add VAL to the neighbor's own location
	       list so the connection is bidirectional, and mark the
	       neighbor for revisiting.  */
	    decl_or_value odv = dv_from_value (value: node->loc);
	    variable **oslot;
	    oslot = shared_hash_find_slot_noinsert (vars: set->vars, dv: odv);

	    set_slot_part (set, val, oslot, odv, 0,
			   node->init, NULL_RTX);

	    VALUE_RECURSED_INTO (node->loc) = true;
	  }
      }

  return 1;
}
3687 | |
3688 | /* Remove redundant entries from equivalence lists in onepart |
3689 | variables, canonicalizing equivalence sets into star shapes. */ |
3690 | |
3691 | int |
3692 | canonicalize_values_star (variable **slot, dataflow_set *set) |
3693 | { |
3694 | variable *var = *slot; |
3695 | decl_or_value dv = var->dv; |
3696 | location_chain *node; |
3697 | decl_or_value cdv; |
3698 | rtx val, cval; |
3699 | variable **cslot; |
3700 | bool has_value; |
3701 | bool has_marks; |
3702 | |
3703 | if (!var->onepart) |
3704 | return 1; |
3705 | |
3706 | gcc_checking_assert (var->n_var_parts == 1); |
3707 | |
3708 | if (dv_is_value_p (dv)) |
3709 | { |
3710 | cval = dv_as_value (dv); |
3711 | if (!VALUE_RECURSED_INTO (cval)) |
3712 | return 1; |
3713 | VALUE_RECURSED_INTO (cval) = false; |
3714 | } |
3715 | else |
3716 | cval = NULL_RTX; |
3717 | |
3718 | restart: |
3719 | val = cval; |
3720 | has_value = false; |
3721 | has_marks = false; |
3722 | |
3723 | gcc_assert (var->n_var_parts == 1); |
3724 | |
3725 | for (node = var->var_part[0].loc_chain; node; node = node->next) |
3726 | if (GET_CODE (node->loc) == VALUE) |
3727 | { |
3728 | has_value = true; |
3729 | if (VALUE_RECURSED_INTO (node->loc)) |
3730 | has_marks = true; |
3731 | if (canon_value_cmp (tval: node->loc, cval)) |
3732 | cval = node->loc; |
3733 | } |
3734 | |
3735 | if (!has_value) |
3736 | return 1; |
3737 | |
3738 | if (cval == val) |
3739 | { |
3740 | if (!has_marks || dv_is_decl_p (dv)) |
3741 | return 1; |
3742 | |
3743 | /* Keep it marked so that we revisit it, either after visiting a |
3744 | child node, or after visiting a new parent that might be |
3745 | found out. */ |
3746 | VALUE_RECURSED_INTO (val) = true; |
3747 | |
3748 | for (node = var->var_part[0].loc_chain; node; node = node->next) |
3749 | if (GET_CODE (node->loc) == VALUE |
3750 | && VALUE_RECURSED_INTO (node->loc)) |
3751 | { |
3752 | cval = node->loc; |
3753 | restart_with_cval: |
3754 | VALUE_RECURSED_INTO (cval) = false; |
3755 | dv = dv_from_value (value: cval); |
3756 | slot = shared_hash_find_slot_noinsert (vars: set->vars, dv); |
3757 | if (!slot) |
3758 | { |
3759 | gcc_assert (dv_is_decl_p (var->dv)); |
3760 | /* The canonical value was reset and dropped. |
3761 | Remove it. */ |
3762 | clobber_variable_part (set, NULL, var->dv, 0, NULL); |
3763 | return 1; |
3764 | } |
3765 | var = *slot; |
3766 | gcc_assert (dv_is_value_p (var->dv)); |
3767 | if (var->n_var_parts == 0) |
3768 | return 1; |
3769 | gcc_assert (var->n_var_parts == 1); |
3770 | goto restart; |
3771 | } |
3772 | |
3773 | VALUE_RECURSED_INTO (val) = false; |
3774 | |
3775 | return 1; |
3776 | } |
3777 | |
3778 | /* Push values to the canonical one. */ |
3779 | cdv = dv_from_value (value: cval); |
3780 | cslot = shared_hash_find_slot_noinsert (vars: set->vars, dv: cdv); |
3781 | |
3782 | for (node = var->var_part[0].loc_chain; node; node = node->next) |
3783 | if (node->loc != cval) |
3784 | { |
3785 | cslot = set_slot_part (set, node->loc, cslot, cdv, 0, |
3786 | node->init, NULL_RTX); |
3787 | if (GET_CODE (node->loc) == VALUE) |
3788 | { |
3789 | decl_or_value ndv = dv_from_value (value: node->loc); |
3790 | |
3791 | set_variable_part (set, cval, ndv, 0, node->init, NULL_RTX, |
3792 | NO_INSERT); |
3793 | |
3794 | if (canon_value_cmp (tval: node->loc, cval: val)) |
3795 | { |
3796 | /* If it could have been a local minimum, it's not any more, |
3797 | since it's now neighbor to cval, so it may have to push |
3798 | to it. Conversely, if it wouldn't have prevailed over |
3799 | val, then whatever mark it has is fine: if it was to |
3800 | push, it will now push to a more canonical node, but if |
3801 | it wasn't, then it has already pushed any values it might |
3802 | have to. */ |
3803 | VALUE_RECURSED_INTO (node->loc) = true; |
3804 | /* Make sure we visit node->loc by ensuring we cval is |
3805 | visited too. */ |
3806 | VALUE_RECURSED_INTO (cval) = true; |
3807 | } |
3808 | else if (!VALUE_RECURSED_INTO (node->loc)) |
3809 | /* If we have no need to "recurse" into this node, it's |
3810 | already "canonicalized", so drop the link to the old |
3811 | parent. */ |
3812 | clobber_variable_part (set, cval, ndv, 0, NULL); |
3813 | } |
3814 | else if (GET_CODE (node->loc) == REG) |
3815 | { |
3816 | attrs *list = set->regs[REGNO (node->loc)], **listp; |
3817 | |
3818 | /* Change an existing attribute referring to dv so that it |
3819 | refers to cdv, removing any duplicate this might |
3820 | introduce, and checking that no previous duplicates |
3821 | existed, all in a single pass. */ |
3822 | |
3823 | while (list) |
3824 | { |
3825 | if (list->offset == 0 && (list->dv == dv || list->dv == cdv)) |
3826 | break; |
3827 | |
3828 | list = list->next; |
3829 | } |
3830 | |
3831 | gcc_assert (list); |
3832 | if (list->dv == dv) |
3833 | { |
3834 | list->dv = cdv; |
3835 | for (listp = &list->next; (list = *listp); listp = &list->next) |
3836 | { |
3837 | if (list->offset) |
3838 | continue; |
3839 | |
3840 | if (list->dv == cdv) |
3841 | { |
3842 | *listp = list->next; |
3843 | delete list; |
3844 | list = *listp; |
3845 | break; |
3846 | } |
3847 | |
3848 | gcc_assert (list->dv != dv); |
3849 | } |
3850 | } |
3851 | else if (list->dv == cdv) |
3852 | { |
3853 | for (listp = &list->next; (list = *listp); listp = &list->next) |
3854 | { |
3855 | if (list->offset) |
3856 | continue; |
3857 | |
3858 | if (list->dv == dv) |
3859 | { |
3860 | *listp = list->next; |
3861 | delete list; |
3862 | list = *listp; |
3863 | break; |
3864 | } |
3865 | |
3866 | gcc_assert (list->dv != cdv); |
3867 | } |
3868 | } |
3869 | else |
3870 | gcc_unreachable (); |
3871 | |
3872 | if (flag_checking) |
3873 | while (list) |
3874 | { |
3875 | if (list->offset == 0 && (list->dv == dv || list->dv == cdv)) |
3876 | gcc_unreachable (); |
3877 | |
3878 | list = list->next; |
3879 | } |
3880 | } |
3881 | } |
3882 | |
3883 | if (val) |
3884 | set_slot_part (set, val, cslot, cdv, 0, |
3885 | VAR_INIT_STATUS_INITIALIZED, NULL_RTX); |
3886 | |
3887 | slot = clobber_slot_part (set, cval, slot, 0, NULL); |
3888 | |
3889 | /* Variable may have been unshared. */ |
3890 | var = *slot; |
3891 | gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval |
3892 | && var->var_part[0].loc_chain->next == NULL); |
3893 | |
3894 | if (VALUE_RECURSED_INTO (cval)) |
3895 | goto restart_with_cval; |
3896 | |
3897 | return 1; |
3898 | } |
3899 | |
3900 | /* Bind one-part variables to the canonical value in an equivalence |
3901 | set. Not doing this causes dataflow convergence failure in rare |
3902 | circumstances, see PR42873. Unfortunately we can't do this |
3903 | efficiently as part of canonicalize_values_star, since we may not |
3904 | have determined or even seen the canonical value of a set when we |
3905 | get to a variable that references another member of the set. */ |
3906 | |
3907 | int |
3908 | canonicalize_vars_star (variable **slot, dataflow_set *set) |
3909 | { |
3910 | variable *var = *slot; |
3911 | decl_or_value dv = var->dv; |
3912 | location_chain *node; |
3913 | rtx cval; |
3914 | decl_or_value cdv; |
3915 | variable **cslot; |
3916 | variable *cvar; |
3917 | location_chain *cnode; |
3918 | |
3919 | if (!var->onepart || var->onepart == ONEPART_VALUE) |
3920 | return 1; |
3921 | |
3922 | gcc_assert (var->n_var_parts == 1); |
3923 | |
3924 | node = var->var_part[0].loc_chain; |
3925 | |
3926 | if (GET_CODE (node->loc) != VALUE) |
3927 | return 1; |
3928 | |
3929 | gcc_assert (!node->next); |
3930 | cval = node->loc; |
3931 | |
3932 | /* Push values to the canonical one. */ |
3933 | cdv = dv_from_value (value: cval); |
3934 | cslot = shared_hash_find_slot_noinsert (vars: set->vars, dv: cdv); |
3935 | if (!cslot) |
3936 | return 1; |
3937 | cvar = *cslot; |
3938 | gcc_assert (cvar->n_var_parts == 1); |
3939 | |
3940 | cnode = cvar->var_part[0].loc_chain; |
3941 | |
3942 | /* CVAL is canonical if its value list contains non-VALUEs or VALUEs |
3943 | that are not “more canonical” than it. */ |
3944 | if (GET_CODE (cnode->loc) != VALUE |
3945 | || !canon_value_cmp (tval: cnode->loc, cval)) |
3946 | return 1; |
3947 | |
3948 | /* CVAL was found to be non-canonical. Change the variable to point |
3949 | to the canonical VALUE. */ |
3950 | gcc_assert (!cnode->next); |
3951 | cval = cnode->loc; |
3952 | |
3953 | slot = set_slot_part (set, cval, slot, dv, 0, |
3954 | node->init, node->set_src); |
3955 | clobber_slot_part (set, cval, slot, 0, node->set_src); |
3956 | |
3957 | return 1; |
3958 | } |
3959 | |
/* Combine variable or value S1VAR (in DSM->cur) with the
   corresponding entry in DSM->src.  Multi-part variables are combined
   with variable_union, whereas onepart dvs are combined with
   intersection.  */
3964 | |
static int
variable_merge_over_cur (variable *s1var, struct dfset_merge *dsm)
{
  dataflow_set *dst = dsm->dst;
  variable **dstslot;
  variable *s2var, *dvar = NULL;
  decl_or_value dv = s1var->dv;
  onepart_enum onepart = s1var->onepart;
  rtx val;
  hashval_t dvhash;
  location_chain *node, **nodep;

  /* If the incoming onepart variable has an empty location list, then
     the intersection will be just as empty.  For other variables,
     it's always union.  */
  gcc_checking_assert (s1var->n_var_parts
		       && s1var->var_part[0].loc_chain);

  if (!onepart)
    return variable_union (src: s1var, set: dst);

  gcc_checking_assert (s1var->n_var_parts == 1);

  /* If DV is itself a VALUE, remember it so mutual equivalences can
     be recorded below.  */
  dvhash = dv_htab_hash (dv);
  if (dv_is_value_p (dv))
    val = dv_as_value (dv);
  else
    val = NULL;

  /* If there is no corresponding entry in DSM->src, the intersection
     is empty and no destination entry is created.  */
  s2var = shared_hash_find_1 (vars: dsm->src->vars, dv, dvhash);
  if (!s2var)
    {
      dst_can_be_shared = false;
      return 1;
    }

  dsm->src_onepart_cnt--;
  gcc_assert (s2var->var_part[0].loc_chain
	      && s2var->onepart == onepart
	      && s2var->n_var_parts == 1);

  /* Reuse a preexisting destination entry if there is one; the assert
     checks it is unshared, so it can be modified in place.  */
  dstslot = shared_hash_find_slot_noinsert_1 (vars: dst->vars, dv, dvhash);
  if (dstslot)
    {
      dvar = *dstslot;
      gcc_assert (dvar->refcount == 1
		  && dvar->onepart == onepart
		  && dvar->n_var_parts == 1);
      nodep = &dvar->var_part[0].loc_chain;
    }
  else
    {
      nodep = &node;
      node = NULL;
    }

  /* If the source entries are identical and DST has no entry yet,
     share S2VAR instead of computing an intersection.  */
  if (!dstslot && !onepart_variable_different_p (s1var, s2var))
    {
      dstslot = shared_hash_find_slot_unshare_1 (pvars: &dst->vars, dv,
						 dvhash, ins: INSERT);
      *dstslot = dvar = s2var;
      dvar->refcount++;
    }
  else
    {
      dst_can_be_shared = false;

      intersect_loc_chains (val, dest: nodep, dsm,
			    s1node: s1var->var_part[0].loc_chain, s2var);

      if (!dstslot)
	{
	  /* Build a fresh destination entry to hold the intersected
	     location chain, if it came out non-empty.  */
	  if (node)
	    {
	      dvar = onepart_pool_allocate (onepart);
	      dvar->dv = dv;
	      dvar->refcount = 1;
	      dvar->n_var_parts = 1;
	      dvar->onepart = onepart;
	      dvar->in_changed_variables = false;
	      dvar->var_part[0].loc_chain = node;
	      dvar->var_part[0].cur_loc = NULL;
	      if (onepart)
		VAR_LOC_1PAUX (dvar) = NULL;
	      else
		VAR_PART_OFFSET (dvar, 0) = 0;

	      dstslot
		= shared_hash_find_slot_unshare_1 (pvars: &dst->vars, dv, dvhash,
						   ins: INSERT);
	      gcc_assert (!*dstslot);
	      *dstslot = dvar;
	    }
	  else
	    return 1;
	}
    }

  /* Walk the leading REG locations of the result (canonicalization
     puts registers first).  Record each in DST's register attribute
     lists, or, if the register already maps to some other VALUE,
     replace the REG location with that VALUE.  */
  nodep = &dvar->var_part[0].loc_chain;
  while ((node = *nodep))
    {
      location_chain **nextp = &node->next;

      if (GET_CODE (node->loc) == REG)
	{
	  attrs *list;

	  for (list = dst->regs[REGNO (node->loc)]; list; list = list->next)
	    if (GET_MODE (node->loc) == GET_MODE (list->loc)
		&& dv_is_value_p (dv: list->dv))
	      break;

	  if (!list)
	    attrs_list_insert (listp: &dst->regs[REGNO (node->loc)],
			       dv, offset: 0, loc: node->loc);
	  /* If this value became canonical for another value that had
	     this register, we want to leave it alone.  */
	  else if (dv_as_value (dv: list->dv) != val)
	    {
	      dstslot = set_slot_part (dst, dv_as_value (dv: list->dv),
				       dstslot, dv, 0,
				       node->init, NULL_RTX);
	      dstslot = delete_slot_part (dst, node->loc, dstslot, 0);

	      /* Since nextp points into the removed node, we can't
		 use it.  The pointer to the next node moved to nodep.
		 However, if the variable we're walking is unshared
		 during our walk, we'll keep walking the location list
		 of the previously-shared variable, in which case the
		 node won't have been removed, and we'll want to skip
		 it.  That's why we test *nodep here.  */
	      if (*nodep != node)
		nextp = nodep;
	    }
	}
      else
	/* Canonicalization puts registers first, so we don't have to
	   walk it all.  */
	break;
      nodep = nextp;
    }

  /* The slot contents may have been replaced (unshared) by the calls
     above; refresh the cached pointers.  */
  if (dvar != *dstslot)
    dvar = *dstslot;
  nodep = &dvar->var_part[0].loc_chain;

  if (val)
    {
      /* Mark all referenced nodes for canonicalization, and make sure
	 we have mutual equivalence links.  */
      VALUE_RECURSED_INTO (val) = true;
      for (node = *nodep; node; node = node->next)
	if (GET_CODE (node->loc) == VALUE)
	  {
	    VALUE_RECURSED_INTO (node->loc) = true;
	    set_variable_part (dst, val, dv_from_value (value: node->loc), 0,
			       node->init, NULL, INSERT);
	  }

      dstslot = shared_hash_find_slot_noinsert_1 (vars: dst->vars, dv, dvhash);
      gcc_assert (*dstslot == dvar);
      canonicalize_values_star (slot: dstslot, set: dst);
      gcc_checking_assert (dstslot
			   == shared_hash_find_slot_noinsert_1 (dst->vars,
								dv, dvhash));
      dvar = *dstslot;
    }
  else
    {
      bool has_value = false, has_other = false;

      /* If we have one value and anything else, we're going to
	 canonicalize this, so make sure all values have an entry in
	 the table and are marked for canonicalization.  */
      for (node = *nodep; node; node = node->next)
	{
	  if (GET_CODE (node->loc) == VALUE)
	    {
	      /* If this was marked during register canonicalization,
		 we know we have to canonicalize values.  */
	      if (has_value)
		has_other = true;
	      has_value = true;
	      if (has_other)
		break;
	    }
	  else
	    {
	      has_other = true;
	      if (has_value)
		break;
	    }
	}

      if (has_value && has_other)
	{
	  for (node = *nodep; node; node = node->next)
	    {
	      if (GET_CODE (node->loc) == VALUE)
		{
		  decl_or_value dv = dv_from_value (value: node->loc);
		  variable **slot = NULL;

		  if (shared_hash_shared (vars: dst->vars))
		    slot = shared_hash_find_slot_noinsert (vars: dst->vars, dv);
		  if (!slot)
		    slot = shared_hash_find_slot_unshare (pvars: &dst->vars, dv,
							  ins: INSERT);
		  /* Create an empty entry for the VALUE if it had
		     none, so canonicalization can record into it.  */
		  if (!*slot)
		    {
		      variable *var = onepart_pool_allocate (onepart: ONEPART_VALUE);
		      var->dv = dv;
		      var->refcount = 1;
		      var->n_var_parts = 1;
		      var->onepart = ONEPART_VALUE;
		      var->in_changed_variables = false;
		      var->var_part[0].loc_chain = NULL;
		      var->var_part[0].cur_loc = NULL;
		      VAR_LOC_1PAUX (var) = NULL;
		      *slot = var;
		    }

		  VALUE_RECURSED_INTO (node->loc) = true;
		}
	    }

	  dstslot = shared_hash_find_slot_noinsert_1 (vars: dst->vars, dv, dvhash);
	  gcc_assert (*dstslot == dvar);
	  canonicalize_values_star (slot: dstslot, set: dst);
	  gcc_checking_assert (dstslot
			       == shared_hash_find_slot_noinsert_1 (dst->vars,
								    dv, dvhash));
	  dvar = *dstslot;
	}
    }

  /* If the intersection turned out equal to one of the inputs, drop
     the private copy and share that input's entry instead.  */
  if (!onepart_variable_different_p (dvar, s2var))
    {
      variable_htab_free (elem: dvar);
      *dstslot = dvar = s2var;
      dvar->refcount++;
    }
  else if (s2var != s1var && !onepart_variable_different_p (dvar, s1var))
    {
      variable_htab_free (elem: dvar);
      *dstslot = dvar = s1var;
      dvar->refcount++;
      dst_can_be_shared = false;
    }
  else
    dst_can_be_shared = false;

  return 1;
}
4219 | |
/* Copy S2VAR (in DSM->src) to DSM->dst if the variable is a
   multi-part variable.  Unions of multi-part variables and
   intersections of one-part ones will be handled in
   variable_merge_over_cur().  */
4224 | |
4225 | static int |
4226 | variable_merge_over_src (variable *s2var, struct dfset_merge *dsm) |
4227 | { |
4228 | dataflow_set *dst = dsm->dst; |
4229 | decl_or_value dv = s2var->dv; |
4230 | |
4231 | if (!s2var->onepart) |
4232 | { |
4233 | variable **dstp = shared_hash_find_slot (vars: dst->vars, dv); |
4234 | *dstp = s2var; |
4235 | s2var->refcount++; |
4236 | return 1; |
4237 | } |
4238 | |
4239 | dsm->src_onepart_cnt++; |
4240 | return 1; |
4241 | } |
4242 | |
/* Combine dataflow set information from SRC2 into DST.  */
4245 | |
4246 | static void |
4247 | dataflow_set_merge (dataflow_set *dst, dataflow_set *src2) |
4248 | { |
4249 | dataflow_set cur = *dst; |
4250 | dataflow_set *src1 = &cur; |
4251 | struct dfset_merge dsm; |
4252 | int i; |
4253 | size_t src1_elems, src2_elems; |
4254 | variable_iterator_type hi; |
4255 | variable *var; |
4256 | |
4257 | src1_elems = shared_hash_htab (vars: src1->vars)->elements (); |
4258 | src2_elems = shared_hash_htab (vars: src2->vars)->elements (); |
4259 | dataflow_set_init (set: dst); |
4260 | dst->stack_adjust = cur.stack_adjust; |
4261 | shared_hash_destroy (vars: dst->vars); |
4262 | dst->vars = new shared_hash; |
4263 | dst->vars->refcount = 1; |
4264 | dst->vars->htab = new variable_table_type (MAX (src1_elems, src2_elems)); |
4265 | |
4266 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
4267 | attrs_list_mpdv_union (dstp: &dst->regs[i], src: src1->regs[i], src2: src2->regs[i]); |
4268 | |
4269 | dsm.dst = dst; |
4270 | dsm.src = src2; |
4271 | dsm.cur = src1; |
4272 | dsm.src_onepart_cnt = 0; |
4273 | |
4274 | FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.src->vars), |
4275 | var, variable, hi) |
4276 | variable_merge_over_src (s2var: var, dsm: &dsm); |
4277 | FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.cur->vars), |
4278 | var, variable, hi) |
4279 | variable_merge_over_cur (s1var: var, dsm: &dsm); |
4280 | |
4281 | if (dsm.src_onepart_cnt) |
4282 | dst_can_be_shared = false; |
4283 | |
4284 | dataflow_set_destroy (src1); |
4285 | } |
4286 | |
4287 | /* Mark register equivalences. */ |
4288 | |
static void
dataflow_set_equiv_regs (dataflow_set *set)
{
  int i;
  attrs *list, **listp;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      /* Per-mode choice of canonical VALUE for register I.  */
      rtx canon[NUM_MACHINE_MODES];

      /* If the list is empty or one entry, no need to canonicalize
	 anything.  */
      if (set->regs[i] == NULL || set->regs[i]->next == NULL)
	continue;

      memset (s: canon, c: 0, n: sizeof (canon));

      /* First pass: for each machine mode, pick the most canonical
	 VALUE bound to register I at offset 0.  */
      for (list = set->regs[i]; list; list = list->next)
	if (list->offset == 0 && dv_is_value_p (dv: list->dv))
	  {
	    rtx val = dv_as_value (dv: list->dv);
	    rtx *cvalp = &canon[(int)GET_MODE (val)];
	    rtx cval = *cvalp;

	    if (canon_value_cmp (tval: val, cval))
	      *cvalp = val;
	  }

      /* Second pass: record mutual equivalences between each onepart
	 dv on the register and the chosen canonical value of the same
	 mode, marking the involved VALUEs for the pass below.  */
      for (list = set->regs[i]; list; list = list->next)
	if (list->offset == 0 && dv_onepart_p (dv: list->dv))
	  {
	    rtx cval = canon[(int)GET_MODE (list->loc)];

	    if (!cval)
	      continue;

	    if (dv_is_value_p (dv: list->dv))
	      {
		rtx val = dv_as_value (dv: list->dv);

		if (val == cval)
		  continue;

		VALUE_RECURSED_INTO (val) = true;
		set_variable_part (set, val, dv_from_value (value: cval), 0,
				   VAR_INIT_STATUS_INITIALIZED,
				   NULL, NO_INSERT);
	      }

	    VALUE_RECURSED_INTO (cval) = true;
	    set_variable_part (set, cval, list->dv, 0,
			       VAR_INIT_STATUS_INITIALIZED, NULL, NO_INSERT);
	  }

      /* Third pass: canonicalize the equivalence sets of the dvs
	 still marked above.  canonicalize_values_star may unlink the
	 entry we're standing on; when *LISTP no longer points to
	 LIST, clear LIST so the loop re-reads *LISTP in place instead
	 of advancing.  */
      for (listp = &set->regs[i]; (list = *listp);
	   listp = list ? &list->next : listp)
	if (list->offset == 0 && dv_onepart_p (dv: list->dv))
	  {
	    rtx cval = canon[(int)GET_MODE (list->loc)];
	    variable **slot;

	    if (!cval)
	      continue;

	    if (dv_is_value_p (dv: list->dv))
	      {
		rtx val = dv_as_value (dv: list->dv);
		if (!VALUE_RECURSED_INTO (val))
		  continue;
	      }

	    slot = shared_hash_find_slot_noinsert (vars: set->vars, dv: list->dv);
	    canonicalize_values_star (slot, set);
	    if (*listp != list)
	      list = NULL;
	  }
    }
}
4367 | |
4368 | /* Remove any redundant values in the location list of VAR, which must |
4369 | be unshared and 1-part. */ |
4370 | |
4371 | static void |
4372 | remove_duplicate_values (variable *var) |
4373 | { |
4374 | location_chain *node, **nodep; |
4375 | |
4376 | gcc_assert (var->onepart); |
4377 | gcc_assert (var->n_var_parts == 1); |
4378 | gcc_assert (var->refcount == 1); |
4379 | |
4380 | for (nodep = &var->var_part[0].loc_chain; (node = *nodep); ) |
4381 | { |
4382 | if (GET_CODE (node->loc) == VALUE) |
4383 | { |
4384 | if (VALUE_RECURSED_INTO (node->loc)) |
4385 | { |
4386 | /* Remove duplicate value node. */ |
4387 | *nodep = node->next; |
4388 | delete node; |
4389 | continue; |
4390 | } |
4391 | else |
4392 | VALUE_RECURSED_INTO (node->loc) = true; |
4393 | } |
4394 | nodep = &node->next; |
4395 | } |
4396 | |
4397 | for (node = var->var_part[0].loc_chain; node; node = node->next) |
4398 | if (GET_CODE (node->loc) == VALUE) |
4399 | { |
4400 | gcc_assert (VALUE_RECURSED_INTO (node->loc)); |
4401 | VALUE_RECURSED_INTO (node->loc) = false; |
4402 | } |
4403 | } |
4404 | |
4405 | |
/* Hash table iteration argument passed to the variable_post_merge
   callbacks (variable_post_merge_new_vals and
   variable_post_merge_perm_vals).  */
struct dfset_post_merge
{
  /* The new input set for the current block.  */
  dataflow_set *set;
  /* Pointer to the permanent input set for the current block, or
     NULL.  */
  dataflow_set **permp;
};
4415 | |
4416 | /* Create values for incoming expressions associated with one-part |
4417 | variables that don't have value numbers for them. */ |
4418 | |
int
variable_post_merge_new_vals (variable **slot, dfset_post_merge *dfpm)
{
  dataflow_set *set = dfpm->set;
  variable *var = *slot;
  location_chain *node;

  /* Only onepart variables that still have locations need work.  */
  if (!var->onepart || !var->n_var_parts)
    return 1;

  gcc_assert (var->n_var_parts == 1);

  if (dv_is_decl_p (dv: var->dv))
    {
      bool check_dupes = false;

    restart:
      for (node = var->var_part[0].loc_chain; node; node = node->next)
	{
	  if (GET_CODE (node->loc) == VALUE)
	    gcc_assert (!VALUE_RECURSED_INTO (node->loc));
	  else if (GET_CODE (node->loc) == REG)
	    {
	      attrs *att, **attp, **curp = NULL;

	      /* We're about to rewrite this location list, so make
		 sure the variable isn't shared with another set;
		 unsharing copies the chain, so restart the walk.  */
	      if (var->refcount != 1)
		{
		  slot = unshare_variable (set, slot, var,
					   initialized: VAR_INIT_STATUS_INITIALIZED);
		  var = *slot;
		  goto restart;
		}

	      /* Search the register's attribute list for a VALUE of
		 the same mode to use instead of the bare REG; along
		 the way, remember (in CURP) where this decl's own
		 binding to the register is, for removal below.  */
	      for (attp = &set->regs[REGNO (node->loc)]; (att = *attp);
		   attp = &att->next)
		if (att->offset == 0
		    && GET_MODE (att->loc) == GET_MODE (node->loc))
		  {
		    if (dv_is_value_p (dv: att->dv))
		      {
			rtx cval = dv_as_value (dv: att->dv);
			node->loc = cval;
			check_dupes = true;
			break;
		      }
		    else if (att->dv == var->dv)
		      curp = attp;
		  }

	      /* If the loop above broke out early, CURP may not have
		 been found yet; continue the search for it.  */
	      if (!curp)
		{
		  curp = attp;
		  while (*curp)
		    if ((*curp)->offset == 0
			&& GET_MODE ((*curp)->loc) == GET_MODE (node->loc)
			&& (*curp)->dv == var->dv)
		      break;
		    else
		      curp = &(*curp)->next;
		  gcc_assert (*curp);
		}

	      /* No VALUE was bound to the register in this set; find
		 or create one in the permanent set.  */
	      if (!att)
		{
		  decl_or_value cdv;
		  rtx cval;

		  if (!*dfpm->permp)
		    {
		      *dfpm->permp = XNEW (dataflow_set);
		      dataflow_set_init (set: *dfpm->permp);
		    }

		  for (att = (*dfpm->permp)->regs[REGNO (node->loc)];
		       att; att = att->next)
		    if (GET_MODE (att->loc) == GET_MODE (node->loc))
		      {
			gcc_assert (att->offset == 0
				    && dv_is_value_p (att->dv));
			val_reset (set, dv: att->dv);
			break;
		      }

		  if (att)
		    {
		      cdv = att->dv;
		      cval = dv_as_value (dv: cdv);
		    }
		  else
		    {
		      /* Create a unique value to hold this register,
			 that ought to be found and reused in
			 subsequent rounds.  */
		      cselib_val *v;
		      gcc_assert (!cselib_lookup (node->loc,
						  GET_MODE (node->loc), 0,
						  VOIDmode));
		      v = cselib_lookup (node->loc, GET_MODE (node->loc), 1,
					 VOIDmode);
		      cselib_preserve_value (v);
		      cselib_invalidate_rtx (node->loc);
		      cval = v->val_rtx;
		      cdv = dv_from_value (value: cval);
		      if (dump_file)
			fprintf (stream: dump_file,
				 format: "Created new value %u:%u for reg %i\n" ,
				 v->uid, v->hash, REGNO (node->loc));
		    }

		  var_reg_decl_set (set: *dfpm->permp, loc: node->loc,
				    initialized: VAR_INIT_STATUS_INITIALIZED,
				    dv: cdv, offset: 0, NULL, iopt: INSERT);

		  node->loc = cval;
		  check_dupes = true;
		}

	      /* Remove attribute referring to the decl, which now
		 uses the value for the register, already existing or
		 to be added when we bring perm in.  */
	      att = *curp;
	      *curp = att->next;
	      delete att;
	    }
	}

      /* Replacing REGs with VALUEs above may have introduced the same
	 VALUE twice in the chain; drop duplicates.  */
      if (check_dupes)
	remove_duplicate_values (var);
    }

  return 1;
}
4551 | |
4552 | /* Reset values in the permanent set that are not associated with the |
4553 | chosen expression. */ |
4554 | |
int
variable_post_merge_perm_vals (variable **pslot, dfset_post_merge *dfpm)
{
  dataflow_set *set = dfpm->set;
  variable *pvar = *pslot, *var;
  location_chain *pnode;
  decl_or_value dv;
  attrs *att;

  /* Permanent-set entries bind a VALUE to exactly one REG.  */
  gcc_assert (dv_is_value_p (pvar->dv)
	      && pvar->n_var_parts == 1);
  pnode = pvar->var_part[0].loc_chain;
  gcc_assert (pnode
	      && !pnode->next
	      && REG_P (pnode->loc));

  dv = pvar->dv;

  var = shared_hash_find (vars: set->vars, dv);
  if (var)
    {
      /* Although variable_post_merge_new_vals may have made decls
	 non-star-canonical, values that pre-existed in canonical form
	 remain canonical, and newly-created values reference a single
	 REG, so they are canonical as well.  Since VAR has the
	 location list for a VALUE, using find_loc_in_1pdv for it is
	 fine, since VALUEs don't map back to DECLs.  */
      if (find_loc_in_1pdv (loc: pnode->loc, var, vars: shared_hash_htab (vars: set->vars)))
	return 1;
      val_reset (set, dv);
    }

  /* See whether the register already carries a VALUE of the same mode
     in this set.  */
  for (att = set->regs[REGNO (pnode->loc)]; att; att = att->next)
    if (att->offset == 0
	&& GET_MODE (att->loc) == GET_MODE (pnode->loc)
	&& dv_is_value_p (dv: att->dv))
      break;

  /* If there is a value associated with this register already, create
     an equivalence.  */
  if (att && dv_as_value (dv: att->dv) != dv_as_value (dv))
    {
      rtx cval = dv_as_value (dv: att->dv);
      set_variable_part (set, cval, dv, 0, pnode->init, NULL, INSERT);
      set_variable_part (set, dv_as_value (dv), att->dv, 0, pnode->init,
			 NULL, INSERT);
    }
  else if (!att)
    {
      /* Otherwise bind the permanent VALUE to the register in this
	 set as well.  */
      attrs_list_insert (listp: &set->regs[REGNO (pnode->loc)],
			 dv, offset: 0, loc: pnode->loc);
      variable_union (src: pvar, set);
    }

  return 1;
}
4611 | |
/* Adjust SET after a dataflow merge: create VALUEs for one-part
   expressions that lack them, bring in and prune the permanent set,
   then canonicalize values and variables.  */
4614 | |
4615 | static void |
4616 | dataflow_post_merge_adjust (dataflow_set *set, dataflow_set **permp) |
4617 | { |
4618 | struct dfset_post_merge dfpm; |
4619 | |
4620 | dfpm.set = set; |
4621 | dfpm.permp = permp; |
4622 | |
4623 | shared_hash_htab (vars: set->vars) |
4624 | ->traverse <dfset_post_merge*, variable_post_merge_new_vals> (argument: &dfpm); |
4625 | if (*permp) |
4626 | shared_hash_htab (vars: (*permp)->vars) |
4627 | ->traverse <dfset_post_merge*, variable_post_merge_perm_vals> (argument: &dfpm); |
4628 | shared_hash_htab (vars: set->vars) |
4629 | ->traverse <dataflow_set *, canonicalize_values_star> (argument: set); |
4630 | shared_hash_htab (vars: set->vars) |
4631 | ->traverse <dataflow_set *, canonicalize_vars_star> (argument: set); |
4632 | } |
4633 | |
4634 | /* Return a node whose loc is a MEM that refers to EXPR in the |
4635 | location list of a one-part variable or value VAR, or in that of |
4636 | any values recursively mentioned in the location lists. */ |
4637 | |
4638 | static location_chain * |
4639 | find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars) |
4640 | { |
4641 | location_chain *node; |
4642 | decl_or_value dv; |
4643 | variable *var; |
4644 | location_chain *where = NULL; |
4645 | |
4646 | if (!val) |
4647 | return NULL; |
4648 | |
4649 | gcc_assert (GET_CODE (val) == VALUE |
4650 | && !VALUE_RECURSED_INTO (val)); |
4651 | |
4652 | dv = dv_from_value (value: val); |
4653 | var = vars->find_with_hash (comparable: dv, hash: dv_htab_hash (dv)); |
4654 | |
4655 | if (!var) |
4656 | return NULL; |
4657 | |
4658 | gcc_assert (var->onepart); |
4659 | |
4660 | if (!var->n_var_parts) |
4661 | return NULL; |
4662 | |
4663 | VALUE_RECURSED_INTO (val) = true; |
4664 | |
4665 | for (node = var->var_part[0].loc_chain; node; node = node->next) |
4666 | if (MEM_P (node->loc) |
4667 | && MEM_EXPR (node->loc) == expr |
4668 | && int_mem_offset (mem: node->loc) == 0) |
4669 | { |
4670 | where = node; |
4671 | break; |
4672 | } |
4673 | else if (GET_CODE (node->loc) == VALUE |
4674 | && !VALUE_RECURSED_INTO (node->loc) |
4675 | && (where = find_mem_expr_in_1pdv (expr, val: node->loc, vars))) |
4676 | break; |
4677 | |
4678 | VALUE_RECURSED_INTO (val) = false; |
4679 | |
4680 | return where; |
4681 | } |
4682 | |
4683 | /* Return TRUE if the value of MEM may vary across a call. */ |
4684 | |
4685 | static bool |
4686 | mem_dies_at_call (rtx mem) |
4687 | { |
4688 | tree expr = MEM_EXPR (mem); |
4689 | tree decl; |
4690 | |
4691 | if (!expr) |
4692 | return true; |
4693 | |
4694 | decl = get_base_address (t: expr); |
4695 | |
4696 | if (!decl) |
4697 | return true; |
4698 | |
4699 | if (!DECL_P (decl)) |
4700 | return true; |
4701 | |
4702 | return (may_be_aliased (var: decl) |
4703 | || (!TREE_READONLY (decl) && is_global_var (t: decl))); |
4704 | } |
4705 | |
4706 | /* Remove all MEMs from the location list of a hash table entry for a |
4707 | one-part variable, except those whose MEM attributes map back to |
4708 | the variable itself, directly or within a VALUE. */ |
4709 | |
int
dataflow_set_preserve_mem_locs (variable **slot, dataflow_set *set)
{
  variable *var = *slot;

  if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR)
    {
      tree decl = dv_as_decl (dv: var->dv);
      location_chain *loc, **locp;
      bool changed = false;

      if (!var->n_var_parts)
	return 1;

      gcc_assert (var->n_var_parts == 1);

      if (shared_var_p (var, vars: set->vars))
	{
	  /* Scan first: only pay for unsharing if something in the
	     location list will actually change.  */
	  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
	    {
	      /* We want to remove dying MEMs that don't refer to DECL.  */
	      if (GET_CODE (loc->loc) == MEM
		  && (MEM_EXPR (loc->loc) != decl
		      || int_mem_offset (mem: loc->loc) != 0)
		  && mem_dies_at_call (mem: loc->loc))
		break;
	      /* We want to move here MEMs that do refer to DECL.  */
	      else if (GET_CODE (loc->loc) == VALUE
		       && find_mem_expr_in_1pdv (expr: decl, val: loc->loc,
						 vars: shared_hash_htab (vars: set->vars)))
		break;
	    }

	  if (!loc)
	    return 1;

	  slot = unshare_variable (set, slot, var, initialized: VAR_INIT_STATUS_UNKNOWN);
	  var = *slot;
	  gcc_assert (var->n_var_parts == 1);
	}

      for (locp = &var->var_part[0].loc_chain, loc = *locp;
	   loc; loc = *locp)
	{
	  rtx old_loc = loc->loc;
	  /* If this location is a VALUE whose equivalence set holds a
	     MEM for DECL, use that MEM here instead.  */
	  if (GET_CODE (old_loc) == VALUE)
	    {
	      location_chain *mem_node
		= find_mem_expr_in_1pdv (expr: decl, val: loc->loc,
					 vars: shared_hash_htab (vars: set->vars));

	      /* ??? This picks up only one out of multiple MEMs that
		 refer to the same variable.  Do we ever need to be
		 concerned about dealing with more than one, or, given
		 that they should all map to the same variable
		 location, their addresses will have been merged and
		 they will be regarded as equivalent?  */
	      if (mem_node)
		{
		  loc->loc = mem_node->loc;
		  loc->set_src = mem_node->set_src;
		  loc->init = MIN (loc->init, mem_node->init);
		}
	    }

	  /* Keep the location unless it is a dying MEM that doesn't
	     hold DECL itself (at offset 0).  */
	  if (GET_CODE (loc->loc) != MEM
	      || (MEM_EXPR (loc->loc) == decl
		  && int_mem_offset (mem: loc->loc) == 0)
	      || !mem_dies_at_call (mem: loc->loc))
	    {
	      if (old_loc != loc->loc && emit_notes)
		{
		  if (old_loc == var->var_part[0].cur_loc)
		    {
		      changed = true;
		      var->var_part[0].cur_loc = NULL;
		    }
		}
	      locp = &loc->next;
	      continue;
	    }

	  if (emit_notes)
	    {
	      if (old_loc == var->var_part[0].cur_loc)
		{
		  changed = true;
		  var->var_part[0].cur_loc = NULL;
		}
	    }
	  /* Unlink and free the dying MEM location.  */
	  *locp = loc->next;
	  delete loc;
	}

      /* If every location was removed, drop the variable's only part
	 and flag the change.  */
      if (!var->var_part[0].loc_chain)
	{
	  var->n_var_parts--;
	  changed = true;
	}
      if (changed)
	variable_was_changed (var, set);
    }

  return 1;
}
4815 | |
/* Remove from the location list of a hash table entry for a onepart
   variable all MEMs that die at a call.  *SLOT is the hash table
   entry, SET the dataflow set being pruned.  Always returns 1 so the
   traversal continues.  */

int
dataflow_set_remove_mem_locs (variable **slot, dataflow_set *set)
{
  variable *var = *slot;

  if (var->onepart != NOT_ONEPART)
    {
      location_chain *loc, **locp;
      bool changed = false;
      rtx cur_loc;

      gcc_assert (var->n_var_parts == 1);

      /* For a shared variable, check first whether any MEM actually
	 dies at the call; unshare only when a removal is needed.  */
      if (shared_var_p (var, vars: set->vars))
	{
	  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
	    if (GET_CODE (loc->loc) == MEM
		&& mem_dies_at_call (mem: loc->loc))
	      break;

	  if (!loc)
	    return 1;

	  slot = unshare_variable (set, slot, var, initialized: VAR_INIT_STATUS_UNKNOWN);
	  var = *slot;
	  gcc_assert (var->n_var_parts == 1);
	}

      /* The location last emitted in a note lives either in the
	 one-part auxiliary data or in the variable part itself.  */
      if (VAR_LOC_1PAUX (var))
	cur_loc = VAR_LOC_FROM (var);
      else
	cur_loc = var->var_part[0].cur_loc;

      /* Delete every dying MEM, keeping LOCP pointing at the link to
	 the current node so unlinking is O(1).  */
      for (locp = &var->var_part[0].loc_chain, loc = *locp;
	   loc; loc = *locp)
	{
	  if (GET_CODE (loc->loc) != MEM
	      || !mem_dies_at_call (mem: loc->loc))
	    {
	      locp = &loc->next;
	      continue;
	    }

	  *locp = loc->next;
	  /* If we have deleted the location which was last emitted
	     we have to emit new location so add the variable to set
	     of changed variables.  */
	  if (cur_loc == loc->loc)
	    {
	      changed = true;
	      var->var_part[0].cur_loc = NULL;
	      if (VAR_LOC_1PAUX (var))
		VAR_LOC_FROM (var) = NULL;
	    }
	  delete loc;
	}

      /* Removing the last location leaves the variable part empty.  */
      if (!var->var_part[0].loc_chain)
	{
	  var->n_var_parts--;
	  changed = true;
	}
      if (changed)
	variable_was_changed (var, set);
    }

  return 1;
}
4887 | |
/* Remove all variable-location information about call-clobbered
   registers, as well as associations between MEMs and VALUEs, from
   dataflow set SET at call insn CALL_INSN.  */

static void
dataflow_set_clear_at_call (dataflow_set *set, rtx_insn *call_insn)
{
  unsigned int r;
  hard_reg_set_iterator hrsi;

  /* Registers the callee's ABI may clobber lose their tracked
     contents.  */
  HARD_REG_SET callee_clobbers
    = insn_callee_abi (call_insn).full_reg_clobbers ();

  EXECUTE_IF_SET_IN_HARD_REG_SET (callee_clobbers, 0, r, hrsi)
    var_regno_delete (set, regno: r);

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      /* First rescue MEMs that refer to tracked decls themselves,
	 then remove the remaining MEMs that die at the call.
	 traversed_vars is set around each pass — presumably so the
	 callbacks' sharing checks see the table being walked; NOTE
	 (review): confirm against shared_var_p.  */
      set->traversed_vars = set->vars;
      shared_hash_htab (vars: set->vars)
	->traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (argument: set);
      set->traversed_vars = set->vars;
      shared_hash_htab (vars: set->vars)
	->traverse <dataflow_set *, dataflow_set_remove_mem_locs> (argument: set);
      set->traversed_vars = NULL;
    }
}
4914 | |
4915 | static bool |
4916 | variable_part_different_p (variable_part *vp1, variable_part *vp2) |
4917 | { |
4918 | location_chain *lc1, *lc2; |
4919 | |
4920 | for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next) |
4921 | { |
4922 | for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next) |
4923 | { |
4924 | if (REG_P (lc1->loc) && REG_P (lc2->loc)) |
4925 | { |
4926 | if (REGNO (lc1->loc) == REGNO (lc2->loc)) |
4927 | break; |
4928 | } |
4929 | if (rtx_equal_p (lc1->loc, lc2->loc)) |
4930 | break; |
4931 | } |
4932 | if (!lc2) |
4933 | return true; |
4934 | } |
4935 | return false; |
4936 | } |
4937 | |
4938 | /* Return true if one-part variables VAR1 and VAR2 are different. |
4939 | They must be in canonical order. */ |
4940 | |
4941 | static bool |
4942 | onepart_variable_different_p (variable *var1, variable *var2) |
4943 | { |
4944 | location_chain *lc1, *lc2; |
4945 | |
4946 | if (var1 == var2) |
4947 | return false; |
4948 | |
4949 | gcc_assert (var1->n_var_parts == 1 |
4950 | && var2->n_var_parts == 1); |
4951 | |
4952 | lc1 = var1->var_part[0].loc_chain; |
4953 | lc2 = var2->var_part[0].loc_chain; |
4954 | |
4955 | gcc_assert (lc1 && lc2); |
4956 | |
4957 | while (lc1 && lc2) |
4958 | { |
4959 | if (loc_cmp (x: lc1->loc, y: lc2->loc)) |
4960 | return true; |
4961 | lc1 = lc1->next; |
4962 | lc2 = lc2->next; |
4963 | } |
4964 | |
4965 | return lc1 != lc2; |
4966 | } |
4967 | |
/* Dump to dump_file the location-chain differences between one-part
   variables VAR1 and VAR2 for the same decl/value.  The chains must
   be in canonical order, so a merge-style walk identifies locations
   only in VAR1 ("removed") and only in VAR2 ("added").  */

static void
dump_onepart_variable_differences (variable *var1, variable *var2)
{
  location_chain *lc1, *lc2;

  gcc_assert (var1 != var2);
  gcc_assert (dump_file);
  gcc_assert (var1->dv == var2->dv);
  gcc_assert (var1->n_var_parts == 1
	      && var2->n_var_parts == 1);

  lc1 = var1->var_part[0].loc_chain;
  lc2 = var2->var_part[0].loc_chain;

  gcc_assert (lc1 && lc2);

  while (lc1 && lc2)
    {
      switch (loc_cmp (x: lc1->loc, y: lc2->loc))
	{
	case -1:
	  /* LC1's location sorts first: present only in VAR1.  */
	  fprintf (stream: dump_file, format: "removed: ");
	  print_rtl_single (dump_file, lc1->loc);
	  lc1 = lc1->next;
	  continue;
	case 0:
	  /* Equal locations: advance both chains below.  */
	  break;
	case 1:
	  /* LC2's location sorts first: present only in VAR2.  */
	  fprintf (stream: dump_file, format: "added: ");
	  print_rtl_single (dump_file, lc2->loc);
	  lc2 = lc2->next;
	  continue;
	default:
	  gcc_unreachable ();
	}
      lc1 = lc1->next;
      lc2 = lc2->next;
    }

  /* Whatever remains on either chain has no counterpart.  */
  while (lc1)
    {
      fprintf (stream: dump_file, format: "removed: ");
      print_rtl_single (dump_file, lc1->loc);
      lc1 = lc1->next;
    }

  while (lc2)
    {
      fprintf (stream: dump_file, format: "added: ");
      print_rtl_single (dump_file, lc2->loc);
      lc2 = lc2->next;
    }
}
5024 | |
5025 | /* Return true if variables VAR1 and VAR2 are different. */ |
5026 | |
5027 | static bool |
5028 | variable_different_p (variable *var1, variable *var2) |
5029 | { |
5030 | int i; |
5031 | |
5032 | if (var1 == var2) |
5033 | return false; |
5034 | |
5035 | if (var1->onepart != var2->onepart) |
5036 | return true; |
5037 | |
5038 | if (var1->n_var_parts != var2->n_var_parts) |
5039 | return true; |
5040 | |
5041 | if (var1->onepart && var1->n_var_parts) |
5042 | { |
5043 | gcc_checking_assert (var1->dv == var2->dv && var1->n_var_parts == 1); |
5044 | /* One-part values have locations in a canonical order. */ |
5045 | return onepart_variable_different_p (var1, var2); |
5046 | } |
5047 | |
5048 | for (i = 0; i < var1->n_var_parts; i++) |
5049 | { |
5050 | if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i)) |
5051 | return true; |
5052 | if (variable_part_different_p (vp1: &var1->var_part[i], vp2: &var2->var_part[i])) |
5053 | return true; |
5054 | if (variable_part_different_p (vp1: &var2->var_part[i], vp2: &var1->var_part[i])) |
5055 | return true; |
5056 | } |
5057 | return false; |
5058 | } |
5059 | |
/* Return true if dataflow sets OLD_SET and NEW_SET differ.  With
   TDF_DETAILS dumping enabled, also print every difference found.  */

static bool
dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set)
{
  variable_iterator_type hi;
  variable *var1;
  bool diffound = false;
  bool details = (dump_file && (dump_flags & TDF_DETAILS));

/* Without detailed dumping, stop at the first difference; with it,
   record the difference and keep scanning so every one is printed.  */
#define RETRUE \
  do \
    { \
      if (!details) \
	return true; \
      else \
	diffound = true; \
    } \
  while (0)

  /* Sharing the same variable table means the sets are equal.  */
  if (old_set->vars == new_set->vars)
    return false;

  if (shared_hash_htab (vars: old_set->vars)->elements ()
      != shared_hash_htab (vars: new_set->vars)->elements ())
    RETRUE;

  /* Every variable in the old set must exist, and compare equal, in
     the new set.  */
  FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set->vars),
			       var1, variable, hi)
    {
      variable_table_type *htab = shared_hash_htab (vars: new_set->vars);
      variable *var2 = htab->find_with_hash (comparable: var1->dv, hash: dv_htab_hash (dv: var1->dv));

      if (!var2)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (stream: dump_file, format: "dataflow difference found: removal of:\n");
	      dump_var (var1);
	    }
	  RETRUE;
	}
      else if (variable_different_p (var1, var2))
	{
	  if (details)
	    {
	      fprintf (stream: dump_file, format: "dataflow difference found: "
		       "old and new follow:\n");
	      dump_var (var1);
	      if (dv_onepart_p (dv: var1->dv))
		dump_onepart_variable_differences (var1, var2);
	      dump_var (var2);
	    }
	  RETRUE;
	}
    }

  /* There's no need to traverse the second hashtab unless we want to
     print the details.  If both have the same number of elements and
     the second one had all entries found in the first one, then the
     second can't have any extra entries.  */
  if (!details)
    return diffound;

  FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (new_set->vars),
			       var1, variable, hi)
    {
      variable_table_type *htab = shared_hash_htab (vars: old_set->vars);
      variable *var2 = htab->find_with_hash (comparable: var1->dv, hash: dv_htab_hash (dv: var1->dv));
      if (!var2)
	{
	  if (details)
	    {
	      fprintf (stream: dump_file, format: "dataflow difference found: addition of:\n");
	      dump_var (var1);
	    }
	  RETRUE;
	}
    }

#undef RETRUE

  return diffound;
}
5144 | |
5145 | /* Free the contents of dataflow set SET. */ |
5146 | |
5147 | static void |
5148 | dataflow_set_destroy (dataflow_set *set) |
5149 | { |
5150 | int i; |
5151 | |
5152 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
5153 | attrs_list_clear (listp: &set->regs[i]); |
5154 | |
5155 | shared_hash_destroy (vars: set->vars); |
5156 | set->vars = NULL; |
5157 | } |
5158 | |
5159 | /* Return true if T is a tracked parameter with non-degenerate record type. */ |
5160 | |
5161 | static bool |
5162 | tracked_record_parameter_p (tree t) |
5163 | { |
5164 | if (TREE_CODE (t) != PARM_DECL) |
5165 | return false; |
5166 | |
5167 | if (DECL_MODE (t) == BLKmode) |
5168 | return false; |
5169 | |
5170 | tree type = TREE_TYPE (t); |
5171 | if (TREE_CODE (type) != RECORD_TYPE) |
5172 | return false; |
5173 | |
5174 | if (TYPE_FIELDS (type) == NULL_TREE |
5175 | || DECL_CHAIN (TYPE_FIELDS (type)) == NULL_TREE) |
5176 | return false; |
5177 | |
5178 | return true; |
5179 | } |
5180 | |
/* Shall EXPR be tracked?  If NEED_RTL, only track it when DECL_RTL
   has been assigned.  As a side effect on a positive answer, clear
   the DECL_CHANGED flags on EXPR and its ultimate debug decl.  */

static bool
track_expr_p (tree expr, bool need_rtl)
{
  rtx decl_rtl;
  tree realdecl;

  /* Debug-expr decls are tracked exactly when they have RTL.  */
  if (TREE_CODE (expr) == DEBUG_EXPR_DECL)
    return DECL_RTL_SET_P (expr);

  /* If EXPR is not a parameter or a variable do not track it.  */
  if (!VAR_P (expr) && TREE_CODE (expr) != PARM_DECL)
    return 0;

  /* It also must have a name...  */
  if (!DECL_NAME (expr) && need_rtl)
    return 0;

  /* ... and a RTL assigned to it.  */
  decl_rtl = DECL_RTL_IF_SET (expr);
  if (!decl_rtl && need_rtl)
    return 0;

  /* If this expression is really a debug alias of some other declaration, we
     don't need to track this expression if the ultimate declaration is
     ignored.  */
  realdecl = expr;
  if (VAR_P (realdecl) && DECL_HAS_DEBUG_EXPR_P (realdecl))
    {
      realdecl = DECL_DEBUG_EXPR (realdecl);
      if (!DECL_P (realdecl))
	{
	  /* The debug expr may be a component or MEM_REF access of
	     some base decl; dig out that base and its extent.  */
	  if (handled_component_p (t: realdecl)
	      || (TREE_CODE (realdecl) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
	    {
	      HOST_WIDE_INT bitsize, bitpos;
	      bool reverse;
	      tree innerdecl
		= get_ref_base_and_extent_hwi (realdecl, &bitpos,
					       &bitsize, &reverse);
	      /* Reject bases we cannot or do not want to track, empty
		 references, and references beyond the first 256 bits
		 of the base.  */
	      if (!innerdecl
		  || !DECL_P (innerdecl)
		  || DECL_IGNORED_P (innerdecl)
		  /* Do not track declarations for parts of tracked record
		     parameters since we want to track them as a whole.  */
		  || tracked_record_parameter_p (t: innerdecl)
		  || TREE_STATIC (innerdecl)
		  || bitsize == 0
		  || bitpos + bitsize > 256)
		return 0;
	      else
		realdecl = expr;
	    }
	  else
	    return 0;
	}
    }

  /* Do not track EXPR if REALDECL it should be ignored for debugging
     purposes.  */
  if (DECL_IGNORED_P (realdecl))
    return 0;

  /* Do not track global variables until we are able to emit correct location
     list for them.  */
  if (TREE_STATIC (realdecl))
    return 0;

  /* When the EXPR is a DECL for alias of some variable (see example)
     the TREE_STATIC flag is not used.  Disable tracking all DECLs whose
     DECL_RTL contains SYMBOL_REF.

     Example:
     extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv")));
     char **_dl_argv;
  */
  if (decl_rtl && MEM_P (decl_rtl)
      && contains_symbol_ref_p (XEXP (decl_rtl, 0)))
    return 0;

  /* If RTX is a memory it should not be very large (because it would be
     an array or struct).  */
  if (decl_rtl && MEM_P (decl_rtl))
    {
      /* Do not track structures and arrays.  */
      if ((GET_MODE (decl_rtl) == BLKmode
	   || AGGREGATE_TYPE_P (TREE_TYPE (realdecl)))
	  && !tracked_record_parameter_p (t: realdecl))
	return 0;
      if (MEM_SIZE_KNOWN_P (decl_rtl)
	  && maybe_gt (MEM_SIZE (decl_rtl), MAX_VAR_PARTS))
	return 0;
    }

  /* Tracking starts afresh for this decl and its debug alias.  */
  DECL_CHANGED (expr) = 0;
  DECL_CHANGED (realdecl) = 0;
  return 1;
}
5281 | |
5282 | /* Determine whether a given LOC refers to the same variable part as |
5283 | EXPR+OFFSET. */ |
5284 | |
5285 | static bool |
5286 | same_variable_part_p (rtx loc, tree expr, poly_int64 offset) |
5287 | { |
5288 | tree expr2; |
5289 | poly_int64 offset2; |
5290 | |
5291 | if (! DECL_P (expr)) |
5292 | return false; |
5293 | |
5294 | if (REG_P (loc)) |
5295 | { |
5296 | expr2 = REG_EXPR (loc); |
5297 | offset2 = REG_OFFSET (loc); |
5298 | } |
5299 | else if (MEM_P (loc)) |
5300 | { |
5301 | expr2 = MEM_EXPR (loc); |
5302 | offset2 = int_mem_offset (mem: loc); |
5303 | } |
5304 | else |
5305 | return false; |
5306 | |
5307 | if (! expr2 || ! DECL_P (expr2)) |
5308 | return false; |
5309 | |
5310 | expr = var_debug_decl (decl: expr); |
5311 | expr2 = var_debug_decl (decl: expr2); |
5312 | |
5313 | return (expr == expr2 && known_eq (offset, offset2)); |
5314 | } |
5315 | |
/* LOC is a REG or MEM that we would like to track if possible.
   If EXPR is null, we don't know what expression LOC refers to,
   otherwise it refers to EXPR + OFFSET.  STORE_REG_P is true if
   LOC is an lvalue register.

   Return true if EXPR is nonnull and if LOC, or some lowpart of it,
   is something we can track.  When returning true, store the mode of
   the lowpart we can track in *MODE_OUT (if nonnull) and its offset
   from EXPR in *OFFSET_OUT (if nonnull).  */

static bool
track_loc_p (rtx loc, tree expr, poly_int64 offset, bool store_reg_p,
	     machine_mode *mode_out, HOST_WIDE_INT *offset_out)
{
  machine_mode mode;

  if (expr == NULL || !track_expr_p (expr, need_rtl: true))
    return false;

  /* If REG was a paradoxical subreg, its REG_ATTRS will describe the
     whole subreg, but only the old inner part is really relevant.  */
  mode = GET_MODE (loc);
  if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc)))
    {
      machine_mode pseudo_mode;

      /* Use the original pseudo's mode when LOC's mode is wider.  */
      pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc));
      if (paradoxical_subreg_p (outermode: mode, innermode: pseudo_mode))
	{
	  offset += byte_lowpart_offset (pseudo_mode, mode);
	  mode = pseudo_mode;
	}
    }

  /* If LOC is a paradoxical lowpart of EXPR, refer to EXPR itself.
     Do the same if we are storing to a register and EXPR occupies
     the whole of register LOC; in that case, the whole of EXPR is
     being changed.  We exclude complex modes from the second case
     because the real and imaginary parts are represented as separate
     pseudo registers, even if the whole complex value fits into one
     hard register.  */
  if ((paradoxical_subreg_p (outermode: mode, DECL_MODE (expr))
       || (store_reg_p
	   && !COMPLEX_MODE_P (DECL_MODE (expr))
	   && hard_regno_nregs (REGNO (loc), DECL_MODE (expr)) == 1))
      && known_eq (offset + byte_lowpart_offset (DECL_MODE (expr), mode), 0))
    {
      mode = DECL_MODE (expr);
      offset = 0;
    }

  /* Reject offsets that track_offset_p deems untrackable.  */
  HOST_WIDE_INT const_offset;
  if (!track_offset_p (offset, offset_out: &const_offset))
    return false;

  if (mode_out)
    *mode_out = mode;
  if (offset_out)
    *offset_out = const_offset;
  return true;
}
5377 | |
5378 | /* Return the MODE lowpart of LOC, or null if LOC is not something we |
5379 | want to track. When returning nonnull, make sure that the attributes |
5380 | on the returned value are updated. */ |
5381 | |
5382 | static rtx |
5383 | var_lowpart (machine_mode mode, rtx loc) |
5384 | { |
5385 | unsigned int regno; |
5386 | |
5387 | if (GET_MODE (loc) == mode) |
5388 | return loc; |
5389 | |
5390 | if (!REG_P (loc) && !MEM_P (loc)) |
5391 | return NULL; |
5392 | |
5393 | poly_uint64 offset = byte_lowpart_offset (mode, GET_MODE (loc)); |
5394 | |
5395 | if (MEM_P (loc)) |
5396 | return adjust_address_nv (loc, mode, offset); |
5397 | |
5398 | poly_uint64 reg_offset = subreg_lowpart_offset (outermode: mode, GET_MODE (loc)); |
5399 | regno = REGNO (loc) + subreg_regno_offset (REGNO (loc), GET_MODE (loc), |
5400 | reg_offset, mode); |
5401 | return gen_rtx_REG_offset (loc, mode, regno, offset); |
5402 | } |
5403 | |
/* Carry information about uses and stores while walking rtx.  */

struct count_use_info
{
  /* The insn where the RTX is.  */
  rtx_insn *insn;

  /* The basic block where insn is.  */
  basic_block bb;

  /* The array of n_sets sets in the insn, as determined by cselib.  */
  struct cselib_set *sets;
  /* Number of valid entries in SETS.  */
  int n_sets;

  /* True if we're counting stores, false otherwise.  */
  bool store_p;
};
5421 | |
/* Find a VALUE corresponding to X.  MODE is X's mode and CUI
   describes the insn being scanned.  Return NULL when no suitable
   VALUE is available.  */

static inline cselib_val *
find_use_val (rtx x, machine_mode mode, struct count_use_info *cui)
{
  int i;

  if (cui->sets)
    {
      /* This is called after uses are set up and before stores are
	 processed by cselib, so it's safe to look up srcs, but not
	 dsts.  So we look up expressions that appear in srcs or in
	 dest expressions, but we search the sets array for dests of
	 stores.  */
      if (cui->store_p)
	{
	  /* Some targets represent memset and memcpy patterns
	     by (set (mem:BLK ...) (reg:[QHSD]I ...)) or
	     (set (mem:BLK ...) (const_int ...)) or
	     (set (mem:BLK ...) (mem:BLK ...)).  Don't return anything
	     in that case, otherwise we end up with mode mismatches.  */
	  if (mode == BLKmode && MEM_P (x))
	    return NULL;
	  /* Linear scan: insns carry only a handful of sets.  */
	  for (i = 0; i < cui->n_sets; i++)
	    if (cui->sets[i].dest == x)
	      return cui->sets[i].src_elt;
	}
      else
	return cselib_lookup (x, mode, 0, VOIDmode);
    }

  return NULL;
}
5455 | |
5456 | /* Replace all registers and addresses in an expression with VALUE |
5457 | expressions that map back to them, unless the expression is a |
5458 | register. If no mapping is or can be performed, returns NULL. */ |
5459 | |
5460 | static rtx |
5461 | replace_expr_with_values (rtx loc) |
5462 | { |
5463 | if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE) |
5464 | return NULL; |
5465 | else if (MEM_P (loc)) |
5466 | { |
5467 | cselib_val *addr = cselib_lookup (XEXP (loc, 0), |
5468 | get_address_mode (mem: loc), 0, |
5469 | GET_MODE (loc)); |
5470 | if (addr) |
5471 | return replace_equiv_address_nv (loc, addr->val_rtx); |
5472 | else |
5473 | return NULL; |
5474 | } |
5475 | else |
5476 | return cselib_subst_to_values (loc, VOIDmode); |
5477 | } |
5478 | |
5479 | /* Return true if X contains a DEBUG_EXPR. */ |
5480 | |
5481 | static bool |
5482 | rtx_debug_expr_p (const_rtx x) |
5483 | { |
5484 | subrtx_iterator::array_type array; |
5485 | FOR_EACH_SUBRTX (iter, array, x, ALL) |
5486 | if (GET_CODE (*iter) == DEBUG_EXPR) |
5487 | return true; |
5488 | return false; |
5489 | } |
5490 | |
/* Determine what kind of micro operation to choose for a USE of LOC.
   Return MO_CLOBBER if no micro operation is to be generated.  CUI,
   if nonnull, describes the insn being scanned; *MODEP, if MODEP is
   nonnull, receives the mode relevant for tracking LOC.  */

static enum micro_operation_type
use_type (rtx loc, struct count_use_info *cui, machine_mode *modep)
{
  tree expr;

  /* With cselib sets available, prefer the value-tracking micro
     operations (MO_VAL_*).  */
  if (cui && cui->sets)
    {
      if (GET_CODE (loc) == VAR_LOCATION)
	{
	  if (track_expr_p (PAT_VAR_LOCATION_DECL (loc), need_rtl: false))
	    {
	      rtx ploc = PAT_VAR_LOCATION_LOC (loc);
	      if (! VAR_LOC_UNKNOWN_P (ploc))
		{
		  cselib_val *val = cselib_lookup (ploc, GET_MODE (loc), 1,
						   VOIDmode);

		  /* ??? flag_float_store and volatile mems are never
		     given values, but we could in theory use them for
		     locations.  */
		  gcc_assert (val || 1);
		}
	      return MO_VAL_LOC;
	    }
	  else
	    return MO_CLOBBER;
	}

      if (REG_P (loc) || MEM_P (loc))
	{
	  if (modep)
	    *modep = GET_MODE (loc);
	  if (cui->store_p)
	    {
	      /* A store is a MO_VAL_SET if a VALUE is known for it
		 and, for a MEM, for its address as well.  */
	      if (REG_P (loc)
		  || (find_use_val (x: loc, GET_MODE (loc), cui)
		      && cselib_lookup (XEXP (loc, 0),
					get_address_mode (mem: loc), 0,
					GET_MODE (loc))))
		return MO_VAL_SET;
	    }
	  else
	    {
	      cselib_val *val = find_use_val (x: loc, GET_MODE (loc), cui);

	      /* Already-preserved values need no MO_VAL_USE; fall
		 through to the plain classification below.  */
	      if (val && !cselib_preserved_value_p (val))
		return MO_VAL_USE;
	    }
	}
    }

  if (REG_P (loc))
    {
      gcc_assert (REGNO (loc) < FIRST_PSEUDO_REGISTER);

      /* The CFA base register is never tracked as a variable home.  */
      if (loc == cfa_base_rtx)
	return MO_CLOBBER;
      expr = REG_EXPR (loc);

      if (!expr)
	return MO_USE_NO_VAR;
      else if (target_for_debug_bind (var_debug_decl (decl: expr)))
	return MO_CLOBBER;
      else if (track_loc_p (loc, expr, REG_OFFSET (loc),
			    store_reg_p: false, mode_out: modep, NULL))
	return MO_USE;
      else
	return MO_USE_NO_VAR;
    }
  else if (MEM_P (loc))
    {
      expr = MEM_EXPR (loc);

      if (!expr)
	return MO_CLOBBER;
      else if (target_for_debug_bind (var_debug_decl (decl: expr)))
	return MO_CLOBBER;
      else if (track_loc_p (loc, expr, offset: int_mem_offset (mem: loc),
			    store_reg_p: false, mode_out: modep, NULL)
	       /* Multi-part variables shouldn't refer to one-part
		  variable names such as VALUEs (never happens) or
		  DEBUG_EXPRs (only happens in the presence of debug
		  insns).  */
	       && (!MAY_HAVE_DEBUG_BIND_INSNS
		   || !rtx_debug_expr_p (XEXP (loc, 0))))
	return MO_USE;
      else
	return MO_CLOBBER;
    }

  return MO_CLOBBER;
}
5586 | |
5587 | /* Log to OUT information about micro-operation MOPT involving X in |
5588 | INSN of BB. */ |
5589 | |
5590 | static inline void |
5591 | log_op_type (rtx x, basic_block bb, rtx_insn *insn, |
5592 | enum micro_operation_type mopt, FILE *out) |
5593 | { |
5594 | fprintf (stream: out, format: "bb %i op %i insn %i %s " , |
5595 | bb->index, VTI (bb)->mos.length (), |
5596 | INSN_UID (insn), micro_operation_type_name[mopt]); |
5597 | print_inline_rtx (out, x, 2); |
5598 | fputc (c: '\n', stream: out); |
5599 | } |
5600 | |
/* The following four flags annotate the (concat VALUE location) rtxes
   built for micro operations; they reuse otherwise-unused rtx flag
   bits (volatil, used, jump, unchanging), access-checked via
   RTL_FLAG_CHECK1 to CONCATs only.  */

/* Tell whether the CONCAT used to hold a VALUE and its location
   needs value resolution, i.e., an attempt of mapping the location
   back to other incoming values.  */
#define VAL_NEEDS_RESOLUTION(x) \
  (RTL_FLAG_CHECK1 ("VAL_NEEDS_RESOLUTION", (x), CONCAT)->volatil)
/* Whether the location in the CONCAT is a tracked expression, that
   should also be handled like a MO_USE.  */
#define VAL_HOLDS_TRACK_EXPR(x) \
  (RTL_FLAG_CHECK1 ("VAL_HOLDS_TRACK_EXPR", (x), CONCAT)->used)
/* Whether the location in the CONCAT should be handled like a MO_COPY
   as well.  */
#define VAL_EXPR_IS_COPIED(x) \
  (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump)
/* Whether the location in the CONCAT should be handled like a
   MO_CLOBBER as well.  */
#define VAL_EXPR_IS_CLOBBERED(x) \
  (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging)

/* All VALUEs preserved so far, in order, for use by vt_emit_notes.  */
static vec<rtx> preserved_values;
5621 | |
/* Ensure VAL is preserved and remember it in a vector for vt_emit_notes.  */

static void
preserve_value (cselib_val *val)
{
  /* Keep cselib from discarding VAL, then record its rtx so note
     emission can revisit every preserved value.  */
  cselib_preserve_value (val);
  preserved_values.safe_push (obj: val->val_rtx);
}
5630 | |
5631 | /* Helper function for MO_VAL_LOC handling. Return non-zero if |
5632 | any rtxes not suitable for CONST use not replaced by VALUEs |
5633 | are discovered. */ |
5634 | |
5635 | static bool |
5636 | non_suitable_const (const_rtx x) |
5637 | { |
5638 | subrtx_iterator::array_type array; |
5639 | FOR_EACH_SUBRTX (iter, array, x, ALL) |
5640 | { |
5641 | const_rtx x = *iter; |
5642 | switch (GET_CODE (x)) |
5643 | { |
5644 | case REG: |
5645 | case DEBUG_EXPR: |
5646 | case PC: |
5647 | case SCRATCH: |
5648 | case ASM_INPUT: |
5649 | case ASM_OPERANDS: |
5650 | return true; |
5651 | case MEM: |
5652 | if (!MEM_READONLY_P (x)) |
5653 | return true; |
5654 | break; |
5655 | default: |
5656 | break; |
5657 | } |
5658 | } |
5659 | return false; |
5660 | } |
5661 | |
/* Add uses (register and memory references) LOC which will be tracked
   to VTI (bb)->mos.  CUI carries the insn, basic block and cselib
   sets being scanned.  */

static void
add_uses (rtx loc, struct count_use_info *cui)
{
  machine_mode mode = VOIDmode;
  enum micro_operation_type type = use_type (loc, cui, modep: &mode);

  if (type != MO_CLOBBER)
    {
      basic_block bb = cui->bb;
      micro_operation mo;

      mo.type = type;
      mo.u.loc = type == MO_USE ? var_lowpart (mode, loc) : loc;
      mo.insn = cui->insn;

      if (type == MO_VAL_LOC)
	{
	  rtx oloc = loc;
	  rtx vloc = PAT_VAR_LOCATION_LOC (oloc);
	  cselib_val *val;

	  gcc_assert (cui->sets);

	  /* The location is a MEM with a non-trivial address: keep
	     the VALUE of that address alive for note emission.  */
	  if (MEM_P (vloc)
	      && !REG_P (XEXP (vloc, 0))
	      && !MEM_P (XEXP (vloc, 0)))
	    {
	      rtx mloc = vloc;
	      machine_mode address_mode = get_address_mode (mem: mloc);
	      cselib_val *val
		= cselib_lookup (XEXP (mloc, 0), address_mode, 0,
				 GET_MODE (mloc));

	      if (val && !cselib_preserved_value_p (val))
		preserve_value (val);
	    }

	  if (CONSTANT_P (vloc)
	      && (GET_CODE (vloc) != CONST || non_suitable_const (x: vloc)))
	    /* For constants don't look up any value.  */;
	  else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (loc: vloc)
		   && (val = find_use_val (x: vloc, GET_MODE (oloc), cui)))
	    {
	      machine_mode mode2;
	      enum micro_operation_type type2;
	      rtx nloc = NULL;
	      /* Only REGs and MEMs can be mapped back to values.  */
	      bool resolvable = REG_P (vloc) || MEM_P (vloc);

	      if (resolvable)
		nloc = replace_expr_with_values (loc: vloc);

	      if (nloc)
		{
		  oloc = shallow_copy_rtx (oloc);
		  PAT_VAR_LOCATION_LOC (oloc) = nloc;
		}

	      /* Pair the VALUE with the (possibly rewritten) location
		 in a CONCAT for the micro operation.  */
	      oloc = gen_rtx_CONCAT (mode, val->val_rtx, oloc);

	      type2 = use_type (loc: vloc, cui: 0, modep: &mode2);

	      gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
			  || type2 == MO_CLOBBER);

	      if (type2 == MO_CLOBBER
		  && !cselib_preserved_value_p (val))
		{
		  VAL_NEEDS_RESOLUTION (oloc) = resolvable;
		  preserve_value (val);
		}
	    }
	  else if (!VAR_LOC_UNKNOWN_P (vloc))
	    {
	      /* No usable value: record the location as unknown.  */
	      oloc = shallow_copy_rtx (oloc);
	      PAT_VAR_LOCATION_LOC (oloc) = gen_rtx_UNKNOWN_VAR_LOC ();
	    }

	  mo.u.loc = oloc;
	}
      else if (type == MO_VAL_USE)
	{
	  machine_mode mode2 = VOIDmode;
	  enum micro_operation_type type2;
	  cselib_val *val = find_use_val (x: loc, GET_MODE (loc), cui);
	  rtx vloc, oloc = loc, nloc;

	  gcc_assert (cui->sets);

	  /* As above: preserve the VALUE of a non-trivial MEM
	     address.  */
	  if (MEM_P (oloc)
	      && !REG_P (XEXP (oloc, 0))
	      && !MEM_P (XEXP (oloc, 0)))
	    {
	      rtx mloc = oloc;
	      machine_mode address_mode = get_address_mode (mem: mloc);
	      cselib_val *val
		= cselib_lookup (XEXP (mloc, 0), address_mode, 0,
				 GET_MODE (mloc));

	      if (val && !cselib_preserved_value_p (val))
		preserve_value (val);
	    }

	  /* Classify LOC without the cselib sets to learn how it
	     would be tracked as a plain use.  */
	  type2 = use_type (loc, cui: 0, modep: &mode2);

	  gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR
		      || type2 == MO_CLOBBER);

	  if (type2 == MO_USE)
	    vloc = var_lowpart (mode: mode2, loc);
	  else
	    vloc = oloc;

	  /* The loc of a MO_VAL_USE may have two forms:

	     (concat val src): val is at src, a value-based
	     representation.

	     (concat (concat val use) src): same as above, with use as
	     the MO_USE tracked value, if it differs from src.

	  */

	  gcc_checking_assert (REG_P (loc) || MEM_P (loc));
	  nloc = replace_expr_with_values (loc);
	  if (!nloc)
	    nloc = oloc;

	  if (vloc != nloc)
	    oloc = gen_rtx_CONCAT (mode2, val->val_rtx, vloc);
	  else
	    oloc = val->val_rtx;

	  mo.u.loc = gen_rtx_CONCAT (mode, oloc, nloc);

	  if (type2 == MO_USE)
	    VAL_HOLDS_TRACK_EXPR (mo.u.loc) = 1;
	  if (!cselib_preserved_value_p (val))
	    {
	      VAL_NEEDS_RESOLUTION (mo.u.loc) = 1;
	      preserve_value (val);
	    }
	}
      else
	gcc_assert (type == MO_USE || type == MO_USE_NO_VAR);

      if (dump_file && (dump_flags & TDF_DETAILS))
	log_op_type (x: mo.u.loc, bb: cui->bb, insn: cui->insn, mopt: mo.type, out: dump_file);
      VTI (bb)->mos.safe_push (obj: mo);
    }
}
5815 | |
5816 | /* Helper function for finding all uses of REG/MEM in X in insn INSN. */ |
5817 | |
5818 | static void |
5819 | add_uses_1 (rtx *x, void *cui) |
5820 | { |
5821 | subrtx_var_iterator::array_type array; |
5822 | FOR_EACH_SUBRTX_VAR (iter, array, *x, NONCONST) |
5823 | add_uses (loc: *iter, cui: (struct count_use_info *) cui); |
5824 | } |
5825 | |
/* This is the value used during expansion of locations.  We want it
   to be unbounded, so that variables expanded deep in a recursion
   nest are fully evaluated, so that their values are cached
   correctly.  We avoid recursion cycles through other means, and we
   don't unshare RTL, so excess complexity is not a problem.  */
#define EXPR_DEPTH (INT_MAX)
/* We use this to keep too-complex expressions from being emitted as
   location notes, and then to debug information.  Users can trade
   compile time for ridiculously complex expressions, although they're
   seldom useful, and they may often have to be discarded as not
   representable anyway.  This limit comes from the
   --param=max-vartrack-expr-depth command-line parameter.  */
#define EXPR_USE_DEPTH (param_max_vartrack_expr_depth)
5838 | |
5839 | /* Attempt to reverse the EXPR operation in the debug info and record |
5840 | it in the cselib table. Say for reg1 = reg2 + 6 even when reg2 is |
5841 | no longer live we can express its value as VAL - 6. */ |
5842 | |
5843 | static void |
5844 | reverse_op (rtx val, const_rtx expr, rtx_insn *insn) |
5845 | { |
5846 | rtx src, arg, ret; |
5847 | cselib_val *v; |
5848 | struct elt_loc_list *l; |
5849 | enum rtx_code code; |
5850 | int count; |
5851 | |
5852 | if (GET_CODE (expr) != SET) |
5853 | return; |
5854 | |
5855 | if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr))) |
5856 | return; |
5857 | |
5858 | src = SET_SRC (expr); |
5859 | switch (GET_CODE (src)) |
5860 | { |
5861 | case PLUS: |
5862 | case MINUS: |
5863 | case XOR: |
5864 | case NOT: |
5865 | case NEG: |
5866 | if (!REG_P (XEXP (src, 0))) |
5867 | return; |
5868 | break; |
5869 | case SIGN_EXTEND: |
5870 | case ZERO_EXTEND: |
5871 | if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0))) |
5872 | return; |
5873 | break; |
5874 | default: |
5875 | return; |
5876 | } |
5877 | |
5878 | if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx) |
5879 | return; |
5880 | |
5881 | v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode); |
5882 | if (!v || !cselib_preserved_value_p (v)) |
5883 | return; |
5884 | |
5885 | /* Use canonical V to avoid creating multiple redundant expressions |
5886 | for different VALUES equivalent to V. */ |
5887 | v = canonical_cselib_val (val: v); |
5888 | |
5889 | /* Adding a reverse op isn't useful if V already has an always valid |
5890 | location. Ignore ENTRY_VALUE, while it is always constant, we should |
5891 | prefer non-ENTRY_VALUE locations whenever possible. */ |
5892 | for (l = v->locs, count = 0; l; l = l->next, count++) |
5893 | if (CONSTANT_P (l->loc) |
5894 | && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0))) |
5895 | return; |
5896 | /* Avoid creating too large locs lists. */ |
5897 | else if (count == param_max_vartrack_reverse_op_size) |
5898 | return; |
5899 | |
5900 | switch (GET_CODE (src)) |
5901 | { |
5902 | case NOT: |
5903 | case NEG: |
5904 | if (GET_MODE (v->val_rtx) != GET_MODE (val)) |
5905 | return; |
5906 | ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val); |
5907 | break; |
5908 | case SIGN_EXTEND: |
5909 | case ZERO_EXTEND: |
5910 | ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val); |
5911 | break; |
5912 | case XOR: |
5913 | code = XOR; |
5914 | goto binary; |
5915 | case PLUS: |
5916 | code = MINUS; |
5917 | goto binary; |
5918 | case MINUS: |
5919 | code = PLUS; |
5920 | goto binary; |
5921 | binary: |
5922 | if (GET_MODE (v->val_rtx) != GET_MODE (val)) |
5923 | return; |
5924 | arg = XEXP (src, 1); |
5925 | if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF) |
5926 | { |
5927 | arg = cselib_expand_value_rtx (arg, scratch_regs, 5); |
5928 | if (arg == NULL_RTX) |
5929 | return; |
5930 | if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF) |
5931 | return; |
5932 | } |
5933 | ret = simplify_gen_binary (code, GET_MODE (val), op0: val, op1: arg); |
5934 | break; |
5935 | default: |
5936 | gcc_unreachable (); |
5937 | } |
5938 | |
5939 | cselib_add_permanent_equiv (v, ret, insn); |
5940 | } |
5941 | |
/* Add stores (register and memory references) LOC which will be tracked
   to VTI (bb)->mos.  EXPR is the RTL expression containing the store.
   CUIP->insn is instruction which the LOC is part of.  Called via
   note_stores; builds one MO_CLOBBER/MO_SET/MO_COPY/MO_VAL_SET micro
   operation per store.  */

static void
add_stores (rtx loc, const_rtx expr, void *cuip)
{
  machine_mode mode = VOIDmode, mode2;
  struct count_use_info *cui = (struct count_use_info *)cuip;
  basic_block bb = cui->bb;
  micro_operation mo;
  rtx oloc = loc, nloc, src = NULL;
  enum micro_operation_type type = use_type (loc, cui, modep: &mode);
  bool track_p = false;
  cselib_val *v;
  bool resolve, preserve;

  /* use_type decided nothing needs to be recorded for this store.  */
  if (type == MO_CLOBBER)
    return;

  mode2 = mode;

  if (REG_P (loc))
    {
      gcc_assert (loc != cfa_base_rtx);
      /* Record a clobber when EXPR is a CLOBBER, or when the register
	 does not itself hold a trackable variable part.  */
      if ((GET_CODE (expr) == CLOBBER && type != MO_VAL_SET)
	  || !(track_p = use_type (loc, NULL, modep: &mode2) == MO_USE)
	  || GET_CODE (expr) == CLOBBER)
	{
	  mo.type = MO_CLOBBER;
	  mo.u.loc = loc;
	  /* Even when the destination is untracked, keep the whole SET
	     around if cselib has a value for it, so the MO_VAL_SET
	     machinery below can still use the source.  */
	  if (GET_CODE (expr) == SET
	      && (SET_DEST (expr) == loc
		  || (GET_CODE (SET_DEST (expr)) == STRICT_LOW_PART
		      && XEXP (SET_DEST (expr), 0) == loc))
	      && !unsuitable_loc (SET_SRC (expr))
	      && find_use_val (x: loc, mode, cui))
	    {
	      gcc_checking_assert (type == MO_VAL_SET);
	      mo.u.loc = gen_rtx_SET (loc, SET_SRC (expr));
	    }
	}
      else
	{
	  /* Tracked register: record the set, narrowing both sides to
	     the tracked lowpart mode.  */
	  if (GET_CODE (expr) == SET
	      && SET_DEST (expr) == loc
	      && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
	    src = var_lowpart (mode: mode2, SET_SRC (expr));
	  loc = var_lowpart (mode: mode2, loc);

	  if (src == NULL)
	    {
	      mo.type = MO_SET;
	      mo.u.loc = loc;
	    }
	  else
	    {
	      rtx xexpr = gen_rtx_SET (loc, src);
	      /* Source and destination describe the same variable part:
		 normally a copy, with one exception below.  */
	      if (same_variable_part_p (loc: src, REG_EXPR (loc), REG_OFFSET (loc)))
		{
		  /* If this is an instruction copying (part of) a parameter
		     passed by invisible reference to its register location,
		     pretend it's a SET so that the initial memory location
		     is discarded, as the parameter register can be reused
		     for other purposes and we do not track locations based
		     on generic registers.  */
		  if (MEM_P (src)
		      && REG_EXPR (loc)
		      && TREE_CODE (REG_EXPR (loc)) == PARM_DECL
		      && DECL_MODE (REG_EXPR (loc)) != BLKmode
		      && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc)))
		      && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0)
			 != arg_pointer_rtx)
		    mo.type = MO_SET;
		  else
		    mo.type = MO_COPY;
		}
	      else
		mo.type = MO_SET;
	      mo.u.loc = xexpr;
	    }
	}
      mo.insn = cui->insn;
    }
  else if (MEM_P (loc)
	   && ((track_p = use_type (loc, NULL, modep: &mode2) == MO_USE)
	       || cui->sets))
    {
      /* Memory destination.  First make sure the VALUE for a non-trivial
	 address survives, so the MEM stays expressible later.  */
      if (MEM_P (loc) && type == MO_VAL_SET
	  && !REG_P (XEXP (loc, 0))
	  && !MEM_P (XEXP (loc, 0)))
	{
	  rtx mloc = loc;
	  machine_mode address_mode = get_address_mode (mem: mloc);
	  cselib_val *val = cselib_lookup (XEXP (mloc, 0),
					   address_mode, 0,
					   GET_MODE (mloc));

	  if (val && !cselib_preserved_value_p (val))
	    preserve_value (val);
	}

      if (GET_CODE (expr) == CLOBBER || !track_p)
	{
	  mo.type = MO_CLOBBER;
	  mo.u.loc = track_p ? var_lowpart (mode: mode2, loc) : loc;
	}
      else
	{
	  /* Tracked MEM: mirror the register case above.  */
	  if (GET_CODE (expr) == SET
	      && SET_DEST (expr) == loc
	      && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS)
	    src = var_lowpart (mode: mode2, SET_SRC (expr));
	  loc = var_lowpart (mode: mode2, loc);

	  if (src == NULL)
	    {
	      mo.type = MO_SET;
	      mo.u.loc = loc;
	    }
	  else
	    {
	      rtx xexpr = gen_rtx_SET (loc, src);
	      if (same_variable_part_p (SET_SRC (xexpr),
					MEM_EXPR (loc),
					offset: int_mem_offset (mem: loc)))
		mo.type = MO_COPY;
	      else
		mo.type = MO_SET;
	      mo.u.loc = xexpr;
	    }
	}
      mo.insn = cui->insn;
    }
  else
    return;

  /* Without cselib value tracking there is nothing more to attach;
     push the micro operation as built above.  */
  if (type != MO_VAL_SET)
    goto log_and_return;

  v = find_use_val (x: oloc, mode, cui);

  if (!v)
    goto log_and_return;

  resolve = preserve = !cselib_preserved_value_p (v);

  /* We cannot track values for multiple-part variables, so we track only
     locations for tracked record parameters.  */
  if (track_p
      && REG_P (loc)
      && REG_EXPR (loc)
      && tracked_record_parameter_p (REG_EXPR (loc)))
    {
      /* Although we don't use the value here, it could be used later by the
	 mere virtue of its existence as the operand of the reverse operation
	 that gave rise to it (typically extension/truncation).  Make sure it
	 is preserved as required by vt_expand_var_loc_chain.  */
      if (preserve)
	preserve_value (val: v);
      goto log_and_return;
    }

  if (loc == stack_pointer_rtx
      && (maybe_ne (a: hard_frame_pointer_adjustment, b: -1)
	  || (!frame_pointer_needed && !ACCUMULATE_OUTGOING_ARGS))
      && preserve)
    cselib_set_value_sp_based (v);

  /* Don't record MO_VAL_SET for VALUEs that can be described using
     cfa_base_rtx or cfa_base_rtx + CONST_INT, cselib already knows
     all the needed equivalences and they shouldn't change depending
     on which register holds that VALUE in some instruction.  */
  if (!frame_pointer_needed
      && cfa_base_rtx
      && cselib_sp_derived_value_p (v)
      && loc == stack_pointer_rtx)
    {
      if (preserve)
	preserve_value (val: v);
      return;
    }

  /* Switch to the value-based representation of the destination.  */
  nloc = replace_expr_with_values (loc: oloc);
  if (nloc)
    oloc = nloc;

  if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC)
    {
      /* Conditional store: the destination may keep its previous value,
	 so look that value up and record a MO_VAL_USE for it first.  */
      cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode);

      if (oval == v)
	return;
      gcc_assert (REG_P (oloc) || MEM_P (oloc));

      if (oval && !cselib_preserved_value_p (oval))
	{
	  micro_operation moa;

	  preserve_value (val: oval);

	  moa.type = MO_VAL_USE;
	  moa.u.loc = gen_rtx_CONCAT (mode, oval->val_rtx, oloc);
	  VAL_NEEDS_RESOLUTION (moa.u.loc) = 1;
	  moa.insn = cui->insn;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    log_op_type (x: moa.u.loc, bb: cui->bb, insn: cui->insn,
			 mopt: moa.type, out: dump_file);
	  VTI (bb)->mos.safe_push (obj: moa);
	}

      resolve = false;
    }
  else if (resolve && GET_CODE (mo.u.loc) == SET)
    {
      /* Try to express the source with values too; resolution only
	 remains necessary if the source stays a plain REG.  */
      if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr)))
	nloc = replace_expr_with_values (SET_SRC (expr));
      else
	nloc = NULL_RTX;

      /* Avoid the mode mismatch between oexpr and expr.  */
      if (!nloc && mode != mode2)
	{
	  nloc = SET_SRC (expr);
	  gcc_assert (oloc == SET_DEST (expr));
	}

      if (nloc && nloc != SET_SRC (mo.u.loc))
	oloc = gen_rtx_SET (oloc, nloc);
      else
	{
	  if (oloc == SET_DEST (mo.u.loc))
	    /* No point in duplicating.  */
	    oloc = mo.u.loc;
	  if (!REG_P (SET_SRC (mo.u.loc)))
	    resolve = false;
	}
    }
  else if (!resolve)
    {
      if (GET_CODE (mo.u.loc) == SET
	  && oloc == SET_DEST (mo.u.loc))
	/* No point in duplicating.  */
	oloc = mo.u.loc;
    }
  else
    resolve = false;

  /* Wrap everything into the CONCAT forms documented below.  */
  loc = gen_rtx_CONCAT (mode, v->val_rtx, oloc);

  if (mo.u.loc != oloc)
    loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, mo.u.loc);

  /* The loc of a MO_VAL_SET may have various forms:

     (concat val dst): dst now holds val

     (concat val (set dst src)): dst now holds val, copied from src

     (concat (concat val dstv) dst): dst now holds val; dstv is dst
     after replacing mems and non-top-level regs with values.

     (concat (concat val dstv) (set dst src)): dst now holds val,
     copied from src.  dstv is a value-based representation of dst, if
     it differs from dst.  If resolution is needed, src is a REG, and
     its mode is the same as that of val.

     (concat (concat val (set dstv srcv)) (set dst src)): src
     copied to dst, holding val.  dstv and srcv are value-based
     representations of dst and src, respectively.

     */

  if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC)
    reverse_op (val: v->val_rtx, expr, insn: cui->insn);

  mo.u.loc = loc;

  if (track_p)
    VAL_HOLDS_TRACK_EXPR (loc) = 1;
  if (preserve)
    {
      VAL_NEEDS_RESOLUTION (loc) = resolve;
      preserve_value (val: v);
    }
  if (mo.type == MO_CLOBBER)
    VAL_EXPR_IS_CLOBBERED (loc) = 1;
  if (mo.type == MO_COPY)
    VAL_EXPR_IS_COPIED (loc) = 1;

  mo.type = MO_VAL_SET;

 log_and_return:
  if (dump_file && (dump_flags & TDF_DETAILS))
    log_op_type (x: mo.u.loc, bb: cui->bb, insn: cui->insn, mopt: mo.type, out: dump_file);
  VTI (bb)->mos.safe_push (obj: mo);
}
6240 | |
/* Arguments to the call.  An EXPR_LIST built by prepare_call_arguments
   for the call insn being processed; consumed (and reset to NULL) by
   add_with_sets when it records the MO_CALL micro operation.  */
static rtx call_arguments;
6243 | |
/* Compute call_arguments for the call insn INSN in basic block BB:
   an EXPR_LIST describing where each argument of the call lives
   (as preserved cselib VALUEs where possible), plus entries for the
   callee address and, for virtual calls, the vtable slot used.  */

static void
prepare_call_arguments (basic_block bb, rtx_insn *insn)
{
  rtx link, x, call;
  rtx prev, cur, next;
  rtx this_arg = NULL_RTX;
  tree type = NULL_TREE, t, fndecl = NULL_TREE;
  tree obj_type_ref = NULL_TREE;
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;

  memset (s: &args_so_far_v, c: 0, n: sizeof (args_so_far_v));
  args_so_far = pack_cumulative_args (arg: &args_so_far_v);
  call = get_call_rtx_from (insn);
  if (call)
    {
      /* Try to identify the called function declaration and its type,
	 first through a SYMBOL_REF callee, then through MEM_EXPR.  */
      if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
	{
	  rtx symbol = XEXP (XEXP (call, 0), 0);
	  if (SYMBOL_REF_DECL (symbol))
	    fndecl = SYMBOL_REF_DECL (symbol);
	}
      if (fndecl == NULL_TREE)
	fndecl = MEM_EXPR (XEXP (call, 0));
      if (fndecl
	  && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE
	  && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE)
	fndecl = NULL_TREE;
      if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
	type = TREE_TYPE (fndecl);
      if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL)
	{
	  /* An indirect call through an OBJ_TYPE_REF is a virtual
	     call; remember the reference for the vtable handling at
	     the end.  */
	  if (INDIRECT_REF_P (fndecl)
	      && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF)
	    obj_type_ref = TREE_OPERAND (fndecl, 0);
	  fndecl = NULL_TREE;
	}
      if (type)
	{
	  /* Only keep TYPE if the prototype has a reference-to-integral
	     parameter (handled specially in the main loop below) or the
	     call is virtual.  */
	  for (t = TYPE_ARG_TYPES (type); t && t != void_list_node;
	       t = TREE_CHAIN (t))
	    if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE
		&& INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t))))
	      break;
	  if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE)
	    type = NULL;
	  else
	    {
	      int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type));
	      link = CALL_INSN_FUNCTION_USAGE (insn);
#ifndef PCC_STATIC_STRUCT_RETURN
	      /* If the aggregate return value is passed through an
		 implicit leading pointer argument, advance the argument
		 scan past it.  */
	      if (aggregate_value_p (TREE_TYPE (type), type)
		  && targetm.calls.struct_value_rtx (type, 0) == 0)
		{
		  tree struct_addr = build_pointer_type (TREE_TYPE (type));
		  function_arg_info arg (struct_addr, /*named=*/true);
		  rtx reg;
		  INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
					nargs + 1);
		  reg = targetm.calls.function_arg (args_so_far, arg);
		  targetm.calls.function_arg_advance (args_so_far, arg);
		  if (reg == NULL_RTX)
		    {
		      /* Passed on the stack: skip the corresponding MEM
			 USE in CALL_INSN_FUNCTION_USAGE.  */
		      for (; link; link = XEXP (link, 1))
			if (GET_CODE (XEXP (link, 0)) == USE
			    && MEM_P (XEXP (XEXP (link, 0), 0)))
			  {
			    link = XEXP (link, 1);
			    break;
			  }
		    }
		}
	      else
#endif
		INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl,
				      nargs);
	      /* For a virtual call, find where the first (object
		 pointer) argument is passed.  */
	      if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node)
		{
		  t = TYPE_ARG_TYPES (type);
		  function_arg_info arg (TREE_VALUE (t), /*named=*/true);
		  this_arg = targetm.calls.function_arg (args_so_far, arg);
		  if (this_arg && !REG_P (this_arg))
		    this_arg = NULL_RTX;
		  else if (this_arg == NULL_RTX)
		    {
		      /* Passed on the stack: use the first MEM USE.  */
		      for (; link; link = XEXP (link, 1))
			if (GET_CODE (XEXP (link, 0)) == USE
			    && MEM_P (XEXP (XEXP (link, 0), 0)))
			  {
			    this_arg = XEXP (XEXP (link, 0), 0);
			    break;
			  }
		    }
		}
	    }
	}
    }
  t = type ? TYPE_ARG_TYPES (type) : NULL_TREE;

  /* Main loop: for every argument location mentioned in
     CALL_INSN_FUNCTION_USAGE, try to describe its contents as a
     preserved cselib VALUE and chain it onto call_arguments.  */
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    if (GET_CODE (XEXP (link, 0)) == USE)
      {
	rtx item = NULL_RTX;
	x = XEXP (XEXP (link, 0), 0);
	if (GET_MODE (link) == VOIDmode
	    || GET_MODE (link) == BLKmode
	    || (GET_MODE (link) != GET_MODE (x)
		&& ((GET_MODE_CLASS (GET_MODE (link)) != MODE_INT
		     && GET_MODE_CLASS (GET_MODE (link)) != MODE_PARTIAL_INT)
		    || (GET_MODE_CLASS (GET_MODE (x)) != MODE_INT
			&& GET_MODE_CLASS (GET_MODE (x)) != MODE_PARTIAL_INT))))
	  /* Can't do anything for these, if the original type mode
	     isn't known or can't be converted.  */;
	else if (REG_P (x))
	  {
	    cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
	    scalar_int_mode mode;
	    if (val && cselib_preserved_value_p (val))
	      item = val->val_rtx;
	    else if (is_a <scalar_int_mode> (GET_MODE (x), result: &mode))
	      {
		/* No preserved value for the register itself; try the
		   wider registers containing it, up to word size.  */
		opt_scalar_int_mode mode_iter;
		FOR_EACH_WIDER_MODE (mode_iter, mode)
		  {
		    mode = mode_iter.require ();
		    if (GET_MODE_BITSIZE (mode) > BITS_PER_WORD)
		      break;

		    rtx reg = simplify_subreg (outermode: mode, op: x, GET_MODE (x), byte: 0);
		    if (reg == NULL_RTX || !REG_P (reg))
		      continue;
		    val = cselib_lookup (reg, mode, 0, VOIDmode);
		    if (val && cselib_preserved_value_p (val))
		      {
			item = val->val_rtx;
			break;
		      }
		  }
	      }
	  }
	else if (MEM_P (x))
	  {
	    rtx mem = x;
	    cselib_val *val;

	    if (!frame_pointer_needed)
	      {
		/* Rewrite sp-based addresses to account for the current
		   stack adjustment before looking the MEM up.  */
		class adjust_mem_data amd;
		amd.mem_mode = VOIDmode;
		amd.stack_adjust = -VTI (bb)->out.stack_adjust;
		amd.store = true;
		mem = simplify_replace_fn_rtx (mem, NULL_RTX, fn: adjust_mems,
					       &amd);
		gcc_assert (amd.side_effects.is_empty ());
	      }
	    val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode);
	    if (val && cselib_preserved_value_p (val))
	      item = val->val_rtx;
	    else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT
		     && GET_MODE_CLASS (GET_MODE (mem)) != MODE_PARTIAL_INT)
	      {
		/* For non-integer stack argument see also if they weren't
		   initialized by integers.  */
		scalar_int_mode imode;
		if (int_mode_for_mode (GET_MODE (mem)).exists (mode: &imode)
		    && imode != GET_MODE (mem))
		  {
		    val = cselib_lookup (adjust_address_nv (mem, imode, 0),
					 imode, 0, VOIDmode);
		    if (val && cselib_preserved_value_p (val))
		      item = lowpart_subreg (GET_MODE (x), op: val->val_rtx,
					     innermode: imode);
		  }
	      }
	  }
	if (item)
	  {
	    /* Record (concat location value), both converted to the
	       mode recorded on the USE link.  */
	    rtx x2 = x;
	    if (GET_MODE (item) != GET_MODE (link))
	      item = lowpart_subreg (GET_MODE (link), op: item, GET_MODE (item));
	    if (GET_MODE (x2) != GET_MODE (link))
	      x2 = lowpart_subreg (GET_MODE (link), op: x2, GET_MODE (x2));
	    item = gen_rtx_CONCAT (GET_MODE (link), x2, item);
	    call_arguments
	      = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments);
	  }
	if (t && t != void_list_node)
	  {
	    /* Walk the prototype in parallel; for a reference to an
	       integral object passed in X, additionally try to record
	       the pointed-to integer value.  */
	    rtx reg;
	    function_arg_info arg (TREE_VALUE (t), /*named=*/true);
	    apply_pass_by_reference_rules (&args_so_far_v, arg);
	    reg = targetm.calls.function_arg (args_so_far, arg);
	    if (TREE_CODE (arg.type) == REFERENCE_TYPE
		&& INTEGRAL_TYPE_P (TREE_TYPE (arg.type))
		&& reg
		&& REG_P (reg)
		&& GET_MODE (reg) == arg.mode
		&& (GET_MODE_CLASS (arg.mode) == MODE_INT
		    || GET_MODE_CLASS (arg.mode) == MODE_PARTIAL_INT)
		&& REG_P (x)
		&& REGNO (x) == REGNO (reg)
		&& GET_MODE (x) == arg.mode
		&& item)
	      {
		machine_mode indmode
		  = TYPE_MODE (TREE_TYPE (arg.type));
		rtx mem = gen_rtx_MEM (indmode, x);
		cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode);
		if (val && cselib_preserved_value_p (val))
		  {
		    item = gen_rtx_CONCAT (indmode, mem, val->val_rtx);
		    call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
							call_arguments);
		  }
		else
		  {
		    struct elt_loc_list *l;
		    tree initial;

		    /* Try harder, when passing address of a constant
		       pool integer it can be easily read back.  */
		    item = XEXP (item, 1);
		    if (GET_CODE (item) == SUBREG)
		      item = SUBREG_REG (item);
		    gcc_assert (GET_CODE (item) == VALUE);
		    val = CSELIB_VAL_PTR (item);
		    for (l = val->locs; l; l = l->next)
		      if (GET_CODE (l->loc) == SYMBOL_REF
			  && TREE_CONSTANT_POOL_ADDRESS_P (l->loc)
			  && SYMBOL_REF_DECL (l->loc)
			  && DECL_INITIAL (SYMBOL_REF_DECL (l->loc)))
			{
			  initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc));
			  if (tree_fits_shwi_p (initial))
			    {
			      item = GEN_INT (tree_to_shwi (initial));
			      item = gen_rtx_CONCAT (indmode, mem, item);
			      call_arguments
				= gen_rtx_EXPR_LIST (VOIDmode, item,
						     call_arguments);
			    }
			  break;
			}
		  }
	      }
	    targetm.calls.function_arg_advance (args_so_far, arg);
	    t = TREE_CHAIN (t);
	  }
      }

  /* Add debug arguments.  */
  if (fndecl
      && TREE_CODE (fndecl) == FUNCTION_DECL
      && DECL_HAS_DEBUG_ARGS_P (fndecl))
    {
      vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl);
      if (debug_args)
	{
	  unsigned int ix;
	  tree param;
	  /* debug_args holds (param, temp-decl) pairs, hence ix += 2.  */
	  for (ix = 0; vec_safe_iterate (v: *debug_args, ix, ptr: &param); ix += 2)
	    {
	      rtx item;
	      tree dtemp = (**debug_args)[ix + 1];
	      machine_mode mode = DECL_MODE (dtemp);
	      item = gen_rtx_DEBUG_PARAMETER_REF (mode, param);
	      item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp));
	      call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item,
						  call_arguments);
	    }
	}
    }

  /* Reverse call_arguments chain.  */
  prev = NULL_RTX;
  for (cur = call_arguments; cur; cur = next)
    {
      next = XEXP (cur, 1);
      XEXP (cur, 1) = prev;
      prev = cur;
    }
  call_arguments = prev;

  /* Record the callee address too, unless it is a plain symbol:
     (concat pc callee) with either a constant or a preserved VALUE.  */
  x = get_call_rtx_from (insn);
  if (x)
    {
      x = XEXP (XEXP (x, 0), 0);
      if (GET_CODE (x) == SYMBOL_REF)
	/* Don't record anything.  */;
      else if (CONSTANT_P (x))
	{
	  x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x),
			      pc_rtx, x);
	  call_arguments
	    = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
	}
      else
	{
	  cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
	  if (val && cselib_preserved_value_p (val))
	    {
	      x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx);
	      call_arguments
		= gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
	    }
	}
    }
  /* For a virtual call, record the vtable slot the callee was loaded
     from: MEM at OBJ_TYPE_REF_TOKEN entries past the vtable pointer
     loaded from the object.  */
  if (this_arg)
    {
      machine_mode mode
	= TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref)));
      rtx clobbered = gen_rtx_MEM (mode, this_arg);
      HOST_WIDE_INT token
	= tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref));
      if (token)
	clobbered = plus_constant (mode, clobbered,
				   token * GET_MODE_SIZE (mode));
      clobbered = gen_rtx_MEM (mode, clobbered);
      x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered);
      call_arguments
	= gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments);
    }
}
6569 | |
6570 | /* Callback for cselib_record_sets_hook, that records as micro |
6571 | operations uses and stores in an insn after cselib_record_sets has |
6572 | analyzed the sets in an insn, but before it modifies the stored |
6573 | values in the internal tables, unless cselib_record_sets doesn't |
6574 | call it directly (perhaps because we're not doing cselib in the |
6575 | first place, in which case sets and n_sets will be 0). */ |
6576 | |
6577 | static void |
6578 | add_with_sets (rtx_insn *insn, struct cselib_set *sets, int n_sets) |
6579 | { |
6580 | basic_block bb = BLOCK_FOR_INSN (insn); |
6581 | int n1, n2; |
6582 | struct count_use_info cui; |
6583 | micro_operation *mos; |
6584 | |
6585 | cselib_hook_called = true; |
6586 | |
6587 | cui.insn = insn; |
6588 | cui.bb = bb; |
6589 | cui.sets = sets; |
6590 | cui.n_sets = n_sets; |
6591 | |
6592 | n1 = VTI (bb)->mos.length (); |
6593 | cui.store_p = false; |
6594 | note_uses (&PATTERN (insn), add_uses_1, &cui); |
6595 | n2 = VTI (bb)->mos.length () - 1; |
6596 | mos = VTI (bb)->mos.address (); |
6597 | |
6598 | /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and |
6599 | MO_VAL_LOC last. */ |
6600 | while (n1 < n2) |
6601 | { |
6602 | while (n1 < n2 && mos[n1].type == MO_USE) |
6603 | n1++; |
6604 | while (n1 < n2 && mos[n2].type != MO_USE) |
6605 | n2--; |
6606 | if (n1 < n2) |
6607 | std::swap (a&: mos[n1], b&: mos[n2]); |
6608 | } |
6609 | |
6610 | n2 = VTI (bb)->mos.length () - 1; |
6611 | while (n1 < n2) |
6612 | { |
6613 | while (n1 < n2 && mos[n1].type != MO_VAL_LOC) |
6614 | n1++; |
6615 | while (n1 < n2 && mos[n2].type == MO_VAL_LOC) |
6616 | n2--; |
6617 | if (n1 < n2) |
6618 | std::swap (a&: mos[n1], b&: mos[n2]); |
6619 | } |
6620 | |
6621 | if (CALL_P (insn)) |
6622 | { |
6623 | micro_operation mo; |
6624 | |
6625 | mo.type = MO_CALL; |
6626 | mo.insn = insn; |
6627 | mo.u.loc = call_arguments; |
6628 | call_arguments = NULL_RTX; |
6629 | |
6630 | if (dump_file && (dump_flags & TDF_DETAILS)) |
6631 | log_op_type (x: PATTERN (insn), bb, insn, mopt: mo.type, out: dump_file); |
6632 | VTI (bb)->mos.safe_push (obj: mo); |
6633 | } |
6634 | |
6635 | n1 = VTI (bb)->mos.length (); |
6636 | /* This will record NEXT_INSN (insn), such that we can |
6637 | insert notes before it without worrying about any |
6638 | notes that MO_USEs might emit after the insn. */ |
6639 | cui.store_p = true; |
6640 | note_stores (insn, add_stores, &cui); |
6641 | n2 = VTI (bb)->mos.length () - 1; |
6642 | mos = VTI (bb)->mos.address (); |
6643 | |
6644 | /* Order the MO_VAL_USEs first (note_stores does nothing |
6645 | on DEBUG_INSNs, so there are no MO_VAL_LOCs from this |
6646 | insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */ |
6647 | while (n1 < n2) |
6648 | { |
6649 | while (n1 < n2 && mos[n1].type == MO_VAL_USE) |
6650 | n1++; |
6651 | while (n1 < n2 && mos[n2].type != MO_VAL_USE) |
6652 | n2--; |
6653 | if (n1 < n2) |
6654 | std::swap (a&: mos[n1], b&: mos[n2]); |
6655 | } |
6656 | |
6657 | n2 = VTI (bb)->mos.length () - 1; |
6658 | while (n1 < n2) |
6659 | { |
6660 | while (n1 < n2 && mos[n1].type == MO_CLOBBER) |
6661 | n1++; |
6662 | while (n1 < n2 && mos[n2].type != MO_CLOBBER) |
6663 | n2--; |
6664 | if (n1 < n2) |
6665 | std::swap (a&: mos[n1], b&: mos[n2]); |
6666 | } |
6667 | } |
6668 | |
6669 | static enum var_init_status |
6670 | find_src_status (dataflow_set *in, rtx src) |
6671 | { |
6672 | tree decl = NULL_TREE; |
6673 | enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED; |
6674 | |
6675 | if (! flag_var_tracking_uninit) |
6676 | status = VAR_INIT_STATUS_INITIALIZED; |
6677 | |
6678 | if (src && REG_P (src)) |
6679 | decl = var_debug_decl (REG_EXPR (src)); |
6680 | else if (src && MEM_P (src)) |
6681 | decl = var_debug_decl (MEM_EXPR (src)); |
6682 | |
6683 | if (src && decl) |
6684 | status = get_init_value (set: in, loc: src, dv: dv_from_decl (decl)); |
6685 | |
6686 | return status; |
6687 | } |
6688 | |
6689 | /* SRC is the source of an assignment. Use SET to try to find what |
6690 | was ultimately assigned to SRC. Return that value if known, |
6691 | otherwise return SRC itself. */ |
6692 | |
6693 | static rtx |
6694 | find_src_set_src (dataflow_set *set, rtx src) |
6695 | { |
6696 | tree decl = NULL_TREE; /* The variable being copied around. */ |
6697 | rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */ |
6698 | variable *var; |
6699 | location_chain *nextp; |
6700 | int i; |
6701 | bool found; |
6702 | |
6703 | if (src && REG_P (src)) |
6704 | decl = var_debug_decl (REG_EXPR (src)); |
6705 | else if (src && MEM_P (src)) |
6706 | decl = var_debug_decl (MEM_EXPR (src)); |
6707 | |
6708 | if (src && decl) |
6709 | { |
6710 | decl_or_value dv = dv_from_decl (decl); |
6711 | |
6712 | var = shared_hash_find (vars: set->vars, dv); |
6713 | if (var) |
6714 | { |
6715 | found = false; |
6716 | for (i = 0; i < var->n_var_parts && !found; i++) |
6717 | for (nextp = var->var_part[i].loc_chain; nextp && !found; |
6718 | nextp = nextp->next) |
6719 | if (rtx_equal_p (nextp->loc, src)) |
6720 | { |
6721 | set_src = nextp->set_src; |
6722 | found = true; |
6723 | } |
6724 | |
6725 | } |
6726 | } |
6727 | |
6728 | return set_src; |
6729 | } |
6730 | |
/* Compute the changes of variable locations in the basic block BB.
   Recompute VTI (bb)->out by applying BB's recorded micro operations
   to a copy of VTI (bb)->in.  Return true if the resulting OUT set
   differs from the OUT set computed on a previous iteration.  */

static bool
compute_bb_dataflow (basic_block bb)
{
  unsigned int i;
  micro_operation *mo;
  bool changed;
  dataflow_set old_out;
  dataflow_set *in = &VTI (bb)->in;
  dataflow_set *out = &VTI (bb)->out;

  /* Save the previous OUT set so we can detect convergence below,
     then restart OUT from the current IN set.  */
  dataflow_set_init (set: &old_out);
  dataflow_set_copy (dst: &old_out, src: out);
  dataflow_set_copy (dst: out, src: in);

  /* The address-resolution cache is only valid within one block.  */
  if (MAY_HAVE_DEBUG_BIND_INSNS)
    local_get_addr_cache = new hash_map<rtx, rtx>;

  FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
    {
      rtx_insn *insn = mo->insn;

      switch (mo->type)
	{
	  case MO_CALL:
	    /* A call invalidates locations per dataflow_set_clear_at_call.  */
	    dataflow_set_clear_at_call (set: out, call_insn: insn);
	    break;

	  case MO_USE:
	    {
	      rtx loc = mo->u.loc;

	      /* A use keeps the location live; init status unknown here.  */
	      if (REG_P (loc))
		var_reg_set (set: out, loc, initialized: VAR_INIT_STATUS_UNINITIALIZED, NULL);
	      else if (MEM_P (loc))
		var_mem_set (set: out, loc, initialized: VAR_INIT_STATUS_UNINITIALIZED, NULL);
	    }
	    break;

	  case MO_VAL_LOC:
	    {
	      rtx loc = mo->u.loc;
	      rtx val, vloc;
	      tree var;

	      /* LOC is either (concat VAL VAR_LOCATION) or a bare
		 VAR_LOCATION pattern.  */
	      if (GET_CODE (loc) == CONCAT)
		{
		  val = XEXP (loc, 0);
		  vloc = XEXP (loc, 1);
		}
	      else
		{
		  val = NULL_RTX;
		  vloc = loc;
		}

	      var = PAT_VAR_LOCATION_DECL (vloc);

	      /* Replace any previous binding of VAR before recording
		 the new one.  */
	      clobber_variable_part (out, NULL_RTX,
				     dv_from_decl (decl: var), 0, NULL_RTX);
	      if (val)
		{
		  if (VAL_NEEDS_RESOLUTION (loc))
		    val_resolve (set: out, val, PAT_VAR_LOCATION_LOC (vloc), insn);
		  set_variable_part (out, val, dv_from_decl (decl: var), 0,
				     VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
				     INSERT);
		}
	      else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
		set_variable_part (out, PAT_VAR_LOCATION_LOC (vloc),
				   dv_from_decl (decl: var), 0,
				   VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
				   INSERT);
	    }
	    break;

	  case MO_VAL_USE:
	    {
	      rtx loc = mo->u.loc;
	      rtx val, vloc, uloc;

	      /* LOC is (concat VAL ULOC), possibly with VAL itself a
		 (concat VAL ULOC) pair.  */
	      vloc = uloc = XEXP (loc, 1);
	      val = XEXP (loc, 0);

	      if (GET_CODE (val) == CONCAT)
		{
		  uloc = XEXP (val, 1);
		  val = XEXP (val, 0);
		}

	      if (VAL_NEEDS_RESOLUTION (loc))
		val_resolve (set: out, val, loc: vloc, insn);
	      else
		val_store (set: out, val, loc: uloc, insn, modified: false);

	      if (VAL_HOLDS_TRACK_EXPR (loc))
		{
		  /* The use also keeps the tracked expression live.  */
		  if (GET_CODE (uloc) == REG)
		    var_reg_set (set: out, loc: uloc, initialized: VAR_INIT_STATUS_UNINITIALIZED,
				 NULL);
		  else if (GET_CODE (uloc) == MEM)
		    var_mem_set (set: out, loc: uloc, initialized: VAR_INIT_STATUS_UNINITIALIZED,
				 NULL);
		}
	    }
	    break;

	  case MO_VAL_SET:
	    {
	      rtx loc = mo->u.loc;
	      rtx val, vloc, uloc;
	      rtx dstv, srcv;

	      /* Unpack (concat VAL ULOC); ULOC may be a SET.  */
	      vloc = loc;
	      uloc = XEXP (vloc, 1);
	      val = XEXP (vloc, 0);
	      vloc = uloc;

	      if (GET_CODE (uloc) == SET)
		{
		  dstv = SET_DEST (uloc);
		  srcv = SET_SRC (uloc);
		}
	      else
		{
		  dstv = uloc;
		  srcv = NULL;
		}

	      /* VAL may carry an alternate location pattern in a CONCAT.  */
	      if (GET_CODE (val) == CONCAT)
		{
		  dstv = vloc = XEXP (val, 1);
		  val = XEXP (val, 0);
		}

	      if (GET_CODE (vloc) == SET)
		{
		  srcv = SET_SRC (vloc);

		  gcc_assert (val != srcv);
		  gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));

		  dstv = vloc = SET_DEST (vloc);

		  if (VAL_NEEDS_RESOLUTION (loc))
		    val_resolve (set: out, val, loc: srcv, insn);
		}
	      else if (VAL_NEEDS_RESOLUTION (loc))
		{
		  gcc_assert (GET_CODE (uloc) == SET
			      && GET_CODE (SET_SRC (uloc)) == REG);
		  val_resolve (set: out, val, SET_SRC (uloc), insn);
		}

	      if (VAL_HOLDS_TRACK_EXPR (loc))
		{
		  if (VAL_EXPR_IS_CLOBBERED (loc))
		    {
		      /* The tracked expression is clobbered: drop its
			 bindings entirely.  */
		      if (REG_P (uloc))
			var_reg_delete (set: out, loc: uloc, clobber: true);
		      else if (MEM_P (uloc))
			{
			  gcc_assert (MEM_P (dstv));
			  gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
			  var_mem_delete (set: out, loc: dstv, clobber: true);
			}
		    }
		  else
		    {
		      bool copied_p = VAL_EXPR_IS_COPIED (loc);
		      rtx src = NULL, dst = uloc;
		      enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;

		      if (GET_CODE (uloc) == SET)
			{
			  src = SET_SRC (uloc);
			  dst = SET_DEST (uloc);
			}

		      if (copied_p)
			{
			  /* For a copy, inherit the init status of the
			     source, checking IN first, then OUT.  */
			  if (flag_var_tracking_uninit)
			    {
			      status = find_src_status (in, src);

			      if (status == VAR_INIT_STATUS_UNKNOWN)
				status = find_src_status (in: out, src);
			    }

			  src = find_src_set_src (set: in, src);
			}

		      if (REG_P (dst))
			var_reg_delete_and_set (set: out, loc: dst, modify: !copied_p,
						initialized: status, set_src: srcv);
		      else if (MEM_P (dst))
			{
			  gcc_assert (MEM_P (dstv));
			  gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
			  var_mem_delete_and_set (set: out, loc: dstv, modify: !copied_p,
						  initialized: status, set_src: srcv);
			}
		    }
		}
	      else if (REG_P (uloc))
		var_regno_delete (set: out, REGNO (uloc));
	      else if (MEM_P (uloc))
		{
		  gcc_checking_assert (GET_CODE (vloc) == MEM);
		  gcc_checking_assert (dstv == vloc);
		  if (dstv != vloc)
		    clobber_overlapping_mems (set: out, loc: vloc);
		}

	      val_store (set: out, val, loc: dstv, insn, modified: true);
	    }
	    break;

	  case MO_SET:
	    {
	      rtx loc = mo->u.loc;
	      rtx set_src = NULL;

	      if (GET_CODE (loc) == SET)
		{
		  set_src = SET_SRC (loc);
		  loc = SET_DEST (loc);
		}

	      /* A plain set: the destination now holds the variable,
		 marked initialized.  */
	      if (REG_P (loc))
		var_reg_delete_and_set (set: out, loc, modify: true, initialized: VAR_INIT_STATUS_INITIALIZED,
					set_src);
	      else if (MEM_P (loc))
		var_mem_delete_and_set (set: out, loc, modify: true, initialized: VAR_INIT_STATUS_INITIALIZED,
					set_src);
	    }
	    break;

	  case MO_COPY:
	    {
	      rtx loc = mo->u.loc;
	      enum var_init_status src_status;
	      rtx set_src = NULL;

	      if (GET_CODE (loc) == SET)
		{
		  set_src = SET_SRC (loc);
		  loc = SET_DEST (loc);
		}

	      /* A copy: the variable keeps its old locations too, and
		 the init status comes from the source (IN, then OUT).  */
	      if (! flag_var_tracking_uninit)
		src_status = VAR_INIT_STATUS_INITIALIZED;
	      else
		{
		  src_status = find_src_status (in, src: set_src);

		  if (src_status == VAR_INIT_STATUS_UNKNOWN)
		    src_status = find_src_status (in: out, src: set_src);
		}

	      set_src = find_src_set_src (set: in, src: set_src);

	      if (REG_P (loc))
		var_reg_delete_and_set (set: out, loc, modify: false, initialized: src_status, set_src);
	      else if (MEM_P (loc))
		var_mem_delete_and_set (set: out, loc, modify: false, initialized: src_status, set_src);
	    }
	    break;

	  case MO_USE_NO_VAR:
	    {
	      rtx loc = mo->u.loc;

	      /* Use of a location with no associated variable: just
		 remove bindings, without marking a clobber.  */
	      if (REG_P (loc))
		var_reg_delete (set: out, loc, clobber: false);
	      else if (MEM_P (loc))
		var_mem_delete (set: out, loc, clobber: false);
	    }
	    break;

	  case MO_CLOBBER:
	    {
	      rtx loc = mo->u.loc;

	      if (REG_P (loc))
		var_reg_delete (set: out, loc, clobber: true);
	      else if (MEM_P (loc))
		var_mem_delete (set: out, loc, clobber: true);
	    }
	    break;

	  case MO_ADJUST:
	    /* Track the running stack-pointer adjustment.  */
	    out->stack_adjust += mo->u.adjust;
	    break;
	}
    }

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      delete local_get_addr_cache;
      local_get_addr_cache = NULL;

      /* Canonicalize the OUT set so that dataflow_set_different below
	 compares like with like.  */
      dataflow_set_equiv_regs (set: out);
      shared_hash_htab (vars: out->vars)
	->traverse <dataflow_set *, canonicalize_values_mark> (argument: out);
      shared_hash_htab (vars: out->vars)
	->traverse <dataflow_set *, canonicalize_values_star> (argument: out);
      if (flag_checking)
	shared_hash_htab (vars: out->vars)
	  ->traverse <dataflow_set *, canonicalize_loc_order_check> (argument: out);
    }
  changed = dataflow_set_different (old_set: &old_out, new_set: out);
  dataflow_set_destroy (set: &old_out);
  return changed;
}
7047 | |
/* Find the locations of variables in the whole function.  Iterate the
   per-block dataflow (compute_bb_dataflow) over a worklist in reverse
   post order until a fixed point is reached, processing toplevel SCCs
   one at a time.  Return false if the cumulative hash-table size limit
   (param_max_vartrack_size) is exceeded, true on success.  */

static bool
vt_find_locations (void)
{
  bb_heap_t *worklist = new bb_heap_t (LONG_MIN);
  bb_heap_t *pending = new bb_heap_t (LONG_MIN);
  sbitmap in_worklist, in_pending;
  basic_block bb;
  edge e;
  int *bb_order;
  int *rc_order;
  int i;
  int htabsz = 0;
  int htabmax = param_max_vartrack_size;
  bool success = true;
  unsigned int n_blocks_processed = 0;

  timevar_push (tv: TV_VAR_TRACKING_DATAFLOW);
  /* Compute reverse completion order of depth first search of the CFG
     so that the data-flow runs faster.  */
  rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS);
  bb_order = XNEWVEC (int, last_basic_block_for_fn (cfun));
  auto_bitmap exit_bbs;
  bitmap_set_bit (exit_bbs, EXIT_BLOCK);
  auto_vec<std::pair<int, int> > toplevel_scc_extents;
  int n = rev_post_order_and_mark_dfs_back_seme
    (cfun, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), exit_bbs, true,
     rc_order, &toplevel_scc_extents);
  /* bb_order maps block index -> RPO position.  */
  for (i = 0; i < n; i++)
    bb_order[rc_order[i]] = i;

  in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun));
  in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun));
  bitmap_clear (in_worklist);
  bitmap_clear (in_pending);

  /* We're performing the dataflow iteration independently over the
     toplevel SCCs plus leading non-cyclic entry blocks and separately
     over the tail.  That ensures best memory locality and the least
     number of visited blocks.  */
  unsigned extent = 0;
  int curr_start = -1;
  int curr_end = -1;
  do
    {
      /* [curr_start, curr_end] is the RPO range covered in this round:
	 up to the end of the next SCC, or the rest of the function.  */
      curr_start = curr_end + 1;
      if (toplevel_scc_extents.length () <= extent)
	curr_end = n - 1;
      else
	curr_end = toplevel_scc_extents[extent++].second;

      for (int i = curr_start; i <= curr_end; ++i)
	{
	  pending->insert (key: i, BASIC_BLOCK_FOR_FN (cfun, rc_order[i]));
	  bitmap_set_bit (map: in_pending, bitno: rc_order[i]);
	}

      while (success && !pending->empty ())
	{
	  /* Swap pending into worklist; blocks re-queued during this
	     sweep that are earlier in RPO go to the next sweep.  */
	  std::swap (a&: worklist, b&: pending);
	  std::swap (a&: in_worklist, b&: in_pending);

	  while (!worklist->empty ())
	    {
	      bool changed;
	      edge_iterator ei;
	      int oldinsz, oldoutsz;

	      bb = worklist->extract_min ();
	      bitmap_clear_bit (map: in_worklist, bitno: bb->index);

	      /* Account this block's old table sizes so htabsz tracks
		 the current total, and remember them for dumping.  */
	      if (VTI (bb)->in.vars)
		{
		  htabsz -= (shared_hash_htab (VTI (bb)->in.vars)->size ()
			     + shared_hash_htab (VTI (bb)->out.vars)->size ());
		  oldinsz = shared_hash_htab (VTI (bb)->in.vars)->elements ();
		  oldoutsz = shared_hash_htab (VTI (bb)->out.vars)->elements ();
		}
	      else
		oldinsz = oldoutsz = 0;

	      if (MAY_HAVE_DEBUG_BIND_INSNS)
		{
		  dataflow_set *in = &VTI (bb)->in, *first_out = NULL;
		  bool first = true, adjust = false;

		  /* Calculate the IN set as the intersection of
		     predecessor OUT sets.  */

		  dataflow_set_clear (set: in);
		  dst_can_be_shared = true;

		  FOR_EACH_EDGE (e, ei, bb->preds)
		    if (!VTI (e->src)->flooded)
		      gcc_assert (bb_order[bb->index]
				  <= bb_order[e->src->index]);
		    else if (first)
		      {
			dataflow_set_copy (dst: in, src: &VTI (e->src)->out);
			first_out = &VTI (e->src)->out;
			first = false;
		      }
		    else
		      {
			dataflow_set_merge (dst: in, src2: &VTI (e->src)->out);
			adjust = true;
		      }

		  if (adjust)
		    {
		      dataflow_post_merge_adjust (set: in, permp: &VTI (bb)->permp);

		      if (flag_checking)
			/* Merge and merge_adjust should keep entries in
			   canonical order.  */
			shared_hash_htab (vars: in->vars)
			  ->traverse <dataflow_set *,
				      canonicalize_loc_order_check> (argument: in);

		      if (dst_can_be_shared)
			{
			  /* The merge did not change anything relative to
			     the first predecessor; share its table.  */
			  shared_hash_destroy (vars: in->vars);
			  in->vars = shared_hash_copy (vars: first_out->vars);
			}
		    }

		  VTI (bb)->flooded = true;
		}
	      else
		{
		  /* Calculate the IN set as union of predecessor OUT sets.  */
		  dataflow_set_clear (set: &VTI (bb)->in);
		  FOR_EACH_EDGE (e, ei, bb->preds)
		    dataflow_set_union (dst: &VTI (bb)->in, src: &VTI (e->src)->out);
		}

	      changed = compute_bb_dataflow (bb);
	      n_blocks_processed++;
	      htabsz += (shared_hash_htab (VTI (bb)->in.vars)->size ()
			 + shared_hash_htab (VTI (bb)->out.vars)->size ());

	      /* Give up (and retry without -fvar-tracking-assignments at
		 the caller's discretion) if the tables grew too large.  */
	      if (htabmax && htabsz > htabmax)
		{
		  if (MAY_HAVE_DEBUG_BIND_INSNS)
		    inform (DECL_SOURCE_LOCATION (cfun->decl),
			    "variable tracking size limit exceeded with "
			    "%<-fvar-tracking-assignments%>, retrying without" );
		  else
		    inform (DECL_SOURCE_LOCATION (cfun->decl),
			    "variable tracking size limit exceeded" );
		  success = false;
		  break;
		}

	      if (changed)
		{
		  FOR_EACH_EDGE (e, ei, bb->succs)
		    {
		      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
			continue;

		      /* Iterate to an earlier block in RPO in the next
			 round, iterate to the same block immediately.  */
		      if (bb_order[e->dest->index] < bb_order[bb->index])
			{
			  gcc_assert (bb_order[e->dest->index] >= curr_start);
			  if (!bitmap_bit_p (map: in_pending, bitno: e->dest->index))
			    {
			      /* Send E->DEST to next round.  */
			      bitmap_set_bit (map: in_pending, bitno: e->dest->index);
			      pending->insert (key: bb_order[e->dest->index],
					       data: e->dest);
			    }
			}
		      else if (bb_order[e->dest->index] <= curr_end
			       && !bitmap_bit_p (map: in_worklist, bitno: e->dest->index))
			{
			  /* Add E->DEST to current round or delay
			     processing if it is in the next SCC.  */
			  bitmap_set_bit (map: in_worklist, bitno: e->dest->index);
			  worklist->insert (key: bb_order[e->dest->index],
					    data: e->dest);
			}
		    }
		}

	      if (dump_file)
		fprintf (stream: dump_file,
			 format: "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, "
			 "tsz %i\n" , bb->index,
			 (int)shared_hash_htab (VTI (bb)->in.vars)->size (),
			 oldinsz,
			 (int)shared_hash_htab (VTI (bb)->out.vars)->size (),
			 oldoutsz,
			 (int)worklist->nodes (), (int)pending->nodes (),
			 htabsz);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (stream: dump_file, format: "BB %i IN:\n" , bb->index);
		  dump_dataflow_set (&VTI (bb)->in);
		  fprintf (stream: dump_file, format: "BB %i OUT:\n" , bb->index);
		  dump_dataflow_set (&VTI (bb)->out);
		}
	    }
	}
    }
  while (curr_end != n - 1);

  statistics_counter_event (cfun, "compute_bb_dataflow times" ,
			    n_blocks_processed);

  /* With debug-bind insns every reachable block must have been visited
     at least once (its IN set "flooded" from predecessors).  */
  if (success && MAY_HAVE_DEBUG_BIND_INSNS)
    FOR_EACH_BB_FN (bb, cfun)
      gcc_assert (VTI (bb)->flooded);

  free (ptr: rc_order);
  free (ptr: bb_order);
  delete worklist;
  delete pending;
  sbitmap_free (map: in_worklist);
  sbitmap_free (map: in_pending);

  timevar_pop (tv: TV_VAR_TRACKING_DATAFLOW);
  return success;
}
7275 | |
7276 | /* Print the content of the LIST to dump file. */ |
7277 | |
7278 | static void |
7279 | dump_attrs_list (attrs *list) |
7280 | { |
7281 | for (; list; list = list->next) |
7282 | { |
7283 | if (dv_is_decl_p (dv: list->dv)) |
7284 | print_mem_expr (dump_file, dv_as_decl (dv: list->dv)); |
7285 | else |
7286 | print_rtl_single (dump_file, dv_as_value (dv: list->dv)); |
7287 | fprintf (stream: dump_file, format: "+" HOST_WIDE_INT_PRINT_DEC, list->offset); |
7288 | } |
7289 | fprintf (stream: dump_file, format: "\n" ); |
7290 | } |
7291 | |
7292 | /* Print the information about variable *SLOT to dump file. */ |
7293 | |
7294 | int |
7295 | dump_var_tracking_slot (variable **slot, void *data ATTRIBUTE_UNUSED) |
7296 | { |
7297 | variable *var = *slot; |
7298 | |
7299 | dump_var (var); |
7300 | |
7301 | /* Continue traversing the hash table. */ |
7302 | return 1; |
7303 | } |
7304 | |
7305 | /* Print the information about variable VAR to dump file. */ |
7306 | |
7307 | static void |
7308 | dump_var (variable *var) |
7309 | { |
7310 | int i; |
7311 | location_chain *node; |
7312 | |
7313 | if (dv_is_decl_p (dv: var->dv)) |
7314 | { |
7315 | const_tree decl = dv_as_decl (dv: var->dv); |
7316 | |
7317 | if (DECL_NAME (decl)) |
7318 | { |
7319 | fprintf (stream: dump_file, format: " name: %s" , |
7320 | IDENTIFIER_POINTER (DECL_NAME (decl))); |
7321 | if (dump_flags & TDF_UID) |
7322 | fprintf (stream: dump_file, format: "D.%u" , DECL_UID (decl)); |
7323 | } |
7324 | else if (TREE_CODE (decl) == DEBUG_EXPR_DECL) |
7325 | fprintf (stream: dump_file, format: " name: D#%u" , DEBUG_TEMP_UID (decl)); |
7326 | else |
7327 | fprintf (stream: dump_file, format: " name: D.%u" , DECL_UID (decl)); |
7328 | fprintf (stream: dump_file, format: "\n" ); |
7329 | } |
7330 | else |
7331 | { |
7332 | fputc (c: ' ', stream: dump_file); |
7333 | print_rtl_single (dump_file, dv_as_value (dv: var->dv)); |
7334 | } |
7335 | |
7336 | for (i = 0; i < var->n_var_parts; i++) |
7337 | { |
7338 | fprintf (stream: dump_file, format: " offset %ld\n" , |
7339 | (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i))); |
7340 | for (node = var->var_part[i].loc_chain; node; node = node->next) |
7341 | { |
7342 | fprintf (stream: dump_file, format: " " ); |
7343 | if (node->init == VAR_INIT_STATUS_UNINITIALIZED) |
7344 | fprintf (stream: dump_file, format: "[uninit]" ); |
7345 | print_rtl_single (dump_file, node->loc); |
7346 | } |
7347 | } |
7348 | } |
7349 | |
7350 | /* Print the information about variables from hash table VARS to dump file. */ |
7351 | |
7352 | static void |
7353 | dump_vars (variable_table_type *vars) |
7354 | { |
7355 | if (!vars->is_empty ()) |
7356 | { |
7357 | fprintf (stream: dump_file, format: "Variables:\n" ); |
7358 | vars->traverse <void *, dump_var_tracking_slot> (NULL); |
7359 | } |
7360 | } |
7361 | |
7362 | /* Print the dataflow set SET to dump file. */ |
7363 | |
7364 | static void |
7365 | dump_dataflow_set (dataflow_set *set) |
7366 | { |
7367 | int i; |
7368 | |
7369 | fprintf (stream: dump_file, format: "Stack adjustment: " HOST_WIDE_INT_PRINT_DEC "\n" , |
7370 | set->stack_adjust); |
7371 | for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
7372 | { |
7373 | if (set->regs[i]) |
7374 | { |
7375 | fprintf (stream: dump_file, format: "Reg %d:" , i); |
7376 | dump_attrs_list (list: set->regs[i]); |
7377 | } |
7378 | } |
7379 | dump_vars (vars: shared_hash_htab (vars: set->vars)); |
7380 | fprintf (stream: dump_file, format: "\n" ); |
7381 | } |
7382 | |
7383 | /* Print the IN and OUT sets for each basic block to dump file. */ |
7384 | |
7385 | static void |
7386 | dump_dataflow_sets (void) |
7387 | { |
7388 | basic_block bb; |
7389 | |
7390 | FOR_EACH_BB_FN (bb, cfun) |
7391 | { |
7392 | fprintf (stream: dump_file, format: "\nBasic block %d:\n" , bb->index); |
7393 | fprintf (stream: dump_file, format: "IN:\n" ); |
7394 | dump_dataflow_set (set: &VTI (bb)->in); |
7395 | fprintf (stream: dump_file, format: "OUT:\n" ); |
7396 | dump_dataflow_set (set: &VTI (bb)->out); |
7397 | } |
7398 | } |
7399 | |
7400 | /* Return the variable for DV in dropped_values, inserting one if |
7401 | requested with INSERT. */ |
7402 | |
7403 | static inline variable * |
7404 | variable_from_dropped (decl_or_value dv, enum insert_option insert) |
7405 | { |
7406 | variable **slot; |
7407 | variable *empty_var; |
7408 | onepart_enum onepart; |
7409 | |
7410 | slot = dropped_values->find_slot_with_hash (comparable: dv, hash: dv_htab_hash (dv), insert); |
7411 | |
7412 | if (!slot) |
7413 | return NULL; |
7414 | |
7415 | if (*slot) |
7416 | return *slot; |
7417 | |
7418 | gcc_checking_assert (insert == INSERT); |
7419 | |
7420 | onepart = dv_onepart_p (dv); |
7421 | |
7422 | gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR); |
7423 | |
7424 | empty_var = onepart_pool_allocate (onepart); |
7425 | empty_var->dv = dv; |
7426 | empty_var->refcount = 1; |
7427 | empty_var->n_var_parts = 0; |
7428 | empty_var->onepart = onepart; |
7429 | empty_var->in_changed_variables = false; |
7430 | empty_var->var_part[0].loc_chain = NULL; |
7431 | empty_var->var_part[0].cur_loc = NULL; |
7432 | VAR_LOC_1PAUX (empty_var) = NULL; |
7433 | set_dv_changed (dv, newv: true); |
7434 | |
7435 | *slot = empty_var; |
7436 | |
7437 | return empty_var; |
7438 | } |
7439 | |
7440 | /* Recover the one-part aux from dropped_values. */ |
7441 | |
7442 | static struct onepart_aux * |
7443 | recover_dropped_1paux (variable *var) |
7444 | { |
7445 | variable *dvar; |
7446 | |
7447 | gcc_checking_assert (var->onepart); |
7448 | |
7449 | if (VAR_LOC_1PAUX (var)) |
7450 | return VAR_LOC_1PAUX (var); |
7451 | |
7452 | if (var->onepart == ONEPART_VDECL) |
7453 | return NULL; |
7454 | |
7455 | dvar = variable_from_dropped (dv: var->dv, insert: NO_INSERT); |
7456 | |
7457 | if (!dvar) |
7458 | return NULL; |
7459 | |
7460 | VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar); |
7461 | VAR_LOC_1PAUX (dvar) = NULL; |
7462 | |
7463 | return VAR_LOC_1PAUX (var); |
7464 | } |
7465 | |
/* Add variable VAR to the hash table of changed variables and
   if it has no locations delete it from SET's hash table.

   When notes are being emitted, VAR (or, if it became empty, a fresh
   empty placeholder) is recorded in changed_variables so a note can be
   emitted later; refcounts are adjusted accordingly and the one-part
   aux is carried over so it is not lost.  SET may be NULL only when
   emit_notes is true.  */

static void
variable_was_changed (variable *var, dataflow_set *set)
{
  hashval_t hash = dv_htab_hash (dv: var->dv);

  if (emit_notes)
    {
      variable **slot;

      /* Remember this decl or VALUE has been added to changed_variables.  */
      set_dv_changed (dv: var->dv, newv: true);

      slot = changed_variables->find_slot_with_hash (comparable: var->dv, hash, insert: INSERT);

      if (*slot)
	{
	  /* Replace a previously recorded entry for the same dv,
	     keeping its one-part aux alive.  */
	  variable *old_var = *slot;
	  gcc_assert (old_var->in_changed_variables);
	  old_var->in_changed_variables = false;
	  if (var != old_var && var->onepart)
	    {
	      /* Restore the auxiliary info from an empty variable
		 previously created for changed_variables, so it is
		 not lost.  */
	      gcc_checking_assert (!VAR_LOC_1PAUX (var));
	      VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var);
	      VAR_LOC_1PAUX (old_var) = NULL;
	    }
	  variable_htab_free (elem: *slot);
	}

      if (set && var->n_var_parts == 0)
	{
	  /* VAR lost all locations: record an empty placeholder in
	     changed_variables (and in dropped_values for VALUEs and
	     debug exprs) instead of VAR itself.  */
	  onepart_enum onepart = var->onepart;
	  variable *empty_var = NULL;
	  variable **dslot = NULL;

	  if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR)
	    {
	      dslot = dropped_values->find_slot_with_hash (comparable: var->dv,
							   hash: dv_htab_hash (dv: var->dv),
							   insert: INSERT);
	      empty_var = *dslot;

	      if (empty_var)
		{
		  gcc_checking_assert (!empty_var->in_changed_variables);
		  /* At most one of the two may hold the aux.  */
		  if (!VAR_LOC_1PAUX (var))
		    {
		      VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var);
		      VAR_LOC_1PAUX (empty_var) = NULL;
		    }
		  else
		    gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
		}
	    }

	  if (!empty_var)
	    {
	      empty_var = onepart_pool_allocate (onepart);
	      empty_var->dv = var->dv;
	      empty_var->refcount = 1;
	      empty_var->n_var_parts = 0;
	      empty_var->onepart = onepart;
	      if (dslot)
		{
		  /* dropped_values holds its own reference.  */
		  empty_var->refcount++;
		  *dslot = empty_var;
		}
	    }
	  else
	    empty_var->refcount++;
	  empty_var->in_changed_variables = true;
	  *slot = empty_var;
	  if (onepart)
	    {
	      /* Move the aux over to the placeholder.  */
	      empty_var->var_part[0].loc_chain = NULL;
	      empty_var->var_part[0].cur_loc = NULL;
	      VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var);
	      VAR_LOC_1PAUX (var) = NULL;
	    }
	  goto drop_var;
	}
      else
	{
	  /* VAR still has locations: record VAR itself, recovering a
	     previously dropped aux if needed.  */
	  if (var->onepart && !VAR_LOC_1PAUX (var))
	    recover_dropped_1paux (var);
	  var->refcount++;
	  var->in_changed_variables = true;
	  *slot = var;
	}
    }
  else
    {
      gcc_assert (set);
      if (var->n_var_parts == 0)
	{
	  variable **slot;

	drop_var:
	  /* Remove the now-empty VAR from SET, unsharing the table
	     first if it is shared with another set.  */
	  slot = shared_hash_find_slot_noinsert (vars: set->vars, dv: var->dv);
	  if (slot)
	    {
	      if (shared_hash_shared (vars: set->vars))
		slot = shared_hash_find_slot_unshare (pvars: &set->vars, dv: var->dv,
						      ins: NO_INSERT);
	      shared_hash_htab (vars: set->vars)->clear_slot (slot);
	    }
	}
    }
}
7580 | |
7581 | /* Look for the index in VAR->var_part corresponding to OFFSET. |
7582 | Return -1 if not found. If INSERTION_POINT is non-NULL, the |
7583 | referenced int will be set to the index that the part has or should |
7584 | have, if it should be inserted. */ |
7585 | |
7586 | static inline int |
7587 | find_variable_location_part (variable *var, HOST_WIDE_INT offset, |
7588 | int *insertion_point) |
7589 | { |
7590 | int pos, low, high; |
7591 | |
7592 | if (var->onepart) |
7593 | { |
7594 | if (offset != 0) |
7595 | return -1; |
7596 | |
7597 | if (insertion_point) |
7598 | *insertion_point = 0; |
7599 | |
7600 | return var->n_var_parts - 1; |
7601 | } |
7602 | |
7603 | /* Find the location part. */ |
7604 | low = 0; |
7605 | high = var->n_var_parts; |
7606 | while (low != high) |
7607 | { |
7608 | pos = (low + high) / 2; |
7609 | if (VAR_PART_OFFSET (var, pos) < offset) |
7610 | low = pos + 1; |
7611 | else |
7612 | high = pos; |
7613 | } |
7614 | pos = low; |
7615 | |
7616 | if (insertion_point) |
7617 | *insertion_point = pos; |
7618 | |
7619 | if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset) |
7620 | return pos; |
7621 | |
7622 | return -1; |
7623 | } |
7624 | |
7625 | static variable ** |
7626 | set_slot_part (dataflow_set *set, rtx loc, variable **slot, |
7627 | decl_or_value dv, HOST_WIDE_INT offset, |
7628 | enum var_init_status initialized, rtx set_src) |
7629 | { |
7630 | int pos; |
7631 | location_chain *node, *next; |
7632 | location_chain **nextp; |
7633 | variable *var; |
7634 | onepart_enum onepart; |
7635 | |
7636 | var = *slot; |
7637 | |
7638 | if (var) |
7639 | onepart = var->onepart; |
7640 | else |
7641 | onepart = dv_onepart_p (dv); |
7642 | |
7643 | gcc_checking_assert (offset == 0 || !onepart); |
7644 | gcc_checking_assert (dv != loc); |
7645 | |
7646 | if (! flag_var_tracking_uninit) |
7647 | initialized = VAR_INIT_STATUS_INITIALIZED; |
7648 | |
7649 | if (!var) |
7650 | { |
7651 | /* Create new variable information. */ |
7652 | var = onepart_pool_allocate (onepart); |
7653 | var->dv = dv; |
7654 | var->refcount = 1; |
7655 | var->n_var_parts = 1; |
7656 | var->onepart = onepart; |
7657 | var->in_changed_variables = false; |
7658 | if (var->onepart) |
7659 | VAR_LOC_1PAUX (var) = NULL; |
7660 | else |
7661 | VAR_PART_OFFSET (var, 0) = offset; |
7662 | var->var_part[0].loc_chain = NULL; |
7663 | var->var_part[0].cur_loc = NULL; |
7664 | *slot = var; |
7665 | pos = 0; |
7666 | nextp = &var->var_part[0].loc_chain; |
7667 | } |
7668 | else if (onepart) |
7669 | { |
7670 | int r = -1, c = 0; |
7671 | |
7672 | gcc_assert (var->dv == dv); |
7673 | |
7674 | pos = 0; |
7675 | |
7676 | if (GET_CODE (loc) == VALUE) |
7677 | { |
7678 | for (nextp = &var->var_part[0].loc_chain; (node = *nextp); |
7679 | nextp = &node->next) |
7680 | if (GET_CODE (node->loc) == VALUE) |
7681 | { |
7682 | if (node->loc == loc) |
7683 | { |
7684 | r = 0; |
7685 | break; |
7686 | } |
7687 | if (canon_value_cmp (tval: node->loc, cval: loc)) |
7688 | c++; |
7689 | else |
7690 | { |
7691 | r = 1; |
7692 | break; |
7693 | } |
7694 | } |
7695 | else if (REG_P (node->loc) || MEM_P (node->loc)) |
7696 | c++; |
7697 | else |
7698 | { |
7699 | r = 1; |
7700 | break; |
7701 | } |
7702 | } |
7703 | else if (REG_P (loc)) |
7704 | { |
7705 | for (nextp = &var->var_part[0].loc_chain; (node = *nextp); |
7706 | nextp = &node->next) |
7707 | if (REG_P (node->loc)) |
7708 | { |
7709 | if (REGNO (node->loc) < REGNO (loc)) |
7710 | c++; |
7711 | else |
7712 | { |
7713 | if (REGNO (node->loc) == REGNO (loc)) |
7714 | r = 0; |
7715 | else |
7716 | r = 1; |
7717 | break; |
7718 | } |
7719 | } |
7720 | else |
7721 | { |
7722 | r = 1; |
7723 | break; |
7724 | } |
7725 | } |
7726 | else if (MEM_P (loc)) |
7727 | { |
7728 | for (nextp = &var->var_part[0].loc_chain; (node = *nextp); |
7729 | nextp = &node->next) |
7730 | if (REG_P (node->loc)) |
7731 | c++; |
7732 | else if (MEM_P (node->loc)) |
7733 | { |
7734 | if ((r = loc_cmp (XEXP (node->loc, 0), XEXP (loc, 0))) >= 0) |
7735 | break; |
7736 | else |
7737 | c++; |
7738 | } |
7739 | else |
7740 | { |
7741 | r = 1; |
7742 | break; |
7743 | } |
7744 | } |
7745 | else |
7746 | for (nextp = &var->var_part[0].loc_chain; (node = *nextp); |
7747 | nextp = &node->next) |
7748 | if ((r = loc_cmp (x: node->loc, y: loc)) >= 0) |
7749 | break; |
7750 | else |
7751 | c++; |
7752 | |
7753 | if (r == 0) |
7754 | return slot; |
7755 | |
7756 | if (shared_var_p (var, vars: set->vars)) |
7757 | { |
7758 | slot = unshare_variable (set, slot, var, initialized); |
7759 | var = *slot; |
7760 | for (nextp = &var->var_part[0].loc_chain; c; |
7761 | nextp = &(*nextp)->next) |
7762 | c--; |
7763 | gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc); |
7764 | } |
7765 | } |
7766 | else |
7767 | { |
7768 | int inspos = 0; |
7769 | |
7770 | gcc_assert (dv_as_decl (var->dv) == dv_as_decl (dv)); |
7771 | |
7772 | pos = find_variable_location_part (var, offset, insertion_point: &inspos); |
7773 | |
7774 | if (pos >= 0) |
7775 | { |
7776 | node = var->var_part[pos].loc_chain; |
7777 | |
7778 | if (node |
7779 | && ((REG_P (node->loc) && REG_P (loc) |
7780 | && REGNO (node->loc) == REGNO (loc)) |
7781 | || rtx_equal_p (node->loc, loc))) |
7782 | { |
7783 | /* LOC is in the beginning of the chain so we have nothing |
7784 | to do. */ |
7785 | if (node->init < initialized) |
7786 | node->init = initialized; |
7787 | if (set_src != NULL) |
7788 | node->set_src = set_src; |
7789 | |
7790 | return slot; |
7791 | } |
7792 | else |
7793 | { |
7794 | /* We have to make a copy of a shared variable. */ |
7795 | if (shared_var_p (var, vars: set->vars)) |
7796 | { |
7797 | slot = unshare_variable (set, slot, var, initialized); |
7798 | var = *slot; |
7799 | } |
7800 | } |
7801 | } |
7802 | else |
7803 | { |
7804 | /* We have not found the location part, new one will be created. */ |
7805 | |
7806 | /* We have to make a copy of the shared variable. */ |
7807 | if (shared_var_p (var, vars: set->vars)) |
7808 | { |
7809 | slot = unshare_variable (set, slot, var, initialized); |
7810 | var = *slot; |
7811 | } |
7812 | |
7813 | /* We track only variables whose size is <= MAX_VAR_PARTS bytes |
7814 | thus there are at most MAX_VAR_PARTS different offsets. */ |
7815 | gcc_assert (var->n_var_parts < MAX_VAR_PARTS |
7816 | && (!var->n_var_parts || !onepart)); |
7817 | |
7818 | /* We have to move the elements of array starting at index |
7819 | inspos to the next position. */ |
7820 | for (pos = var->n_var_parts; pos > inspos; pos--) |
7821 | var->var_part[pos] = var->var_part[pos - 1]; |
7822 | |
7823 | var->n_var_parts++; |
7824 | gcc_checking_assert (!onepart); |
7825 | VAR_PART_OFFSET (var, pos) = offset; |
7826 | var->var_part[pos].loc_chain = NULL; |
7827 | var->var_part[pos].cur_loc = NULL; |
7828 | } |
7829 | |
7830 | /* Delete the location from the list. */ |
7831 | nextp = &var->var_part[pos].loc_chain; |
7832 | for (node = var->var_part[pos].loc_chain; node; node = next) |
7833 | { |
7834 | next = node->next; |
7835 | if ((REG_P (node->loc) && REG_P (loc) |
7836 | && REGNO (node->loc) == REGNO (loc)) |
7837 | || rtx_equal_p (node->loc, loc)) |
7838 | { |
7839 | /* Save these values, to assign to the new node, before |
7840 | deleting this one. */ |
7841 | if (node->init > initialized) |
7842 | initialized = node->init; |
7843 | if (node->set_src != NULL && set_src == NULL) |
7844 | set_src = node->set_src; |
7845 | if (var->var_part[pos].cur_loc == node->loc) |
7846 | var->var_part[pos].cur_loc = NULL; |
7847 | delete node; |
7848 | *nextp = next; |
7849 | break; |
7850 | } |
7851 | else |
7852 | nextp = &node->next; |
7853 | } |
7854 | |
7855 | nextp = &var->var_part[pos].loc_chain; |
7856 | } |
7857 | |
7858 | /* Add the location to the beginning. */ |
7859 | node = new location_chain; |
7860 | node->loc = loc; |
7861 | node->init = initialized; |
7862 | node->set_src = set_src; |
7863 | node->next = *nextp; |
7864 | *nextp = node; |
7865 | |
7866 | /* If no location was emitted do so. */ |
7867 | if (var->var_part[pos].cur_loc == NULL) |
7868 | variable_was_changed (var, set); |
7869 | |
7870 | return slot; |
7871 | } |
7872 | |
7873 | /* Set the part of variable's location in the dataflow set SET. The |
7874 | variable part is specified by variable's declaration in DV and |
7875 | offset OFFSET and the part's location by LOC. IOPT should be |
7876 | NO_INSERT if the variable is known to be in SET already and the |
7877 | variable hash table must not be resized, and INSERT otherwise. */ |
7878 | |
7879 | static void |
7880 | set_variable_part (dataflow_set *set, rtx loc, |
7881 | decl_or_value dv, HOST_WIDE_INT offset, |
7882 | enum var_init_status initialized, rtx set_src, |
7883 | enum insert_option iopt) |
7884 | { |
7885 | variable **slot; |
7886 | |
7887 | if (iopt == NO_INSERT) |
7888 | slot = shared_hash_find_slot_noinsert (vars: set->vars, dv); |
7889 | else |
7890 | { |
7891 | slot = shared_hash_find_slot (vars: set->vars, dv); |
7892 | if (!slot) |
7893 | slot = shared_hash_find_slot_unshare (pvars: &set->vars, dv, ins: iopt); |
7894 | } |
7895 | set_slot_part (set, loc, slot, dv, offset, initialized, set_src); |
7896 | } |
7897 | |
7898 | /* Remove all recorded register locations for the given variable part |
7899 | from dataflow set SET, except for those that are identical to loc. |
7900 | The variable part is specified by variable's declaration or value |
7901 | DV and offset OFFSET. */ |
7902 | |
7903 | static variable ** |
7904 | clobber_slot_part (dataflow_set *set, rtx loc, variable **slot, |
7905 | HOST_WIDE_INT offset, rtx set_src) |
7906 | { |
7907 | variable *var = *slot; |
7908 | int pos = find_variable_location_part (var, offset, NULL); |
7909 | |
7910 | if (pos >= 0) |
7911 | { |
7912 | location_chain *node, *next; |
7913 | |
7914 | /* Remove the register locations from the dataflow set. */ |
7915 | next = var->var_part[pos].loc_chain; |
7916 | for (node = next; node; node = next) |
7917 | { |
7918 | next = node->next; |
7919 | if (node->loc != loc |
7920 | && (!flag_var_tracking_uninit |
7921 | || !set_src |
7922 | || MEM_P (set_src) |
7923 | || !rtx_equal_p (set_src, node->set_src))) |
7924 | { |
7925 | if (REG_P (node->loc)) |
7926 | { |
7927 | attrs *anode, *anext; |
7928 | attrs **anextp; |
7929 | |
7930 | /* Remove the variable part from the register's |
7931 | list, but preserve any other variable parts |
7932 | that might be regarded as live in that same |
7933 | register. */ |
7934 | anextp = &set->regs[REGNO (node->loc)]; |
7935 | for (anode = *anextp; anode; anode = anext) |
7936 | { |
7937 | anext = anode->next; |
7938 | if (anode->dv == var->dv && anode->offset == offset) |
7939 | { |
7940 | delete anode; |
7941 | *anextp = anext; |
7942 | } |
7943 | else |
7944 | anextp = &anode->next; |
7945 | } |
7946 | } |
7947 | |
7948 | slot = delete_slot_part (set, node->loc, slot, offset); |
7949 | } |
7950 | } |
7951 | } |
7952 | |
7953 | return slot; |
7954 | } |
7955 | |
7956 | /* Remove all recorded register locations for the given variable part |
7957 | from dataflow set SET, except for those that are identical to loc. |
7958 | The variable part is specified by variable's declaration or value |
7959 | DV and offset OFFSET. */ |
7960 | |
7961 | static void |
7962 | clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv, |
7963 | HOST_WIDE_INT offset, rtx set_src) |
7964 | { |
7965 | variable **slot; |
7966 | |
7967 | if (!dv || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv)))) |
7968 | return; |
7969 | |
7970 | slot = shared_hash_find_slot_noinsert (vars: set->vars, dv); |
7971 | if (!slot) |
7972 | return; |
7973 | |
7974 | clobber_slot_part (set, loc, slot, offset, set_src); |
7975 | } |
7976 | |
/* Delete the part of variable's location from dataflow set SET.  The
   variable part is specified by its SET->vars slot SLOT and offset
   OFFSET and the part's location by LOC.  Returns SLOT, which may
   have been moved if the variable had to be unshared first.  */

static variable **
delete_slot_part (dataflow_set *set, rtx loc, variable **slot,
		  HOST_WIDE_INT offset)
{
  variable *var = *slot;
  int pos = find_variable_location_part (var, offset, NULL);

  if (pos >= 0)
    {
      location_chain *node, *next;
      location_chain **nextp;
      bool changed;
      rtx cur_loc;

      if (shared_var_p (var, vars: set->vars))
	{
	  /* If the variable contains the location part we have to
	     make a copy of the variable.  Only unshare when a match
	     is actually present, to avoid needless copies.  */
	  for (node = var->var_part[pos].loc_chain; node;
	       node = node->next)
	    {
	      /* Registers are matched by number so that different
		 modes of the same hard register compare equal.  */
	      if ((REG_P (node->loc) && REG_P (loc)
		   && REGNO (node->loc) == REGNO (loc))
		  || rtx_equal_p (node->loc, loc))
		{
		  slot = unshare_variable (set, slot, var,
					   initialized: VAR_INIT_STATUS_UNKNOWN);
		  var = *slot;
		  break;
		}
	    }
	}

      /* For a one-part variable the location last emitted in a note
	 is tracked in the 1P auxiliary data, not in cur_loc.  */
      if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
	cur_loc = VAR_LOC_FROM (var);
      else
	cur_loc = var->var_part[pos].cur_loc;

      /* Delete the location part.  */
      changed = false;
      nextp = &var->var_part[pos].loc_chain;
      for (node = *nextp; node; node = next)
	{
	  next = node->next;
	  if ((REG_P (node->loc) && REG_P (loc)
	       && REGNO (node->loc) == REGNO (loc))
	      || rtx_equal_p (node->loc, loc))
	    {
	      /* If we have deleted the location which was last emitted
		 we have to emit new location so add the variable to set
		 of changed variables.  */
	      if (cur_loc == node->loc)
		{
		  changed = true;
		  var->var_part[pos].cur_loc = NULL;
		  if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var))
		    VAR_LOC_FROM (var) = NULL;
		}
	      delete node;
	      *nextp = next;
	      break;
	    }
	  else
	    nextp = &node->next;
	}

      /* If the chain became empty, drop this variable part entirely
	 and shift the remaining parts down to fill the gap.  */
      if (var->var_part[pos].loc_chain == NULL)
	{
	  changed = true;
	  var->n_var_parts--;
	  while (pos < var->n_var_parts)
	    {
	      var->var_part[pos] = var->var_part[pos + 1];
	      pos++;
	    }
	}
      if (changed)
	variable_was_changed (var, set);
    }

  return slot;
}
8063 | |
8064 | /* Delete the part of variable's location from dataflow set SET. The |
8065 | variable part is specified by variable's declaration or value DV |
8066 | and offset OFFSET and the part's location by LOC. */ |
8067 | |
8068 | static void |
8069 | delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv, |
8070 | HOST_WIDE_INT offset) |
8071 | { |
8072 | variable **slot = shared_hash_find_slot_noinsert (vars: set->vars, dv); |
8073 | if (!slot) |
8074 | return; |
8075 | |
8076 | delete_slot_part (set, loc, slot, offset); |
8077 | } |
8078 | |
8079 | |
/* Structure for passing some other parameters to function
   vt_expand_loc_callback.  An instance is set up with INIT_ELCD and
   torn down with FINI_ELCD.  */
class expand_loc_callback_data
{
public:
  /* The variables and values active at this point.  */
  variable_table_type *vars;

  /* Stack of values and debug_exprs under expansion, and their
     children.  Used by vt_expand_var_loc_chain to record which
     dependencies each expansion attempt encountered.  */
  auto_vec<rtx, 4> expanding;

  /* Stack of values and debug_exprs whose expansion hit recursion
     cycles.  They will have VALUE_RECURSED_INTO marked when added to
     this list.  This flag will be cleared if any of its dependencies
     resolves to a valid location.  So, if the flag remains set at the
     end of the search, we know no valid location for this one can
     possibly exist.  */
  auto_vec<rtx, 4> pending;

  /* The maximum depth among the sub-expressions under expansion.
     Zero indicates no expansion so far.  */
  expand_depth depth;
};
8104 | |
/* Allocate the one-part auxiliary data structure for VAR, with enough
   room for COUNT dependencies.  The dependency vector is embedded at
   the tail of the onepart_aux allocation, so growing it means
   reallocating the whole structure.  */

static void
loc_exp_dep_alloc (variable *var, int count)
{
  size_t allocsize;

  gcc_checking_assert (var->onepart);

  /* We can be called with COUNT == 0 to allocate the data structure
     without any dependencies, e.g. for the backlinks only.  However,
     if we are specifying a COUNT, then the dependency list must have
     been emptied before.  It would be possible to adjust pointers or
     force it empty here, but this is better done at an earlier point
     in the algorithm, so we instead leave an assertion to catch
     errors.  */
  gcc_checking_assert (!count
		       || VAR_LOC_DEP_VEC (var) == NULL
		       || VAR_LOC_DEP_VEC (var)->is_empty ());

  /* Fast path: the existing embedded vector already has room.  */
  if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (nelems: count))
    return;

  allocsize = offsetof (struct onepart_aux, deps)
    + deps_vec::embedded_size (alloc: count);

  if (VAR_LOC_1PAUX (var))
    {
      VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux,
					VAR_LOC_1PAUX (var), allocsize);
      /* If the reallocation moves the onepaux structure, the
	 back-pointer to BACKLINKS in the first list member will still
	 point to its old location.  Adjust it.  */
      if (VAR_LOC_DEP_LST (var))
	VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var);
    }
  else
    {
      /* First allocation: initialize all auxiliary fields.  */
      VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize);
      *VAR_LOC_DEP_LSTP (var) = NULL;
      VAR_LOC_FROM (var) = NULL;
      VAR_LOC_DEPTH (var).complexity = 0;
      VAR_LOC_DEPTH (var).entryvals = 0;
    }
  VAR_LOC_DEP_VEC (var)->embedded_init (alloc: count);
}
8152 | |
8153 | /* Remove all entries from the vector of active dependencies of VAR, |
8154 | removing them from the back-links lists too. */ |
8155 | |
8156 | static void |
8157 | loc_exp_dep_clear (variable *var) |
8158 | { |
8159 | while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ()) |
8160 | { |
8161 | loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last (); |
8162 | if (led->next) |
8163 | led->next->pprev = led->pprev; |
8164 | if (led->pprev) |
8165 | *led->pprev = led->next; |
8166 | VAR_LOC_DEP_VEC (var)->pop (); |
8167 | } |
8168 | } |
8169 | |
/* Insert an active dependency from VAR on X to the vector of
   dependencies, and add the corresponding back-link to X's list of
   back-links in VARS.  */

static void
loc_exp_insert_dep (variable *var, rtx x, variable_table_type *vars)
{
  decl_or_value dv;
  variable *xvar;
  loc_exp_dep *led;

  dv = dv_from_rtx (x);

  /* ??? Build a vector of variables parallel to EXPANDING, to avoid
     an additional look up?  */
  xvar = vars->find_with_hash (comparable: dv, hash: dv_htab_hash (dv));

  if (!xvar)
    {
      /* X is not in the current table; fall back to the dropped-values
	 table, which must already hold it.  */
      xvar = variable_from_dropped (dv, insert: NO_INSERT);
      gcc_checking_assert (xvar);
    }

  /* No point in adding the same backlink more than once.  This may
     arise if say the same value appears in two complex expressions in
     the same loc_list, or even more than once in a single
     expression.  */
  if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv)
    return;

  /* NOT_ONEPART variables have no embedded dependency vector, so the
     entry is heap-allocated; one-part variables store it in the
     vector preallocated by loc_exp_dep_alloc.  */
  if (var->onepart == NOT_ONEPART)
    led = new loc_exp_dep;
  else
    {
      loc_exp_dep empty;
      memset (s: &empty, c: 0, n: sizeof (empty));
      VAR_LOC_DEP_VEC (var)->quick_push (obj: empty);
      led = &VAR_LOC_DEP_VEC (var)->last ();
    }
  led->dv = var->dv;
  led->value = x;

  /* Link LED at the head of XVAR's backlink list, making sure XVAR
     has its auxiliary data allocated first.  */
  loc_exp_dep_alloc (var: xvar, count: 0);
  led->pprev = VAR_LOC_DEP_LSTP (xvar);
  led->next = *led->pprev;
  if (led->next)
    led->next->pprev = &led->next;
  *led->pprev = led;
}
8219 | |
8220 | /* Create active dependencies of VAR on COUNT values starting at |
8221 | VALUE, and corresponding back-links to the entries in VARS. Return |
8222 | true if we found any pending-recursion results. */ |
8223 | |
8224 | static bool |
8225 | loc_exp_dep_set (variable *var, rtx result, rtx *value, int count, |
8226 | variable_table_type *vars) |
8227 | { |
8228 | bool pending_recursion = false; |
8229 | |
8230 | gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL |
8231 | || VAR_LOC_DEP_VEC (var)->is_empty ()); |
8232 | |
8233 | /* Set up all dependencies from last_child (as set up at the end of |
8234 | the loop above) to the end. */ |
8235 | loc_exp_dep_alloc (var, count); |
8236 | |
8237 | while (count--) |
8238 | { |
8239 | rtx x = *value++; |
8240 | |
8241 | if (!pending_recursion) |
8242 | pending_recursion = !result && VALUE_RECURSED_INTO (x); |
8243 | |
8244 | loc_exp_insert_dep (var, x, vars); |
8245 | } |
8246 | |
8247 | return pending_recursion; |
8248 | } |
8249 | |
/* Notify the back-links of IVAR that are pending recursion that we
   have found a non-NIL value for it, so they are cleared for another
   attempt to compute a current location.  Notification propagates
   recursively through the dependents' own back-links.  */

static void
notify_dependents_of_resolved_value (variable *ivar, variable_table_type *vars)
{
  loc_exp_dep *led, *next;

  for (led = VAR_LOC_DEP_LST (ivar); led; led = next)
    {
      decl_or_value dv = led->dv;
      variable *var;

      next = led->next;

      if (dv_is_value_p (dv))
	{
	  rtx value = dv_as_value (dv);

	  /* If we have already resolved it, leave it alone.  */
	  if (!VALUE_RECURSED_INTO (value))
	    continue;

	  /* Check that VALUE_RECURSED_INTO, true from the test above,
	     implies NO_LOC_P.  */
	  gcc_checking_assert (NO_LOC_P (value));

	  /* We won't notify variables that are being expanded,
	     because their dependency list is cleared before
	     recursing.  */
	  NO_LOC_P (value) = false;
	  VALUE_RECURSED_INTO (value) = false;

	  gcc_checking_assert (dv_changed_p (dv));
	}
      else
	{
	  /* Non-value dependents must be one-part variables; skip
	     those whose location has not changed.  */
	  gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART);
	  if (!dv_changed_p (dv))
	    continue;
	}

      var = vars->find_with_hash (comparable: dv, hash: dv_htab_hash (dv));

      if (!var)
	var = variable_from_dropped (dv, insert: NO_INSERT);

      /* Recurse so transitive dependents also get another chance.  */
      if (var)
	notify_dependents_of_resolved_value (ivar: var, vars);

      /* Detach LED from the backlink list; it has served its
	 purpose.  */
      if (next)
	next->pprev = led->pprev;
      if (led->pprev)
	*led->pprev = next;
      led->next = NULL;
      led->pprev = NULL;
    }
}
8309 | |
8310 | static rtx vt_expand_loc_callback (rtx x, bitmap regs, |
8311 | int max_depth, void *data); |
8312 | |
8313 | /* Return the combined depth, when one sub-expression evaluated to |
8314 | BEST_DEPTH and the previous known depth was SAVED_DEPTH. */ |
8315 | |
8316 | static inline expand_depth |
8317 | update_depth (expand_depth saved_depth, expand_depth best_depth) |
8318 | { |
8319 | /* If we didn't find anything, stick with what we had. */ |
8320 | if (!best_depth.complexity) |
8321 | return saved_depth; |
8322 | |
8323 | /* If we found hadn't found anything, use the depth of the current |
8324 | expression. Do NOT add one extra level, we want to compute the |
8325 | maximum depth among sub-expressions. We'll increment it later, |
8326 | if appropriate. */ |
8327 | if (!saved_depth.complexity) |
8328 | return best_depth; |
8329 | |
8330 | /* Combine the entryval count so that regardless of which one we |
8331 | return, the entryval count is accurate. */ |
8332 | best_depth.entryvals = saved_depth.entryvals |
8333 | = best_depth.entryvals + saved_depth.entryvals; |
8334 | |
8335 | if (saved_depth.complexity < best_depth.complexity) |
8336 | return best_depth; |
8337 | else |
8338 | return saved_depth; |
8339 | } |
8340 | |
/* Expand VAR to a location RTX, updating its cur_loc.  Use REGS and
   DATA for cselib expand callback.  If PENDRECP is given, indicate in
   it whether any sub-expression couldn't be fully evaluated because
   it is pending recursion resolution.  Returns NULL if no expansion
   succeeded (or only tentatively failed due to pending recursion).  */

static inline rtx
vt_expand_var_loc_chain (variable *var, bitmap regs, void *data,
			 bool *pendrecp)
{
  class expand_loc_callback_data *elcd
    = (class expand_loc_callback_data *) data;
  location_chain *loc, *next;
  rtx result = NULL;
  int first_child, result_first_child, last_child;
  bool pending_recursion;
  rtx loc_from = NULL;
  struct elt_loc_list *cloc = NULL;
  expand_depth depth = { .complexity: 0, .entryvals: 0 }, saved_depth = elcd->depth;
  int wanted_entryvals, found_entryvals = 0;

  /* Clear all backlinks pointing at this, so that we're not notified
     while we're active.  */
  loc_exp_dep_clear (var);

 retry:
  if (var->onepart == ONEPART_VALUE)
    {
      /* For VALUEs, also consider the cselib location list in
	 addition to the variable's own loc_chain.  */
      cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv));

      gcc_checking_assert (cselib_preserved_value_p (val));

      cloc = val->locs;
    }

  /* Remember where on the EXPANDING stack our children start, so we
     can tell which dependencies each attempt produced.  */
  first_child = result_first_child = last_child
    = elcd->expanding.length ();

  wanted_entryvals = found_entryvals;

  /* Attempt to expand each available location in turn.  */
  for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL;
       loc || cloc; loc = next)
    {
      result_first_child = last_child;

      if (!loc)
	{
	  /* Drained the loc_chain; continue with cselib locs.  */
	  loc_from = cloc->loc;
	  next = loc;
	  cloc = cloc->next;
	  if (unsuitable_loc (loc: loc_from))
	    continue;
	}
      else
	{
	  loc_from = loc->loc;
	  next = loc->next;
	}

      gcc_checking_assert (!unsuitable_loc (loc_from));

      elcd->depth.complexity = elcd->depth.entryvals = 0;
      result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH,
					   vt_expand_loc_callback, data);
      last_child = elcd->expanding.length ();

      if (result)
	{
	  depth = elcd->depth;

	  gcc_checking_assert (depth.complexity
			       || result_first_child == last_child);

	  /* Account for this node itself unless the expansion was a
	     single trivial child.  */
	  if (last_child - result_first_child != 1)
	    {
	      if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE)
		depth.entryvals++;
	      depth.complexity++;
	    }

	  if (depth.complexity <= EXPR_USE_DEPTH)
	    {
	      /* Accept this expansion only if it doesn't use more
		 ENTRY_VALUEs than we're willing to; otherwise record
		 the best (lowest) count seen so far.  */
	      if (depth.entryvals <= wanted_entryvals)
		break;
	      else if (!found_entryvals || depth.entryvals < found_entryvals)
		found_entryvals = depth.entryvals;
	    }

	  result = NULL;
	}

      /* Set it up in case we leave the loop.  */
      depth.complexity = depth.entryvals = 0;
      loc_from = NULL;
      result_first_child = first_child;
    }

  if (!loc_from && wanted_entryvals < found_entryvals)
    {
      /* We found entries with ENTRY_VALUEs and skipped them.  Since
	 we could not find any expansions without ENTRY_VALUEs, but we
	 found at least one with them, go back and get an entry with
	 the minimum number ENTRY_VALUE count that we found.  We could
	 avoid looping, but since each sub-loc is already resolved,
	 the re-expansion should be trivial.  ??? Should we record all
	 attempted locs as dependencies, so that we retry the
	 expansion should any of them change, in the hope it can give
	 us a new entry without an ENTRY_VALUE?  */
      elcd->expanding.truncate (size: first_child);
      goto retry;
    }

  /* Register all encountered dependencies as active.  */
  pending_recursion = loc_exp_dep_set
    (var, result, value: elcd->expanding.address () + result_first_child,
     count: last_child - result_first_child, vars: elcd->vars);

  elcd->expanding.truncate (size: first_child);

  /* Record where the expansion came from.  */
  gcc_checking_assert (!result || !pending_recursion);
  VAR_LOC_FROM (var) = loc_from;
  VAR_LOC_DEPTH (var) = depth;

  gcc_checking_assert (!depth.complexity == !result);

  elcd->depth = update_depth (saved_depth, best_depth: depth);

  /* Indicate whether any of the dependencies are pending recursion
     resolution.  */
  if (pendrecp)
    *pendrecp = pending_recursion;

  /* Cache the result unless it is still tentative (i.e. a recursion
     cycle may yet be resolved to a better answer).  */
  if (!pendrecp || !pending_recursion)
    var->var_part[0].cur_loc = result;

  return result;
}
8479 | |
/* Callback for cselib_expand_value, that looks for expressions
   holding the value in the var-tracking hash tables.  Return X for
   standard processing, anything else is to be used as-is.  Returns
   NULL when no location could be found (possibly tentatively, if
   recursion is pending).  */

static rtx
vt_expand_loc_callback (rtx x, bitmap regs,
			int max_depth ATTRIBUTE_UNUSED,
			void *data)
{
  class expand_loc_callback_data *elcd
    = (class expand_loc_callback_data *) data;
  decl_or_value dv;
  variable *var;
  rtx result, subreg;
  bool pending_recursion = false;
  bool from_empty = false;

  switch (GET_CODE (x))
    {
    case SUBREG:
      /* Expand the inner expression first, then try to rebuild the
	 subreg around whatever it expanded to.  */
      subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs,
					   EXPR_DEPTH,
					   vt_expand_loc_callback, data);

      if (!subreg)
	return NULL;

      result = simplify_gen_subreg (GET_MODE (x), op: subreg,
				    GET_MODE (SUBREG_REG (x)),
				    SUBREG_BYTE (x));

      /* Invalid SUBREGs are ok in debug info.  ??? We could try
	 alternate expansions for the VALUE as well.  */
      if (!result && GET_MODE (subreg) != VOIDmode)
	result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x));

      return result;

    case DEBUG_EXPR:
    case VALUE:
      dv = dv_from_rtx (x);
      break;

    default:
      /* Anything else gets cselib's standard processing.  */
      return x;
    }

  elcd->expanding.safe_push (obj: x);

  /* Check that VALUE_RECURSED_INTO implies NO_LOC_P.  */
  gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x));

  if (NO_LOC_P (x))
    {
      /* Already known to have no location; don't try again.  */
      gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv));
      return NULL;
    }

  var = elcd->vars->find_with_hash (comparable: dv, hash: dv_htab_hash (dv));

  if (!var)
    {
      from_empty = true;
      var = variable_from_dropped (dv, insert: INSERT);
    }

  gcc_checking_assert (var);

  if (!dv_changed_p (dv))
    {
      /* Unchanged since the last expansion: reuse the cached
	 location, only folding its depth into the caller's.  */
      gcc_checking_assert (!NO_LOC_P (x));
      gcc_checking_assert (var->var_part[0].cur_loc);
      gcc_checking_assert (VAR_LOC_1PAUX (var));
      gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity);

      elcd->depth = update_depth (saved_depth: elcd->depth, VAR_LOC_1PAUX (var)->depth);

      return var->var_part[0].cur_loc;
    }

  /* Mark X as under expansion, so a recursive reference to it is
     detected instead of looping forever.  */
  VALUE_RECURSED_INTO (x) = true;
  /* This is tentative, but it makes some tests simpler.  */
  NO_LOC_P (x) = true;

  gcc_checking_assert (var->n_var_parts == 1 || from_empty);

  result = vt_expand_var_loc_chain (var, regs, data, pendrecp: &pending_recursion);

  if (pending_recursion)
    {
      /* Leave the flags set; resolve_expansions_pending_recursion or
	 a notification will finalize them later.  */
      gcc_checking_assert (!result);
      elcd->pending.safe_push (obj: x);
    }
  else
    {
      NO_LOC_P (x) = !result;
      VALUE_RECURSED_INTO (x) = false;
      set_dv_changed (dv, newv: false);

      if (result)
	notify_dependents_of_resolved_value (ivar: var, vars: elcd->vars);
    }

  return result;
}
8585 | |
8586 | /* While expanding variables, we may encounter recursion cycles |
8587 | because of mutual (possibly indirect) dependencies between two |
8588 | particular variables (or values), say A and B. If we're trying to |
8589 | expand A when we get to B, which in turn attempts to expand A, if |
8590 | we can't find any other expansion for B, we'll add B to this |
8591 | pending-recursion stack, and tentatively return NULL for its |
8592 | location. This tentative value will be used for any other |
8593 | occurrences of B, unless A gets some other location, in which case |
8594 | it will notify B that it is worth another try at computing a |
8595 | location for it, and it will use the location computed for A then. |
8596 | At the end of the expansion, the tentative NULL locations become |
8597 | final for all members of PENDING that didn't get a notification. |
8598 | This function performs this finalization of NULL locations. */ |
8599 | |
8600 | static void |
8601 | resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending) |
8602 | { |
8603 | while (!pending->is_empty ()) |
8604 | { |
8605 | rtx x = pending->pop (); |
8606 | decl_or_value dv; |
8607 | |
8608 | if (!VALUE_RECURSED_INTO (x)) |
8609 | continue; |
8610 | |
8611 | gcc_checking_assert (NO_LOC_P (x)); |
8612 | VALUE_RECURSED_INTO (x) = false; |
8613 | dv = dv_from_rtx (x); |
8614 | gcc_checking_assert (dv_changed_p (dv)); |
8615 | set_dv_changed (dv, newv: false); |
8616 | } |
8617 | } |
8618 | |
/* Initialize expand_loc_callback_data D with variable hash table V.
   It must be a macro because of alloca (vec stack).  */
#define INIT_ELCD(d, v)						\
  do								\
    {								\
      (d).vars = (v);						\
      (d).depth.complexity = (d).depth.entryvals = 0;		\
    }								\
  while (0)
/* Finalize expand_loc_callback_data D, resolved to location L:
   finalize NULL locations of any values still pending recursion,
   release the expansion stacks, and delegitimize a MEM result so
   the emitted note refers to a canonical address.  */
#define FINI_ELCD(d, l)						\
  do								\
    {								\
      resolve_expansions_pending_recursion (&(d).pending);	\
      (d).pending.release ();					\
      (d).expanding.release ();					\
								\
      if ((l) && MEM_P (l))					\
	(l) = targetm.delegitimize_address (l);			\
    }								\
  while (0)
8640 | |
8641 | /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the |
8642 | equivalences in VARS, updating their CUR_LOCs in the process. */ |
8643 | |
8644 | static rtx |
8645 | vt_expand_loc (rtx loc, variable_table_type *vars) |
8646 | { |
8647 | class expand_loc_callback_data data; |
8648 | rtx result; |
8649 | |
8650 | if (!MAY_HAVE_DEBUG_BIND_INSNS) |
8651 | return loc; |
8652 | |
8653 | INIT_ELCD (data, vars); |
8654 | |
8655 | result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH, |
8656 | vt_expand_loc_callback, &data); |
8657 | |
8658 | FINI_ELCD (data, result); |
8659 | |
8660 | return result; |
8661 | } |
8662 | |
8663 | /* Expand the one-part VARiable to a location, using the equivalences |
8664 | in VARS, updating their CUR_LOCs in the process. */ |
8665 | |
8666 | static rtx |
8667 | vt_expand_1pvar (variable *var, variable_table_type *vars) |
8668 | { |
8669 | class expand_loc_callback_data data; |
8670 | rtx loc; |
8671 | |
8672 | gcc_checking_assert (var->onepart && var->n_var_parts == 1); |
8673 | |
8674 | if (!dv_changed_p (dv: var->dv)) |
8675 | return var->var_part[0].cur_loc; |
8676 | |
8677 | INIT_ELCD (data, vars); |
8678 | |
8679 | loc = vt_expand_var_loc_chain (var, regs: scratch_regs, data: &data, NULL); |
8680 | |
8681 | gcc_checking_assert (data.expanding.is_empty ()); |
8682 | |
8683 | FINI_ELCD (data, loc); |
8684 | |
8685 | return loc; |
8686 | } |
8687 | |
8688 | /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains |
8689 | additional parameters: WHERE specifies whether the note shall be emitted |
8690 | before or after instruction INSN. */ |
8691 | |
int
emit_note_insn_var_location (variable **varp, emit_note_data *data)
{
  variable *var = *varp;
  rtx_insn *insn = data->insn;
  enum emit_note_where where = data->where;
  variable_table_type *vars = data->vars;
  rtx_note *note;
  rtx note_vl;
  int i, j, n_var_parts;
  bool complete;
  enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED;
  HOST_WIDE_INT last_limit;
  /* Offsets and expanded locations of the parts collected so far.  */
  HOST_WIDE_INT offsets[MAX_VAR_PARTS];
  rtx loc[MAX_VAR_PARTS];
  tree decl;
  location_chain *lc;

  gcc_checking_assert (var->onepart == NOT_ONEPART
		       || var->onepart == ONEPART_VDECL);

  decl = dv_as_decl (dv: var->dv);

  complete = true;
  last_limit = 0;
  n_var_parts = 0;
  /* Refresh stale CUR_LOCs of multi-part variables from the head of
     each part's location chain.  */
  if (!var->onepart)
    for (i = 0; i < var->n_var_parts; i++)
      if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain)
	var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc;
  /* Walk the parts in offset order, collecting an (offset, location)
     pair for each.  COMPLETE is cleared whenever some bytes of the
     decl end up not covered by any usable part.  */
  for (i = 0; i < var->n_var_parts; i++)
    {
      machine_mode mode, wider_mode;
      rtx loc2;
      HOST_WIDE_INT offset, size, wider_size;

      if (i == 0 && var->onepart)
	{
	  gcc_checking_assert (var->n_var_parts == 1);
	  offset = 0;
	  initialized = VAR_INIT_STATUS_INITIALIZED;
	  loc2 = vt_expand_1pvar (var, vars);
	}
      else
	{
	  if (last_limit < VAR_PART_OFFSET (var, i))
	    {
	      complete = false;
	      break;
	    }
	  else if (last_limit > VAR_PART_OFFSET (var, i))
	    continue;
	  offset = VAR_PART_OFFSET (var, i);
	  loc2 = var->var_part[i].cur_loc;
	  /* A MEM whose address is a VALUE needs expanding; record a
	     dependency on that value so VAR is reconsidered if the
	     value's location changes later.  */
	  if (loc2 && GET_CODE (loc2) == MEM
	      && GET_CODE (XEXP (loc2, 0)) == VALUE)
	    {
	      rtx depval = XEXP (loc2, 0);

	      loc2 = vt_expand_loc (loc: loc2, vars);

	      if (loc2)
		loc_exp_insert_dep (var, x: depval, vars);
	    }
	  if (!loc2)
	    {
	      complete = false;
	      continue;
	    }
	  gcc_checking_assert (GET_CODE (loc2) != VALUE);
	  /* Pick up the initialization status recorded for the
	     location that is current for this part.  */
	  for (lc = var->var_part[i].loc_chain; lc; lc = lc->next)
	    if (var->var_part[i].cur_loc == lc->loc)
	      {
		initialized = lc->init;
		break;
	      }
	  gcc_assert (lc);
	}

      offsets[n_var_parts] = offset;
      if (!loc2)
	{
	  complete = false;
	  continue;
	}
      loc[n_var_parts] = loc2;
      mode = GET_MODE (var->var_part[i].cur_loc);
      if (mode == VOIDmode && var->onepart)
	mode = DECL_MODE (decl);
      /* We only track subparts of constant-sized objects, since at present
	 there's no representation for polynomial pieces.  */
      if (!GET_MODE_SIZE (mode).is_constant (const_value: &size))
	{
	  complete = false;
	  continue;
	}
      last_limit = offsets[n_var_parts] + size;

      /* Attempt to merge adjacent registers or memory.  */
      for (j = i + 1; j < var->n_var_parts; j++)
	if (last_limit <= VAR_PART_OFFSET (var, j))
	  break;
      if (j < var->n_var_parts
	  && GET_MODE_WIDER_MODE (m: mode).exists (mode: &wider_mode)
	  && GET_MODE_SIZE (mode: wider_mode).is_constant (const_value: &wider_size)
	  && var->var_part[j].cur_loc
	  && mode == GET_MODE (var->var_part[j].cur_loc)
	  && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts]))
	  && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j))
	  && (loc2 = vt_expand_loc (loc: var->var_part[j].cur_loc, vars))
	  && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2))
	{
	  rtx new_loc = NULL;
	  poly_int64 offset2;

	  /* Two adjacent hard registers merge into a single wider-mode
	     register when they form exactly that wider register.  */
	  if (REG_P (loc[n_var_parts])
	      && hard_regno_nregs (REGNO (loc[n_var_parts]), mode) * 2
		 == hard_regno_nregs (REGNO (loc[n_var_parts]), mode: wider_mode)
	      && end_hard_regno (mode, REGNO (loc[n_var_parts]))
		 == REGNO (loc2))
	    {
	      if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN)
		new_loc = simplify_subreg (outermode: wider_mode, op: loc[n_var_parts],
					   innermode: mode, byte: 0);
	      else if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
		new_loc = simplify_subreg (outermode: wider_mode, op: loc2, innermode: mode, byte: 0);
	      if (new_loc)
		{
		  if (!REG_P (new_loc)
		      || REGNO (new_loc) != REGNO (loc[n_var_parts]))
		    new_loc = NULL;
		  else
		    REG_ATTRS (new_loc) = REG_ATTRS (loc[n_var_parts]);
		}
	    }
	  /* Two MEMs merge when the second one's address equals the
	     first one's address plus the first one's size.  */
	  else if (MEM_P (loc[n_var_parts])
		   && GET_CODE (XEXP (loc2, 0)) == PLUS
		   && REG_P (XEXP (XEXP (loc2, 0), 0))
		   && poly_int_rtx_p (XEXP (XEXP (loc2, 0), 1), res: &offset2))
	    {
	      poly_int64 end1 = size;
	      rtx base1 = strip_offset_and_add (XEXP (loc[n_var_parts], 0),
						offset: &end1);
	      if (rtx_equal_p (base1, XEXP (XEXP (loc2, 0), 0))
		  && known_eq (end1, offset2))
		new_loc = adjust_address_nv (loc[n_var_parts],
					     wider_mode, 0);
	    }

	  if (new_loc)
	    {
	      loc[n_var_parts] = new_loc;
	      mode = wider_mode;
	      last_limit = offsets[n_var_parts] + wider_size;
	      i = j;
	    }
	}
      ++n_var_parts;
    }
  /* If the parts collected don't cover the whole declaration, the
     location description is incomplete.  */
  poly_uint64 type_size_unit
    = tree_to_poly_uint64 (TYPE_SIZE_UNIT (TREE_TYPE (decl)));
  if (maybe_lt (a: poly_uint64 (last_limit), b: type_size_unit))
    complete = false;

  if (! flag_var_tracking_uninit)
    initialized = VAR_INIT_STATUS_INITIALIZED;

  /* Build the VAR_LOCATION body: an empty location if incomplete, a
     single location (with an EXPR_LIST offset wrapper when needed)
     for one part, or a PARALLEL of (location, offset) pairs.  */
  note_vl = NULL_RTX;
  if (!complete)
    note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX, initialized);
  else if (n_var_parts == 1)
    {
      rtx expr_list;

      if (offsets[0] || GET_CODE (loc[0]) == PARALLEL)
	expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0]));
      else
	expr_list = loc[0];

      note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list, initialized);
    }
  else if (n_var_parts)
    {
      rtx parallel;

      for (i = 0; i < n_var_parts; i++)
	loc[i]
	  = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i]));

      parallel = gen_rtx_PARALLEL (VOIDmode,
				   gen_rtvec_v (n_var_parts, loc));
      note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl,
				      parallel, initialized);
    }

  /* Place the note before or after INSN as requested, keeping the
     during-call notes grouped immediately after a call.  */
  if (where != EMIT_NOTE_BEFORE_INSN)
    {
      note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
      if (where == EMIT_NOTE_AFTER_CALL_INSN)
	NOTE_DURING_CALL_P (note) = true;
    }
  else
    {
      /* Make sure that the call related notes come first.  */
      while (NEXT_INSN (insn)
	     && NOTE_P (insn)
	     && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
	     && NOTE_DURING_CALL_P (insn))
	insn = NEXT_INSN (insn);
      if (NOTE_P (insn)
	  && NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION
	  && NOTE_DURING_CALL_P (insn))
	note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);
      else
	note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn);
    }
  NOTE_VAR_LOCATION (note) = note_vl;

  /* The note is emitted; the variable is no longer pending, so remove
     it from changed_variables.  */
  set_dv_changed (dv: var->dv, newv: false);
  gcc_assert (var->in_changed_variables);
  var->in_changed_variables = false;
  changed_variables->clear_slot (slot: varp);

  /* Continue traversing the hash table.  */
  return 1;
}
8918 | |
8919 | /* While traversing changed_variables, push onto DATA (a stack of RTX |
8920 | values) entries that aren't user variables. */ |
8921 | |
8922 | int |
8923 | var_track_values_to_stack (variable **slot, |
8924 | vec<rtx, va_heap> *changed_values_stack) |
8925 | { |
8926 | variable *var = *slot; |
8927 | |
8928 | if (var->onepart == ONEPART_VALUE) |
8929 | changed_values_stack->safe_push (obj: dv_as_value (dv: var->dv)); |
8930 | else if (var->onepart == ONEPART_DEXPR) |
8931 | changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv))); |
8932 | |
8933 | return 1; |
8934 | } |
8935 | |
8936 | /* Remove from changed_variables the entry whose DV corresponds to |
8937 | value or debug_expr VAL. */ |
8938 | static void |
8939 | remove_value_from_changed_variables (rtx val) |
8940 | { |
8941 | decl_or_value dv = dv_from_rtx (x: val); |
8942 | variable **slot; |
8943 | variable *var; |
8944 | |
8945 | slot = changed_variables->find_slot_with_hash (comparable: dv, hash: dv_htab_hash (dv), |
8946 | insert: NO_INSERT); |
8947 | var = *slot; |
8948 | var->in_changed_variables = false; |
8949 | changed_variables->clear_slot (slot); |
8950 | } |
8951 | |
8952 | /* If VAL (a value or debug_expr) has backlinks to variables actively |
8953 | dependent on it in HTAB or in CHANGED_VARIABLES, mark them as |
8954 | changed, adding to CHANGED_VALUES_STACK any dependencies that may |
8955 | have dependencies of their own to notify. */ |
8956 | |
static void
notify_dependents_of_changed_value (rtx val, variable_table_type *htab,
				    vec<rtx, va_heap> *changed_values_stack)
{
  variable **slot;
  variable *var;
  loc_exp_dep *led;
  decl_or_value dv = dv_from_rtx (x: val);

  /* VAL's entry may live in changed_variables, in the current table,
     or among dropped values; try them in that order.  */
  slot = changed_variables->find_slot_with_hash (comparable: dv, hash: dv_htab_hash (dv),
						 insert: NO_INSERT);
  if (!slot)
    slot = htab->find_slot_with_hash (comparable: dv, hash: dv_htab_hash (dv), insert: NO_INSERT);
  if (!slot)
    slot = dropped_values->find_slot_with_hash (comparable: dv, hash: dv_htab_hash (dv),
						insert: NO_INSERT);
  var = *slot;

  /* Consume the backlink list, notifying each dependent entity.  */
  while ((led = VAR_LOC_DEP_LST (var)))
    {
      decl_or_value ldv = led->dv;
      variable *ivar;

      /* Deactivate and remove the backlink, as it was "used up".  It
	 makes no sense to attempt to notify the same entity again:
	 either it will be recomputed and re-register an active
	 dependency, or it will still have the changed mark.  */
      if (led->next)
	led->next->pprev = led->pprev;
      if (led->pprev)
	*led->pprev = led->next;
      led->next = NULL;
      led->pprev = NULL;

      /* Already marked changed; it will be (or has been) handled.  */
      if (dv_changed_p (dv: ldv))
	continue;

      switch (dv_onepart_p (dv: ldv))
	{
	case ONEPART_VALUE:
	case ONEPART_DEXPR:
	  /* Values and debug_exprs may themselves have dependents, so
	     queue them for recursive notification.  */
	  set_dv_changed (dv: ldv, newv: true);
	  changed_values_stack->safe_push (obj: dv_as_rtx (dv: ldv));
	  break;

	case ONEPART_VDECL:
	  ivar = htab->find_with_hash (comparable: ldv, hash: dv_htab_hash (dv: ldv));
	  gcc_checking_assert (!VAR_LOC_DEP_LST (ivar));
	  variable_was_changed (var: ivar, NULL);
	  break;

	case NOT_ONEPART:
	  /* Multi-part dependents own their dep record: free it, then
	     mark the variable changed only if one of its current
	     locations actually mentions VAL.  */
	  delete led;
	  ivar = htab->find_with_hash (comparable: ldv, hash: dv_htab_hash (dv: ldv));
	  if (ivar)
	    {
	      int i = ivar->n_var_parts;
	      while (i--)
		{
		  rtx loc = ivar->var_part[i].cur_loc;

		  if (loc && GET_CODE (loc) == MEM
		      && XEXP (loc, 0) == val)
		    {
		      variable_was_changed (var: ivar, NULL);
		      break;
		    }
		}
	    }
	  break;

	default:
	  gcc_unreachable ();
	}
    }
}
9033 | |
9034 | /* Take out of changed_variables any entries that don't refer to use |
9035 | variables. Back-propagate change notifications from values and |
9036 | debug_exprs to their active dependencies in HTAB or in |
9037 | CHANGED_VARIABLES. */ |
9038 | |
static void
process_changed_values (variable_table_type *htab)
{
  int i, n;
  rtx val;
  auto_vec<rtx, 20> changed_values_stack;

  /* Move values from changed_variables to changed_values_stack.  */
  changed_variables
    ->traverse <vec<rtx, va_heap>*, var_track_values_to_stack>
    (argument: &changed_values_stack);

  /* Back-propagate change notifications in values while popping
     them from the stack.  */
  for (n = i = changed_values_stack.length ();
       i > 0; i = changed_values_stack.length ())
    {
      val = changed_values_stack.pop ();
      /* This may push further dependent values onto the stack, which
	 is why the length is re-read on every iteration.  */
      notify_dependents_of_changed_value (val, htab, changed_values_stack: &changed_values_stack);

      /* This condition will hold when visiting each of the entries
	 originally in changed_variables.  We can't remove them
	 earlier because this could drop the backlinks before we got a
	 chance to use them.  */
      if (i == n)
	{
	  remove_value_from_changed_variables (val);
	  n--;
	}
    }
}
9070 | |
9071 | /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain |
9072 | CHANGED_VARIABLES and delete this chain. WHERE specifies whether |
   the notes shall be emitted before or after instruction INSN.  */
9074 | |
static void
emit_notes_for_changes (rtx_insn *insn, enum emit_note_where where,
			shared_hash *vars)
{
  emit_note_data data;
  variable_table_type *htab = shared_hash_htab (vars);

  /* Nothing queued, nothing to emit.  */
  if (changed_variables->is_empty ())
    return;

  /* With debug bind insns, first back-propagate changes from values
     and debug_exprs to the user variables that depend on them.  */
  if (MAY_HAVE_DEBUG_BIND_INSNS)
    process_changed_values (htab);

  data.insn = insn;
  data.where = where;
  data.vars = htab;

  /* Emit a NOTE_INSN_VAR_LOCATION for each remaining entry; the
     callback also empties changed_variables as it goes.  */
  changed_variables
    ->traverse <emit_note_data*, emit_note_insn_var_location> (argument: &data);
}
9095 | |
9096 | /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the |
9097 | same variable in hash table DATA or is not there at all. */ |
9098 | |
int
emit_notes_for_differences_1 (variable **slot, variable_table_type *new_vars)
{
  variable *old_var, *new_var;

  old_var = *slot;
  new_var = new_vars->find_with_hash (comparable: old_var->dv, hash: dv_htab_hash (dv: old_var->dv));

  if (!new_var)
    {
      /* Variable has disappeared.  */
      variable *empty_var = NULL;

      /* For values and debug_exprs, try to reuse the entry kept in
	 dropped_values so its one-part auxiliary data survives.  */
      if (old_var->onepart == ONEPART_VALUE
	  || old_var->onepart == ONEPART_DEXPR)
	{
	  empty_var = variable_from_dropped (dv: old_var->dv, insert: NO_INSERT);
	  if (empty_var)
	    {
	      gcc_checking_assert (!empty_var->in_changed_variables);
	      if (!VAR_LOC_1PAUX (old_var))
		{
		  VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var);
		  VAR_LOC_1PAUX (empty_var) = NULL;
		}
	      else
		gcc_checking_assert (!VAR_LOC_1PAUX (empty_var));
	    }
	}

      /* Otherwise allocate a fresh, empty stand-in entry.  */
      if (!empty_var)
	{
	  empty_var = onepart_pool_allocate (onepart: old_var->onepart);
	  empty_var->dv = old_var->dv;
	  empty_var->refcount = 0;
	  empty_var->n_var_parts = 0;
	  empty_var->onepart = old_var->onepart;
	  empty_var->in_changed_variables = false;
	}

      if (empty_var->onepart)
	{
	  /* Propagate the auxiliary data to (ultimately)
	     changed_variables.  */
	  empty_var->var_part[0].loc_chain = NULL;
	  empty_var->var_part[0].cur_loc = NULL;
	  VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var);
	  VAR_LOC_1PAUX (old_var) = NULL;
	}
      variable_was_changed (var: empty_var, NULL);
      /* Continue traversing the hash table.  */
      return 1;
    }
  /* Update cur_loc and one-part auxiliary data, before new_var goes
     through variable_was_changed.  */
  if (old_var != new_var && new_var->onepart)
    {
      gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL);
      VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var);
      VAR_LOC_1PAUX (old_var) = NULL;
      new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc;
    }
  if (variable_different_p (var1: old_var, var2: new_var))
    variable_was_changed (var: new_var, NULL);

  /* Continue traversing the hash table.  */
  return 1;
}
9167 | |
9168 | /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash |
9169 | table DATA. */ |
9170 | |
9171 | int |
9172 | emit_notes_for_differences_2 (variable **slot, variable_table_type *old_vars) |
9173 | { |
9174 | variable *old_var, *new_var; |
9175 | |
9176 | new_var = *slot; |
9177 | old_var = old_vars->find_with_hash (comparable: new_var->dv, hash: dv_htab_hash (dv: new_var->dv)); |
9178 | if (!old_var) |
9179 | { |
9180 | int i; |
9181 | for (i = 0; i < new_var->n_var_parts; i++) |
9182 | new_var->var_part[i].cur_loc = NULL; |
9183 | variable_was_changed (var: new_var, NULL); |
9184 | } |
9185 | |
9186 | /* Continue traversing the hash table. */ |
9187 | return 1; |
9188 | } |
9189 | |
9190 | /* Emit notes before INSN for differences between dataflow sets OLD_SET and |
9191 | NEW_SET. */ |
9192 | |
static void
emit_notes_for_differences (rtx_insn *insn, dataflow_set *old_set,
			    dataflow_set *new_set)
{
  /* Queue variables that changed or disappeared relative to OLD_SET...  */
  shared_hash_htab (vars: old_set->vars)
    ->traverse <variable_table_type *, emit_notes_for_differences_1>
    (argument: shared_hash_htab (vars: new_set->vars));
  /* ... then variables present only in NEW_SET...  */
  shared_hash_htab (vars: new_set->vars)
    ->traverse <variable_table_type *, emit_notes_for_differences_2>
    (argument: shared_hash_htab (vars: old_set->vars));
  /* ... and emit the notes for everything queued.  */
  emit_notes_for_changes (insn, where: EMIT_NOTE_BEFORE_INSN, vars: new_set->vars);
}
9205 | |
9206 | /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */ |
9207 | |
9208 | static rtx_insn * |
9209 | next_non_note_insn_var_location (rtx_insn *insn) |
9210 | { |
9211 | while (insn) |
9212 | { |
9213 | insn = NEXT_INSN (insn); |
9214 | if (insn == 0 |
9215 | || !NOTE_P (insn) |
9216 | || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION) |
9217 | break; |
9218 | } |
9219 | |
9220 | return insn; |
9221 | } |
9222 | |
9223 | /* Emit the notes for changes of location parts in the basic block BB. */ |
9224 | |
static void
emit_notes_in_bb (basic_block bb, dataflow_set *set)
{
  unsigned int i;
  micro_operation *mo;

  /* Start from the locations live on entry to BB.  */
  dataflow_set_clear (set);
  dataflow_set_copy (dst: set, src: &VTI (bb)->in);

  FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo)
    {
      rtx_insn *insn = mo->insn;
      rtx_insn *next_insn = next_non_note_insn_var_location (insn);

      switch (mo->type)
	{
	case MO_CALL:
	  /* A call clobbers call-used locations; emit the resulting
	     notes right after the call insn.  */
	  dataflow_set_clear_at_call (set, call_insn: insn);
	  emit_notes_for_changes (insn, where: EMIT_NOTE_AFTER_CALL_INSN, vars: set->vars);
	  {
	    /* Expand the recorded argument locations and attach them
	       to the call as a REG_CALL_ARG_LOCATION note, dropping
	       entries that fail to expand.  */
	    rtx arguments = mo->u.loc, *p = &arguments;
	    while (*p)
	      {
		XEXP (XEXP (*p, 0), 1)
		  = vt_expand_loc (XEXP (XEXP (*p, 0), 1),
				   vars: shared_hash_htab (vars: set->vars));
		/* If expansion is successful, keep it in the list.  */
		if (XEXP (XEXP (*p, 0), 1))
		  {
		    XEXP (XEXP (*p, 0), 1)
		      = copy_rtx_if_shared (XEXP (XEXP (*p, 0), 1));
		    p = &XEXP (*p, 1);
		  }
		/* Otherwise, if the following item is data_value for it,
		   drop it too.  */
		else if (XEXP (*p, 1)
			 && REG_P (XEXP (XEXP (*p, 0), 0))
			 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0))
			 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0),
					 0))
			 && REGNO (XEXP (XEXP (*p, 0), 0))
			    == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0),
						  0), 0)))
		  *p = XEXP (XEXP (*p, 1), 1);
		/* Just drop this item.  */
		else
		  *p = XEXP (*p, 1);
	      }
	    add_reg_note (insn, REG_CALL_ARG_LOCATION, arguments);
	  }
	  break;

	case MO_USE:
	  {
	    /* A tracked use: the register or memory holds the variable
	       (possibly still uninitialized).  */
	    rtx loc = mo->u.loc;

	    if (REG_P (loc))
	      var_reg_set (set, loc, initialized: VAR_INIT_STATUS_UNINITIALIZED, NULL);
	    else
	      var_mem_set (set, loc, initialized: VAR_INIT_STATUS_UNINITIALIZED, NULL);

	    emit_notes_for_changes (insn, where: EMIT_NOTE_BEFORE_INSN, vars: set->vars);
	  }
	  break;

	case MO_VAL_LOC:
	  {
	    /* A debug bind: associate the variable with a value or a
	       direct location.  */
	    rtx loc = mo->u.loc;
	    rtx val, vloc;
	    tree var;

	    if (GET_CODE (loc) == CONCAT)
	      {
		val = XEXP (loc, 0);
		vloc = XEXP (loc, 1);
	      }
	    else
	      {
		val = NULL_RTX;
		vloc = loc;
	      }

	    var = PAT_VAR_LOCATION_DECL (vloc);

	    clobber_variable_part (set, NULL_RTX,
				   dv: dv_from_decl (decl: var), offset: 0, NULL_RTX);
	    if (val)
	      {
		if (VAL_NEEDS_RESOLUTION (loc))
		  val_resolve (set, val, PAT_VAR_LOCATION_LOC (vloc), insn);
		set_variable_part (set, loc: val, dv: dv_from_decl (decl: var), offset: 0,
				   initialized: VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
				   iopt: INSERT);
	      }
	    else if (!VAR_LOC_UNKNOWN_P (PAT_VAR_LOCATION_LOC (vloc)))
	      set_variable_part (set, PAT_VAR_LOCATION_LOC (vloc),
				 dv: dv_from_decl (decl: var), offset: 0,
				 initialized: VAR_INIT_STATUS_INITIALIZED, NULL_RTX,
				 iopt: INSERT);

	    emit_notes_for_changes (insn, where: EMIT_NOTE_AFTER_INSN, vars: set->vars);
	  }
	  break;

	case MO_VAL_USE:
	  {
	    /* A use of a cselib value, possibly paired with the
	       location it was found in.  */
	    rtx loc = mo->u.loc;
	    rtx val, vloc, uloc;

	    vloc = uloc = XEXP (loc, 1);
	    val = XEXP (loc, 0);

	    if (GET_CODE (val) == CONCAT)
	      {
		uloc = XEXP (val, 1);
		val = XEXP (val, 0);
	      }

	    if (VAL_NEEDS_RESOLUTION (loc))
	      val_resolve (set, val, loc: vloc, insn);
	    else
	      val_store (set, val, loc: uloc, insn, modified: false);

	    if (VAL_HOLDS_TRACK_EXPR (loc))
	      {
		if (GET_CODE (uloc) == REG)
		  var_reg_set (set, loc: uloc, initialized: VAR_INIT_STATUS_UNINITIALIZED,
			       NULL);
		else if (GET_CODE (uloc) == MEM)
		  var_mem_set (set, loc: uloc, initialized: VAR_INIT_STATUS_UNINITIALIZED,
			       NULL);
	      }

	    emit_notes_for_changes (insn, where: EMIT_NOTE_BEFORE_INSN, vars: set->vars);
	  }
	  break;

	case MO_VAL_SET:
	  {
	    /* A store that defines a cselib value; unwrap the nested
	       CONCAT/SET structure to find value, source and
	       destination.  */
	    rtx loc = mo->u.loc;
	    rtx val, vloc, uloc;
	    rtx dstv, srcv;

	    vloc = loc;
	    uloc = XEXP (vloc, 1);
	    val = XEXP (vloc, 0);
	    vloc = uloc;

	    if (GET_CODE (uloc) == SET)
	      {
		dstv = SET_DEST (uloc);
		srcv = SET_SRC (uloc);
	      }
	    else
	      {
		dstv = uloc;
		srcv = NULL;
	      }

	    if (GET_CODE (val) == CONCAT)
	      {
		dstv = vloc = XEXP (val, 1);
		val = XEXP (val, 0);
	      }

	    if (GET_CODE (vloc) == SET)
	      {
		srcv = SET_SRC (vloc);

		gcc_assert (val != srcv);
		gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc));

		dstv = vloc = SET_DEST (vloc);

		if (VAL_NEEDS_RESOLUTION (loc))
		  val_resolve (set, val, loc: srcv, insn);
	      }
	    else if (VAL_NEEDS_RESOLUTION (loc))
	      {
		gcc_assert (GET_CODE (uloc) == SET
			    && GET_CODE (SET_SRC (uloc)) == REG);
		val_resolve (set, val, SET_SRC (uloc), insn);
	      }

	    if (VAL_HOLDS_TRACK_EXPR (loc))
	      {
		if (VAL_EXPR_IS_CLOBBERED (loc))
		  {
		    if (REG_P (uloc))
		      var_reg_delete (set, loc: uloc, clobber: true);
		    else if (MEM_P (uloc))
		      {
			gcc_assert (MEM_P (dstv));
			gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc));
			var_mem_delete (set, loc: dstv, clobber: true);
		      }
		  }
		else
		  {
		    bool copied_p = VAL_EXPR_IS_COPIED (loc);
		    rtx src = NULL, dst = uloc;
		    enum var_init_status status = VAR_INIT_STATUS_INITIALIZED;

		    if (GET_CODE (uloc) == SET)
		      {
			src = SET_SRC (uloc);
			dst = SET_DEST (uloc);
		      }

		    /* For a copy, inherit the source's initialization
		       status instead of assuming initialized.  */
		    if (copied_p)
		      {
			status = find_src_status (in: set, src);

			src = find_src_set_src (set, src);
		      }

		    if (REG_P (dst))
		      var_reg_delete_and_set (set, loc: dst, modify: !copied_p,
					      initialized: status, set_src: srcv);
		    else if (MEM_P (dst))
		      {
			gcc_assert (MEM_P (dstv));
			gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst));
			var_mem_delete_and_set (set, loc: dstv, modify: !copied_p,
						initialized: status, set_src: srcv);
		      }
		  }
	      }
	    else if (REG_P (uloc))
	      var_regno_delete (set, REGNO (uloc));
	    else if (MEM_P (uloc))
	      {
		gcc_checking_assert (GET_CODE (vloc) == MEM);
		gcc_checking_assert (vloc == dstv);
		if (vloc != dstv)
		  clobber_overlapping_mems (set, loc: vloc);
	      }

	    val_store (set, val, loc: dstv, insn, modified: true);

	    emit_notes_for_changes (insn: next_insn, where: EMIT_NOTE_BEFORE_INSN,
				    vars: set->vars);
	  }
	  break;

	case MO_SET:
	  {
	    /* A plain store into a tracked location.  */
	    rtx loc = mo->u.loc;
	    rtx set_src = NULL;

	    if (GET_CODE (loc) == SET)
	      {
		set_src = SET_SRC (loc);
		loc = SET_DEST (loc);
	      }

	    if (REG_P (loc))
	      var_reg_delete_and_set (set, loc, modify: true, initialized: VAR_INIT_STATUS_INITIALIZED,
				      set_src);
	    else
	      var_mem_delete_and_set (set, loc, modify: true, initialized: VAR_INIT_STATUS_INITIALIZED,
				      set_src);

	    emit_notes_for_changes (insn: next_insn, where: EMIT_NOTE_BEFORE_INSN,
				    vars: set->vars);
	  }
	  break;

	case MO_COPY:
	  {
	    /* A copy: the destination inherits the source's variable
	       and its initialization status.  */
	    rtx loc = mo->u.loc;
	    enum var_init_status src_status;
	    rtx set_src = NULL;

	    if (GET_CODE (loc) == SET)
	      {
		set_src = SET_SRC (loc);
		loc = SET_DEST (loc);
	      }

	    src_status = find_src_status (in: set, src: set_src);
	    set_src = find_src_set_src (set, src: set_src);

	    if (REG_P (loc))
	      var_reg_delete_and_set (set, loc, modify: false, initialized: src_status, set_src);
	    else
	      var_mem_delete_and_set (set, loc, modify: false, initialized: src_status, set_src);

	    emit_notes_for_changes (insn: next_insn, where: EMIT_NOTE_BEFORE_INSN,
				    vars: set->vars);
	  }
	  break;

	case MO_USE_NO_VAR:
	  {
	    /* A use of a location with no associated variable; it just
	       invalidates what was there.  */
	    rtx loc = mo->u.loc;

	    if (REG_P (loc))
	      var_reg_delete (set, loc, clobber: false);
	    else
	      var_mem_delete (set, loc, clobber: false);

	    emit_notes_for_changes (insn, where: EMIT_NOTE_AFTER_INSN, vars: set->vars);
	  }
	  break;

	case MO_CLOBBER:
	  {
	    /* An explicit clobber kills the location outright.  */
	    rtx loc = mo->u.loc;

	    if (REG_P (loc))
	      var_reg_delete (set, loc, clobber: true);
	    else
	      var_mem_delete (set, loc, clobber: true);

	    emit_notes_for_changes (insn: next_insn, where: EMIT_NOTE_BEFORE_INSN,
				    vars: set->vars);
	  }
	  break;

	case MO_ADJUST:
	  /* Track stack pointer movement so frame-relative addresses
	     stay accurate.  */
	  set->stack_adjust += mo->u.adjust;
	  break;
	}
    }
}
9551 | |
9552 | /* Emit notes for the whole function. */ |
9553 | |
static void
vt_emit_notes (void)
{
  basic_block bb;
  dataflow_set cur;

  gcc_assert (changed_variables->is_empty ());

  /* Free memory occupied by the out hash tables, as they aren't used
     anymore.  */
  FOR_EACH_BB_FN (bb, cfun)
    dataflow_set_clear (set: &VTI (bb)->out);

  /* Enable emitting notes by functions (mainly by set_variable_part and
     delete_variable_part).  */
  emit_notes = true;

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    dropped_values = new variable_table_type (cselib_get_next_uid () * 2);

  /* CUR carries the location set forward from block to block as the
     notes are emitted.  */
  dataflow_set_init (set: &cur);

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* Emit the notes for changes of variable locations between two
	 subsequent basic blocks.  */
      emit_notes_for_differences (BB_HEAD (bb), old_set: &cur, new_set: &VTI (bb)->in);

      if (MAY_HAVE_DEBUG_BIND_INSNS)
	local_get_addr_cache = new hash_map<rtx, rtx>;

      /* Emit the notes for the changes in the basic block itself.  */
      emit_notes_in_bb (bb, set: &cur);

      if (MAY_HAVE_DEBUG_BIND_INSNS)
	delete local_get_addr_cache;
      local_get_addr_cache = NULL;

      /* Free memory occupied by the in hash table, we won't need it
	 again.  */
      dataflow_set_clear (set: &VTI (bb)->in);
    }

  /* NOTE(review): with checking enabled, compare the final set against
     the empty table; this flags (via variable_was_changed) anything
     unexpectedly still live at function end.  */
  if (flag_checking)
    shared_hash_htab (vars: cur.vars)
      ->traverse <variable_table_type *, emit_notes_for_differences_1>
      (argument: shared_hash_htab (vars: empty_shared_hash));

  dataflow_set_destroy (set: &cur);

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    delete dropped_values;
  dropped_values = NULL;

  emit_notes = false;
}
9610 | |
9611 | /* If there is a declaration and offset associated with register/memory RTL |
9612 | assign declaration to *DECLP and offset to *OFFSETP, and return true. */ |
9613 | |
static bool
vt_get_decl_and_offset (rtx rtl, tree *declp, poly_int64 *offsetp)
{
  if (REG_P (rtl))
    {
      /* Registers carry their decl and offset in REG_ATTRS.  */
      if (REG_ATTRS (rtl))
	{
	  *declp = REG_EXPR (rtl);
	  *offsetp = REG_OFFSET (rtl);
	  return true;
	}
    }
  else if (GET_CODE (rtl) == PARALLEL)
    {
      /* For a value split across several registers, succeed only when
	 every piece is a register referring to the same decl with a
	 trackable offset; report the minimum piece offset.  */
      tree decl = NULL_TREE;
      HOST_WIDE_INT offset = MAX_VAR_PARTS;
      int len = XVECLEN (rtl, 0), i;

      for (i = 0; i < len; i++)
	{
	  rtx reg = XEXP (XVECEXP (rtl, 0, i), 0);
	  if (!REG_P (reg) || !REG_ATTRS (reg))
	    break;
	  if (!decl)
	    decl = REG_EXPR (reg);
	  if (REG_EXPR (reg) != decl)
	    break;
	  HOST_WIDE_INT this_offset;
	  if (!track_offset_p (REG_OFFSET (reg), offset_out: &this_offset))
	    break;
	  offset = MIN (offset, this_offset);
	}

      /* Only if the loop ran to completion did every piece check out.  */
      if (i == len)
	{
	  *declp = decl;
	  *offsetp = offset;
	  return true;
	}
    }
  else if (MEM_P (rtl))
    {
      /* Memory references carry their decl and offset in MEM_ATTRS.  */
      if (MEM_ATTRS (rtl))
	{
	  *declp = MEM_EXPR (rtl);
	  *offsetp = int_mem_offset (mem: rtl);
	  return true;
	}
    }
  return false;
}
9665 | |
9666 | /* Record the value for the ENTRY_VALUE of RTL as a global equivalence |
9667 | of VAL. */ |
9668 | |
9669 | static void |
9670 | record_entry_value (cselib_val *val, rtx rtl) |
9671 | { |
9672 | rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl)); |
9673 | |
9674 | ENTRY_VALUE_EXP (ev) = rtl; |
9675 | |
9676 | cselib_add_permanent_equiv (val, ev, get_insns ()); |
9677 | } |
9678 | |
/* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */

static void
vt_add_function_parameter (tree parm)
{
  rtx decl_rtl = DECL_RTL_IF_SET (parm);
  rtx incoming = DECL_INCOMING_RTL (parm);
  tree decl;
  machine_mode mode;
  poly_int64 offset;
  dataflow_set *out;
  decl_or_value dv;
  bool incoming_ok = true;

  /* Only genuine PARM_DECLs that have both a DECL_RTL and an incoming
     location, neither in BLKmode, are tracked here.  */
  if (TREE_CODE (parm) != PARM_DECL)
    return;

  if (!decl_rtl || !incoming)
    return;

  if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode)
    return;

  /* If there is a DRAP register or a pseudo in internal_arg_pointer,
     rewrite the incoming location of parameters passed on the stack
     into MEMs based on the argument pointer, so that incoming doesn't
     depend on a pseudo. */
  poly_int64 incoming_offset = 0;
  if (MEM_P (incoming)
      && (strip_offset (XEXP (incoming, 0), &incoming_offset)
          == crtl->args.internal_arg_pointer))
    {
      HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl);
      incoming
        = replace_equiv_address_nv (incoming,
                                    plus_constant (Pmode,
                                                   arg_pointer_rtx,
                                                   off + incoming_offset));
    }

#ifdef HAVE_window_save
  /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
     If the target machine has an explicit window save instruction, the
     actual entry value is the corresponding OUTGOING_REGNO instead. */
  if (HAVE_window_save && !crtl->uses_only_leaf_regs)
    {
      /* Each incoming->outgoing register pairing is recorded in
         windowed_parm_regs for later use.  */
      if (REG_P (incoming)
          && HARD_REGISTER_P (incoming)
          && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
        {
          parm_reg p;
          p.incoming = incoming;
          incoming
            = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
                                  OUTGOING_REGNO (REGNO (incoming)), 0);
          p.outgoing = incoming;
          vec_safe_push (windowed_parm_regs, p);
        }
      else if (GET_CODE (incoming) == PARALLEL)
        {
          /* Rebuild the PARALLEL element by element with each register
             replaced by its OUTGOING_REGNO counterpart.  */
          rtx outgoing
            = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (XVECLEN (incoming, 0)));
          int i;

          for (i = 0; i < XVECLEN (incoming, 0); i++)
            {
              rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
              parm_reg p;
              p.incoming = reg;
              reg = gen_rtx_REG_offset (reg, GET_MODE (reg),
                                        OUTGOING_REGNO (REGNO (reg)), 0);
              p.outgoing = reg;
              XVECEXP (outgoing, 0, i)
                = gen_rtx_EXPR_LIST (VOIDmode, reg,
                                     XEXP (XVECEXP (incoming, 0, i), 1));
              vec_safe_push (windowed_parm_regs, p);
            }

          incoming = outgoing;
        }
      else if (MEM_P (incoming)
               && REG_P (XEXP (incoming, 0))
               && HARD_REGISTER_P (XEXP (incoming, 0)))
        {
          /* A stack location addressed through a hard register: rewrite
             the address register to its outgoing counterpart.  */
          rtx reg = XEXP (incoming, 0);
          if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
            {
              parm_reg p;
              p.incoming = reg;
              reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
              p.outgoing = reg;
              vec_safe_push (windowed_parm_regs, p);
              incoming = replace_equiv_address_nv (incoming, reg);
            }
        }
    }
#endif

  /* Determine which decl and offset INCOMING refers to; if its RTL
     carries no attributes, fall back to DECL_RTL.  */
  if (!vt_get_decl_and_offset (rtl: incoming, declp: &decl, offsetp: &offset))
    {
      incoming_ok = false;
      if (MEM_P (incoming))
        {
          /* This means argument is passed by invisible reference. */
          offset = 0;
          decl = parm;
        }
      else
        {
          if (!vt_get_decl_and_offset (rtl: decl_rtl, declp: &decl, offsetp: &offset))
            return;
          offset += byte_lowpart_offset (GET_MODE (incoming),
                                         GET_MODE (decl_rtl));
        }
    }

  if (!decl)
    return;

  if (parm != decl)
    {
      /* If that DECL_RTL wasn't a pseudo that got spilled to
         memory, bail out. Otherwise, the spill slot sharing code
         will force the memory to reference spill_slot_decl (%sfp),
         so we don't match above. That's ok, the pseudo must have
         referenced the entire parameter, so just reset OFFSET. */
      if (decl != get_spill_slot_decl (false))
        return;
      offset = 0;
    }

  HOST_WIDE_INT const_offset;
  if (!track_loc_p (loc: incoming, expr: parm, offset, store_reg_p: false, mode_out: &mode, offset_out: &const_offset))
    return;

  out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out;

  dv = dv_from_decl (decl: parm);

  /* For parameters suitable for debug binds, create a preserved cselib
     VALUE for the incoming location and switch DV to that value so the
     binding survives across the whole function.  */
  if (target_for_debug_bind (parm)
      /* We can't deal with these right now, because this kind of
         variable is single-part. ??? We could handle parallels
         that describe multiple locations for the same single
         value, but ATM we don't. */
      && GET_CODE (incoming) != PARALLEL)
    {
      cselib_val *val;
      rtx lowpart;

      /* ??? We shouldn't ever hit this, but it may happen because
         arguments passed by invisible reference aren't dealt with
         above: incoming-rtl will have Pmode rather than the
         expected mode for the type. */
      if (const_offset)
        return;

      lowpart = var_lowpart (mode, loc: incoming);
      if (!lowpart)
        return;

      val = cselib_lookup_from_insn (lowpart, mode, true,
                                     VOIDmode, get_insns ());

      /* ??? Float-typed values in memory are not handled by
         cselib. */
      if (val)
        {
          preserve_value (val);
          set_variable_part (set: out, loc: val->val_rtx, dv, offset: const_offset,
                             initialized: VAR_INIT_STATUS_INITIALIZED, NULL, iopt: INSERT);
          dv = dv_from_value (value: val->val_rtx);
        }

      if (MEM_P (incoming))
        {
          val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true,
                                         VOIDmode, get_insns ());
          if (val)
            {
              preserve_value (val);
              incoming = replace_equiv_address_nv (incoming, val->val_rtx);
            }
        }
    }

  if (REG_P (incoming))
    {
      /* Parameter arriving in a hard register: register the location in
         the entry block's OUT set and record entry values for it.  */
      incoming = var_lowpart (mode, loc: incoming);
      gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER);
      attrs_list_insert (listp: &out->regs[REGNO (incoming)], dv, offset: const_offset,
                         loc: incoming);
      set_variable_part (set: out, loc: incoming, dv, offset: const_offset,
                         initialized: VAR_INIT_STATUS_INITIALIZED, NULL, iopt: INSERT);
      if (dv_is_value_p (dv))
        {
          record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), rtl: incoming);
          /* For references to integral values, also record an entry
             value for the pointed-to memory.  */
          if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE
              && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm))))
            {
              machine_mode indmode
                = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm)));
              rtx mem = gen_rtx_MEM (indmode, incoming);
              cselib_val *val = cselib_lookup_from_insn (mem, indmode, true,
                                                         VOIDmode,
                                                         get_insns ());
              if (val)
                {
                  preserve_value (val);
                  record_entry_value (val, rtl: mem);
                  set_variable_part (set: out, loc: mem, dv: dv_from_value (value: val->val_rtx), offset: 0,
                                     initialized: VAR_INIT_STATUS_INITIALIZED, NULL, iopt: INSERT);
                }
            }

          /* Also record entry values for HWI-computable widenings of an
             integral parameter register.  */
          if (GET_MODE_CLASS (mode) == MODE_INT)
            {
              machine_mode wider_mode_iter;
              FOR_EACH_WIDER_MODE (wider_mode_iter, mode)
                {
                  if (!HWI_COMPUTABLE_MODE_P (mode: wider_mode_iter))
                    break;
                  rtx wider_reg
                    = gen_rtx_REG (wider_mode_iter, REGNO (incoming));
                  cselib_val *wider_val
                    = cselib_lookup_from_insn (wider_reg, wider_mode_iter, 1,
                                               VOIDmode, get_insns ());
                  preserve_value (val: wider_val);
                  record_entry_value (val: wider_val, rtl: wider_reg);
                }
            }
        }
    }
  else if (GET_CODE (incoming) == PARALLEL && !dv_onepart_p (dv))
    {
      /* Multi-register parameter: record each constituent register as a
         separate variable part.  */
      int i;

      /* The following code relies on vt_get_decl_and_offset returning true for
         incoming, which might not be always the case. */
      if (!incoming_ok)
        return;
      for (i = 0; i < XVECLEN (incoming, 0); i++)
        {
          rtx reg = XEXP (XVECEXP (incoming, 0, i), 0);
          /* vt_get_decl_and_offset has already checked that the offset
             is a valid variable part. */
          const_offset = get_tracked_reg_offset (loc: reg);
          gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER);
          attrs_list_insert (listp: &out->regs[REGNO (reg)], dv, offset: const_offset, loc: reg);
          set_variable_part (set: out, loc: reg, dv, offset: const_offset,
                             initialized: VAR_INIT_STATUS_INITIALIZED, NULL, iopt: INSERT);
        }
    }
  else if (MEM_P (incoming))
    {
      /* Parameter arriving on the stack: record the memory location.  */
      incoming = var_lowpart (mode, loc: incoming);
      set_variable_part (set: out, loc: incoming, dv, offset: const_offset,
                         initialized: VAR_INIT_STATUS_INITIALIZED, NULL, iopt: INSERT);
    }
}
9938 | |
9939 | /* Insert function parameters to IN and OUT sets of ENTRY_BLOCK. */ |
9940 | |
9941 | static void |
9942 | vt_add_function_parameters (void) |
9943 | { |
9944 | tree parm; |
9945 | |
9946 | for (parm = DECL_ARGUMENTS (current_function_decl); |
9947 | parm; parm = DECL_CHAIN (parm)) |
9948 | vt_add_function_parameter (parm); |
9949 | |
9950 | if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl))) |
9951 | { |
9952 | tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl)); |
9953 | |
9954 | if (INDIRECT_REF_P (vexpr)) |
9955 | vexpr = TREE_OPERAND (vexpr, 0); |
9956 | |
9957 | if (TREE_CODE (vexpr) == PARM_DECL |
9958 | && DECL_ARTIFICIAL (vexpr) |
9959 | && !DECL_IGNORED_P (vexpr) |
9960 | && DECL_NAMELESS (vexpr)) |
9961 | vt_add_function_parameter (parm: vexpr); |
9962 | } |
9963 | } |
9964 | |
/* Initialize cfa_base_rtx, create a preserved VALUE for it and
   ensure it isn't flushed during cselib_reset_table.
   Can be called only if frame_pointer_rtx resp. arg_pointer_rtx
   has been eliminated. */

static void
vt_init_cfa_base (void)
{
  cselib_val *val;

#ifdef FRAME_POINTER_CFA_OFFSET
  cfa_base_rtx = frame_pointer_rtx;
  cfa_base_offset = -FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
  cfa_base_rtx = arg_pointer_rtx;
  cfa_base_offset = -ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif
  /* A usable CFA base must be a fixed register distinct from the hard
     frame pointer; otherwise no CFA base is used at all.  */
  if (cfa_base_rtx == hard_frame_pointer_rtx
      || !fixed_regs[REGNO (cfa_base_rtx)])
    {
      cfa_base_rtx = NULL_RTX;
      return;
    }
  if (!MAY_HAVE_DEBUG_BIND_INSNS)
    return;

  /* Tell alias analysis that cfa_base_rtx should share
     find_base_term value with stack pointer or hard frame pointer. */
  if (!frame_pointer_needed)
    vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx);
  else if (!crtl->stack_realign_tried)
    vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx);

  /* Create a cselib VALUE for the CFA base register and preserve it so
     it survives cselib_reset_table between extended basic blocks.  */
  val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1,
				 VOIDmode, get_insns ());
  preserve_value (val);
  cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx));
}
10003 | |
10004 | /* Reemit INSN, a MARKER_DEBUG_INSN, as a note. */ |
10005 | |
10006 | static rtx_insn * |
10007 | reemit_marker_as_note (rtx_insn *insn) |
10008 | { |
10009 | gcc_checking_assert (DEBUG_MARKER_INSN_P (insn)); |
10010 | |
10011 | enum insn_note kind = INSN_DEBUG_MARKER_KIND (insn); |
10012 | |
10013 | switch (kind) |
10014 | { |
10015 | case NOTE_INSN_BEGIN_STMT: |
10016 | case NOTE_INSN_INLINE_ENTRY: |
10017 | { |
10018 | rtx_insn *note = NULL; |
10019 | if (cfun->debug_nonbind_markers) |
10020 | { |
10021 | note = emit_note_before (kind, insn); |
10022 | NOTE_MARKER_LOCATION (note) = INSN_LOCATION (insn); |
10023 | } |
10024 | delete_insn (insn); |
10025 | return note; |
10026 | } |
10027 | |
10028 | default: |
10029 | gcc_unreachable (); |
10030 | } |
10031 | } |
10032 | |
/* Allocate and initialize the data structures for variable tracking
   and parse the RTL to get the micro operations. */

static bool
vt_initialize (void)
{
  basic_block bb;
  poly_int64 fp_cfa_offset = -1;

  /* Attach a variable_tracking_info to every basic block (via bb->aux,
     accessed with the VTI macro).  */
  alloc_aux_for_blocks (sizeof (variable_tracking_info));

  empty_shared_hash = shared_hash_pool.allocate ();
  empty_shared_hash->refcount = 1;
  empty_shared_hash->htab = new variable_table_type (1);
  changed_variables = new variable_table_type (10);

  /* Init the IN and OUT sets. */
  FOR_ALL_BB_FN (bb, cfun)
    {
      VTI (bb)->visited = false;
      VTI (bb)->flooded = false;
      dataflow_set_init (set: &VTI (bb)->in);
      dataflow_set_init (set: &VTI (bb)->out);
      VTI (bb)->permp = NULL;
    }

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
      scratch_regs = BITMAP_ALLOC (NULL);
      preserved_values.create (nelems: 256);
      global_get_addr_cache = new hash_map<rtx, rtx>;
    }
  else
    {
      scratch_regs = NULL;
      global_get_addr_cache = NULL;
    }

  /* Record permanent cselib equivalences between the CFA-related
     register and the incoming stack pointer.  */
  if (MAY_HAVE_DEBUG_BIND_INSNS)
    {
      rtx reg, expr;
      int ofst;
      cselib_val *val;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
      ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
      reg = arg_pointer_rtx;
      ofst = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif

      ofst -= INCOMING_FRAME_SP_OFFSET;

      val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1,
				     VOIDmode, get_insns ());
      preserve_value (val);
      if (reg != hard_frame_pointer_rtx && fixed_regs[REGNO (reg)])
	cselib_preserve_cfa_base_value (val, REGNO (reg));
      if (ofst)
	{
	  cselib_val *valsp
	    = cselib_lookup_from_insn (stack_pointer_rtx,
				       GET_MODE (stack_pointer_rtx), 1,
				       VOIDmode, get_insns ());
	  preserve_value (val: valsp);
	  expr = plus_constant (GET_MODE (reg), reg, ofst);
	  /* This cselib_add_permanent_equiv call needs to be done before
	     the other cselib_add_permanent_equiv a few lines later,
	     because after that one is done, cselib_lookup on this expr
	     will due to the cselib SP_DERIVED_VALUE_P optimizations
	     return valsp and so no permanent equivalency will be added. */
	  cselib_add_permanent_equiv (valsp, expr, get_insns ());
	}

      expr = plus_constant (GET_MODE (stack_pointer_rtx),
			    stack_pointer_rtx, -ofst);
      cselib_add_permanent_equiv (val, expr, get_insns ());
    }

  /* In order to factor out the adjustments made to the stack pointer or to
     the hard frame pointer and thus be able to use DW_OP_fbreg operations
     instead of individual location lists, we're going to rewrite MEMs based
     on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx
     or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx
     resp. arg_pointer_rtx. We can do this either when there is no frame
     pointer in the function and stack adjustments are consistent for all
     basic blocks or when there is a frame pointer and no stack realignment.
     But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx
     has been eliminated. */
  if (!frame_pointer_needed)
    {
      rtx reg, elim;

      if (!vt_stack_adjustments ())
	return false;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
#else
      reg = arg_pointer_rtx;
#endif
      elim = (ira_use_lra_p
	      ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
	      : eliminate_regs (reg, VOIDmode, NULL_RTX));
      if (elim != reg)
	{
	  if (GET_CODE (elim) == PLUS)
	    elim = XEXP (elim, 0);
	  if (elim == stack_pointer_rtx)
	    vt_init_cfa_base ();
	}
    }
  else if (!crtl->stack_realign_tried)
    {
      rtx reg, elim;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
      fp_cfa_offset = FRAME_POINTER_CFA_OFFSET (current_function_decl);
#else
      reg = arg_pointer_rtx;
      fp_cfa_offset = ARG_POINTER_CFA_OFFSET (current_function_decl);
#endif
      elim = (ira_use_lra_p
	      ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
	      : eliminate_regs (reg, VOIDmode, NULL_RTX));
      if (elim != reg)
	{
	  if (GET_CODE (elim) == PLUS)
	    {
	      fp_cfa_offset -= rtx_to_poly_int64 (XEXP (elim, 1));
	      elim = XEXP (elim, 0);
	    }
	  /* fp_cfa_offset of -1 means "no CFA offset from the hard
	     frame pointer is usable".  */
	  if (elim != hard_frame_pointer_rtx)
	    fp_cfa_offset = -1;
	}
      else
	fp_cfa_offset = -1;
    }

  /* If the stack is realigned and a DRAP register is used, we're going to
     rewrite MEMs based on it representing incoming locations of parameters
     passed on the stack into MEMs based on the argument pointer. Although
     we aren't going to rewrite other MEMs, we still need to initialize the
     virtual CFA pointer in order to ensure that the argument pointer will
     be seen as a constant throughout the function.

     ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */
  else if (stack_realign_drap)
    {
      rtx reg, elim;

#ifdef FRAME_POINTER_CFA_OFFSET
      reg = frame_pointer_rtx;
#else
      reg = arg_pointer_rtx;
#endif
      elim = (ira_use_lra_p
	      ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
	      : eliminate_regs (reg, VOIDmode, NULL_RTX));
      if (elim != reg)
	{
	  if (GET_CODE (elim) == PLUS)
	    elim = XEXP (elim, 0);
	  if (elim == hard_frame_pointer_rtx)
	    vt_init_cfa_base ();
	}
    }

  /* -1 means "no hard frame pointer adjustment seen yet"; it is set
     once an fp setter insn is processed in the scan below.  */
  hard_frame_pointer_adjustment = -1;

  vt_add_function_parameters ();

  bool record_sp_value = false;
  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;
      basic_block first_bb, last_bb;

      if (MAY_HAVE_DEBUG_BIND_INSNS)
	{
	  cselib_record_sets_hook = add_with_sets;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (stream: dump_file, format: "first value: %i\n" ,
		     cselib_get_next_uid ());
	}

      /* Not done for the first basic block (record_sp_value starts out
         false), only for its successors.  */
      if (MAY_HAVE_DEBUG_BIND_INSNS
	  && cfa_base_rtx
	  && !frame_pointer_needed
	  && record_sp_value)
	cselib_record_sp_cfa_base_equiv (-cfa_base_offset
					 - VTI (bb)->in.stack_adjust,
					 BB_HEAD (bb));
      record_sp_value = true;

      /* Extend the scan region over a maximal run of successor blocks
	 linked by single-predecessor fallthru edges, so cselib state can
	 be carried through the whole extended region.  */
      first_bb = bb;
      for (;;)
	{
	  edge e;
	  if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	      || ! single_pred_p (bb: bb->next_bb))
	    break;
	  e = find_edge (bb, bb->next_bb);
	  if (! e || (e->flags & EDGE_FALLTHRU) == 0)
	    break;
	  bb = bb->next_bb;
	}
      last_bb = bb;

      /* Add the micro-operations to the vector. */
      FOR_BB_BETWEEN (bb, first_bb, last_bb->next_bb, next_bb)
	{
	  /* OFFSET holds the expected final stack_adjust (computed by
	     vt_stack_adjustments); it is re-derived below and checked
	     against this value at the end of the block.  */
	  HOST_WIDE_INT offset = VTI (bb)->out.stack_adjust;
	  VTI (bb)->out.stack_adjust = VTI (bb)->in.stack_adjust;

	  rtx_insn *next;
	  FOR_BB_INSNS_SAFE (bb, insn, next)
	    {
	      if (INSN_P (insn))
		{
		  HOST_WIDE_INT pre = 0, post = 0;

		  if (!frame_pointer_needed)
		    {
		      /* Pre-modifying stack adjustments come before all
			 other micro operations of the insn.  */
		      insn_stack_adjust_offset_pre_post (insn, pre: &pre, post: &post);
		      if (pre)
			{
			  micro_operation mo;
			  mo.type = MO_ADJUST;
			  mo.u.adjust = pre;
			  mo.insn = insn;
			  if (dump_file && (dump_flags & TDF_DETAILS))
			    log_op_type (x: PATTERN (insn), bb, insn,
					 mopt: MO_ADJUST, out: dump_file);
			  VTI (bb)->mos.safe_push (obj: mo);
			}
		    }

		  cselib_hook_called = false;
		  adjust_insn (bb, insn);

		  if (pre)
		    VTI (bb)->out.stack_adjust += pre;

		  if (DEBUG_MARKER_INSN_P (insn))
		    {
		      reemit_marker_as_note (insn);
		      continue;
		    }

		  if (MAY_HAVE_DEBUG_BIND_INSNS)
		    {
		      if (CALL_P (insn))
			prepare_call_arguments (bb, insn);
		      cselib_process_insn (insn);
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  if (dump_flags & TDF_SLIM)
			    dump_insn_slim (dump_file, insn);
			  else
			    print_rtl_single (dump_file, insn);
			  dump_cselib_table (dump_file);
			}
		    }
		  /* If cselib did not invoke our hook (or is disabled),
		     collect the micro operations directly.  */
		  if (!cselib_hook_called)
		    add_with_sets (insn, sets: 0, n_sets: 0);
		  cancel_changes (0);

		  /* Post-modifying stack adjustments come after all
		     other micro operations of the insn.  */
		  if (post)
		    {
		      micro_operation mo;
		      mo.type = MO_ADJUST;
		      mo.u.adjust = post;
		      mo.insn = insn;
		      if (dump_file && (dump_flags & TDF_DETAILS))
			log_op_type (x: PATTERN (insn), bb, insn,
				     mopt: MO_ADJUST, out: dump_file);
		      VTI (bb)->mos.safe_push (obj: mo);
		      VTI (bb)->out.stack_adjust += post;
		    }

		  /* On the first insn that establishes the hard frame
		     pointer, switch to the CFA base.  */
		  if (maybe_ne (a: fp_cfa_offset, b: -1)
		      && known_eq (hard_frame_pointer_adjustment, -1)
		      && fp_setter_insn (insn))
		    {
		      vt_init_cfa_base ();
		      hard_frame_pointer_adjustment = fp_cfa_offset;
		      /* Disassociate sp from fp now. */
		      if (MAY_HAVE_DEBUG_BIND_INSNS)
			{
			  cselib_val *v;
			  cselib_invalidate_rtx (stack_pointer_rtx);
			  v = cselib_lookup (stack_pointer_rtx, Pmode, 1,
					     VOIDmode);
			  if (v && !cselib_preserved_value_p (v))
			    {
			      cselib_set_value_sp_based (v);
			      preserve_value (val: v);
			    }
			}
		    }
		}
	    }
	  gcc_assert (offset == VTI (bb)->out.stack_adjust);
	}

      bb = last_bb;

      /* Flush non-preserved cselib state at the end of each extended
	 region so the next region starts fresh.  */
      if (MAY_HAVE_DEBUG_BIND_INSNS)
	{
	  cselib_preserve_only_values ();
	  cselib_reset_table (cselib_get_next_uid ());
	  cselib_record_sets_hook = NULL;
	}
    }

  hard_frame_pointer_adjustment = -1;
  VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true;
  cfa_base_rtx = NULL_RTX;
  return true;
}
10357 | |
/* This is *not* reset after each function. It gives each
   NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation
   a unique label number. */

/* Incremented by delete_vta_debug_insn each time a debug bind for a
   named label is turned into a NOTE_INSN_DELETED_DEBUG_LABEL.  */
static int debug_label_num = 1;
10363 | |
10364 | /* Remove from the insn stream a single debug insn used for |
10365 | variable tracking at assignments. */ |
10366 | |
10367 | static inline void |
10368 | delete_vta_debug_insn (rtx_insn *insn) |
10369 | { |
10370 | if (DEBUG_MARKER_INSN_P (insn)) |
10371 | { |
10372 | reemit_marker_as_note (insn); |
10373 | return; |
10374 | } |
10375 | |
10376 | tree decl = INSN_VAR_LOCATION_DECL (insn); |
10377 | if (TREE_CODE (decl) == LABEL_DECL |
10378 | && DECL_NAME (decl) |
10379 | && !DECL_RTL_SET_P (decl)) |
10380 | { |
10381 | PUT_CODE (insn, NOTE); |
10382 | NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL; |
10383 | NOTE_DELETED_LABEL_NAME (insn) |
10384 | = IDENTIFIER_POINTER (DECL_NAME (decl)); |
10385 | SET_DECL_RTL (decl, insn); |
10386 | CODE_LABEL_NUMBER (insn) = debug_label_num++; |
10387 | } |
10388 | else |
10389 | delete_insn (insn); |
10390 | } |
10391 | |
10392 | /* Remove from the insn stream all debug insns used for variable |
10393 | tracking at assignments. USE_CFG should be false if the cfg is no |
10394 | longer usable. */ |
10395 | |
10396 | void |
10397 | delete_vta_debug_insns (bool use_cfg) |
10398 | { |
10399 | basic_block bb; |
10400 | rtx_insn *insn, *next; |
10401 | |
10402 | if (!MAY_HAVE_DEBUG_INSNS) |
10403 | return; |
10404 | |
10405 | if (use_cfg) |
10406 | FOR_EACH_BB_FN (bb, cfun) |
10407 | { |
10408 | FOR_BB_INSNS_SAFE (bb, insn, next) |
10409 | if (DEBUG_INSN_P (insn)) |
10410 | delete_vta_debug_insn (insn); |
10411 | } |
10412 | else |
10413 | for (insn = get_insns (); insn; insn = next) |
10414 | { |
10415 | next = NEXT_INSN (insn); |
10416 | if (DEBUG_INSN_P (insn)) |
10417 | delete_vta_debug_insn (insn); |
10418 | } |
10419 | } |
10420 | |
10421 | /* Run a fast, BB-local only version of var tracking, to take care of |
10422 | information that we don't do global analysis on, such that not all |
10423 | information is lost. If SKIPPED holds, we're skipping the global |
10424 | pass entirely, so we should try to use information it would have |
10425 | handled as well.. */ |
10426 | |
10427 | static void |
10428 | vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED) |
10429 | { |
10430 | /* ??? Just skip it all for now. */ |
10431 | delete_vta_debug_insns (use_cfg: true); |
10432 | } |
10433 | |
10434 | /* Free the data structures needed for variable tracking. */ |
10435 | |
10436 | static void |
10437 | vt_finalize (void) |
10438 | { |
10439 | basic_block bb; |
10440 | |
10441 | FOR_EACH_BB_FN (bb, cfun) |
10442 | { |
10443 | VTI (bb)->mos.release (); |
10444 | } |
10445 | |
10446 | FOR_ALL_BB_FN (bb, cfun) |
10447 | { |
10448 | dataflow_set_destroy (set: &VTI (bb)->in); |
10449 | dataflow_set_destroy (set: &VTI (bb)->out); |
10450 | if (VTI (bb)->permp) |
10451 | { |
10452 | dataflow_set_destroy (VTI (bb)->permp); |
10453 | XDELETE (VTI (bb)->permp); |
10454 | } |
10455 | } |
10456 | free_aux_for_blocks (); |
10457 | delete empty_shared_hash->htab; |
10458 | empty_shared_hash->htab = NULL; |
10459 | delete changed_variables; |
10460 | changed_variables = NULL; |
10461 | attrs_pool.release (); |
10462 | var_pool.release (); |
10463 | location_chain_pool.release (); |
10464 | shared_hash_pool.release (); |
10465 | |
10466 | if (MAY_HAVE_DEBUG_BIND_INSNS) |
10467 | { |
10468 | if (global_get_addr_cache) |
10469 | delete global_get_addr_cache; |
10470 | global_get_addr_cache = NULL; |
10471 | loc_exp_dep_pool.release (); |
10472 | valvar_pool.release (); |
10473 | preserved_values.release (); |
10474 | cselib_finish (); |
10475 | BITMAP_FREE (scratch_regs); |
10476 | scratch_regs = NULL; |
10477 | } |
10478 | |
10479 | #ifdef HAVE_window_save |
10480 | vec_free (windowed_parm_regs); |
10481 | #endif |
10482 | |
10483 | if (vui_vec) |
10484 | XDELETEVEC (vui_vec); |
10485 | vui_vec = NULL; |
10486 | vui_allocated = 0; |
10487 | } |
10488 | |
10489 | /* The entry point to variable tracking pass. */ |
10490 | |
10491 | static inline unsigned int |
10492 | variable_tracking_main_1 (void) |
10493 | { |
10494 | bool success; |
10495 | |
10496 | /* We won't be called as a separate pass if flag_var_tracking is not |
10497 | set, but final may call us to turn debug markers into notes. */ |
10498 | if ((!flag_var_tracking && MAY_HAVE_DEBUG_INSNS) |
10499 | || flag_var_tracking_assignments < 0 |
10500 | /* Var-tracking right now assumes the IR doesn't contain |
10501 | any pseudos at this point. */ |
10502 | || targetm.no_register_allocation) |
10503 | { |
10504 | delete_vta_debug_insns (use_cfg: true); |
10505 | return 0; |
10506 | } |
10507 | |
10508 | if (!flag_var_tracking) |
10509 | return 0; |
10510 | |
10511 | if (n_basic_blocks_for_fn (cfun) > 500 |
10512 | && n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20) |
10513 | { |
10514 | vt_debug_insns_local (skipped: true); |
10515 | return 0; |
10516 | } |
10517 | |
10518 | if (!vt_initialize ()) |
10519 | { |
10520 | vt_finalize (); |
10521 | vt_debug_insns_local (skipped: true); |
10522 | return 0; |
10523 | } |
10524 | |
10525 | success = vt_find_locations (); |
10526 | |
10527 | if (!success && flag_var_tracking_assignments > 0) |
10528 | { |
10529 | vt_finalize (); |
10530 | |
10531 | delete_vta_debug_insns (use_cfg: true); |
10532 | |
10533 | /* This is later restored by our caller. */ |
10534 | flag_var_tracking_assignments = 0; |
10535 | |
10536 | success = vt_initialize (); |
10537 | gcc_assert (success); |
10538 | |
10539 | success = vt_find_locations (); |
10540 | } |
10541 | |
10542 | if (!success) |
10543 | { |
10544 | vt_finalize (); |
10545 | vt_debug_insns_local (skipped: false); |
10546 | return 0; |
10547 | } |
10548 | |
10549 | if (dump_file && (dump_flags & TDF_DETAILS)) |
10550 | { |
10551 | dump_dataflow_sets (); |
10552 | dump_reg_info (dump_file); |
10553 | dump_flow_info (dump_file, dump_flags); |
10554 | } |
10555 | |
10556 | timevar_push (tv: TV_VAR_TRACKING_EMIT); |
10557 | vt_emit_notes (); |
10558 | timevar_pop (tv: TV_VAR_TRACKING_EMIT); |
10559 | |
10560 | vt_finalize (); |
10561 | vt_debug_insns_local (skipped: false); |
10562 | return 0; |
10563 | } |
10564 | |
10565 | unsigned int |
10566 | variable_tracking_main (void) |
10567 | { |
10568 | unsigned int ret; |
10569 | int save = flag_var_tracking_assignments; |
10570 | |
10571 | ret = variable_tracking_main_1 (); |
10572 | |
10573 | flag_var_tracking_assignments = save; |
10574 | |
10575 | return ret; |
10576 | } |
10577 | |
namespace {

/* Metadata describing the "vartrack" RTL pass to the pass manager.  */
const pass_data pass_data_variable_tracking =
{
  .type: RTL_PASS, /* type */
  .name: "vartrack" , /* name */
  .optinfo_flags: OPTGROUP_NONE, /* optinfo_flags */
  .tv_id: TV_VAR_TRACKING, /* tv_id */
  .properties_required: 0, /* properties_required */
  .properties_provided: 0, /* properties_provided */
  .properties_destroyed: 0, /* properties_destroyed */
  .todo_flags_start: 0, /* todo_flags_start */
  .todo_flags_finish: 0, /* todo_flags_finish */
};

/* Pass object wrapping variable_tracking_main.  */
class pass_variable_tracking : public rtl_opt_pass
{
public:
  pass_variable_tracking (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_variable_tracking, ctxt)
  {}

  /* opt_pass methods: */
  /* Run only when var tracking is enabled and the target does not
     delay it to a later point (targetm.delay_vartrack).  */
  bool gate (function *) final override
  {
    return (flag_var_tracking && !targetm.delay_vartrack);
  }

  unsigned int execute (function *) final override
  {
    return variable_tracking_main ();
  }

}; // class pass_variable_tracking

} // anon namespace
10614 | |
/* Create an instance of the variable tracking pass for the pass
   manager.  */

rtl_opt_pass *
make_pass_variable_tracking (gcc::context *ctxt)
{
  return new pass_variable_tracking (ctxt);
}
10620 | |