Mercurial > hg > CbC > CbC_gcc
comparison gcc/var-tracking.c @ 111:04ced10e8804
gcc 7
author | kono |
---|---|
date | Fri, 27 Oct 2017 22:46:09 +0900 |
parents | f6334be47118 |
children | 84e7813d76e9 |
comparison
equal
deleted
inserted
replaced
68:561a7518be6b | 111:04ced10e8804 |
---|---|
1 /* Variable tracking routines for the GNU compiler. | 1 /* Variable tracking routines for the GNU compiler. |
2 Copyright (C) 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011 | 2 Copyright (C) 2002-2017 Free Software Foundation, Inc. |
3 Free Software Foundation, Inc. | |
4 | 3 |
5 This file is part of GCC. | 4 This file is part of GCC. |
6 | 5 |
7 GCC is free software; you can redistribute it and/or modify it | 6 GCC is free software; you can redistribute it and/or modify it |
8 under the terms of the GNU General Public License as published by | 7 under the terms of the GNU General Public License as published by |
32 references in instructions), for call insns and for stack adjustments | 31 references in instructions), for call insns and for stack adjustments |
33 separately for each basic block and saves them to an array of micro | 32 separately for each basic block and saves them to an array of micro |
34 operations. | 33 operations. |
35 The micro operations of one instruction are ordered so that | 34 The micro operations of one instruction are ordered so that |
36 pre-modifying stack adjustment < use < use with no var < call insn < | 35 pre-modifying stack adjustment < use < use with no var < call insn < |
37 < set < clobber < post-modifying stack adjustment | 36 < clobber < set < post-modifying stack adjustment |
38 | 37 |
39 Then, a forward dataflow analysis is performed to find out how locations | 38 Then, a forward dataflow analysis is performed to find out how locations |
40 of variables change through code and to propagate the variable locations | 39 of variables change through code and to propagate the variable locations |
41 along control flow graph. | 40 along control flow graph. |
42 The IN set for basic block BB is computed as a union of OUT sets of BB's | 41 The IN set for basic block BB is computed as a union of OUT sets of BB's |
87 */ | 86 */ |
88 | 87 |
89 #include "config.h" | 88 #include "config.h" |
90 #include "system.h" | 89 #include "system.h" |
91 #include "coretypes.h" | 90 #include "coretypes.h" |
92 #include "tm.h" | 91 #include "backend.h" |
92 #include "target.h" | |
93 #include "rtl.h" | 93 #include "rtl.h" |
94 #include "tree.h" | 94 #include "tree.h" |
95 #include "cfghooks.h" | |
96 #include "alloc-pool.h" | |
97 #include "tree-pass.h" | |
98 #include "memmodel.h" | |
95 #include "tm_p.h" | 99 #include "tm_p.h" |
96 #include "hard-reg-set.h" | |
97 #include "basic-block.h" | |
98 #include "flags.h" | |
99 #include "output.h" | |
100 #include "insn-config.h" | 100 #include "insn-config.h" |
101 #include "regs.h" | |
102 #include "emit-rtl.h" | |
103 #include "recog.h" | |
104 #include "diagnostic.h" | |
105 #include "varasm.h" | |
106 #include "stor-layout.h" | |
107 #include "cfgrtl.h" | |
108 #include "cfganal.h" | |
101 #include "reload.h" | 109 #include "reload.h" |
102 #include "sbitmap.h" | 110 #include "calls.h" |
103 #include "alloc-pool.h" | 111 #include "tree-dfa.h" |
104 #include "fibheap.h" | 112 #include "tree-ssa.h" |
105 #include "hashtab.h" | |
106 #include "regs.h" | |
107 #include "expr.h" | |
108 #include "timevar.h" | |
109 #include "tree-pass.h" | |
110 #include "tree-flow.h" | |
111 #include "cselib.h" | 113 #include "cselib.h" |
112 #include "target.h" | |
113 #include "params.h" | 114 #include "params.h" |
114 #include "diagnostic.h" | |
115 #include "tree-pretty-print.h" | 115 #include "tree-pretty-print.h" |
116 #include "pointer-set.h" | 116 #include "rtl-iter.h" |
117 #include "recog.h" | 117 #include "fibonacci_heap.h" |
118 | |
119 typedef fibonacci_heap <long, basic_block_def> bb_heap_t; | |
120 typedef fibonacci_node <long, basic_block_def> bb_heap_node_t; | |
118 | 121 |
119 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code | 122 /* var-tracking.c assumes that tree code with the same value as VALUE rtx code |
120 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl. | 123 has no chance to appear in REG_EXPR/MEM_EXPRs and isn't a decl. |
121 Currently the value is the same as IDENTIFIER_NODE, which has such | 124 Currently the value is the same as IDENTIFIER_NODE, which has such |
122 a property. If this compile time assertion ever fails, make sure that | 125 a property. If this compile time assertion ever fails, make sure that |
164 EMIT_NOTE_AFTER_INSN, | 167 EMIT_NOTE_AFTER_INSN, |
165 EMIT_NOTE_AFTER_CALL_INSN | 168 EMIT_NOTE_AFTER_CALL_INSN |
166 }; | 169 }; |
167 | 170 |
168 /* Structure holding information about micro operation. */ | 171 /* Structure holding information about micro operation. */ |
169 typedef struct micro_operation_def | 172 struct micro_operation |
170 { | 173 { |
171 /* Type of micro operation. */ | 174 /* Type of micro operation. */ |
172 enum micro_operation_type type; | 175 enum micro_operation_type type; |
173 | 176 |
174 /* The instruction which the micro operation is in, for MO_USE, | 177 /* The instruction which the micro operation is in, for MO_USE, |
175 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent | 178 MO_USE_NO_VAR, MO_CALL and MO_ADJUST, or the subsequent |
176 instruction or note in the original flow (before any var-tracking | 179 instruction or note in the original flow (before any var-tracking |
177 notes are inserted, to simplify emission of notes), for MO_SET | 180 notes are inserted, to simplify emission of notes), for MO_SET |
178 and MO_CLOBBER. */ | 181 and MO_CLOBBER. */ |
179 rtx insn; | 182 rtx_insn *insn; |
180 | 183 |
181 union { | 184 union { |
182 /* Location. For MO_SET and MO_COPY, this is the SET that | 185 /* Location. For MO_SET and MO_COPY, this is the SET that |
183 performs the assignment, if known, otherwise it is the target | 186 performs the assignment, if known, otherwise it is the target |
184 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a | 187 of the assignment. For MO_VAL_USE and MO_VAL_SET, it is a |
188 rtx loc; | 191 rtx loc; |
189 | 192 |
190 /* Stack adjustment. */ | 193 /* Stack adjustment. */ |
191 HOST_WIDE_INT adjust; | 194 HOST_WIDE_INT adjust; |
192 } u; | 195 } u; |
193 } micro_operation; | 196 }; |
194 | 197 |
195 DEF_VEC_O(micro_operation); | |
196 DEF_VEC_ALLOC_O(micro_operation,heap); | |
197 | 198 |
198 /* A declaration of a variable, or an RTL value being handled like a | 199 /* A declaration of a variable, or an RTL value being handled like a |
199 declaration. */ | 200 declaration. */ |
200 typedef void *decl_or_value; | 201 typedef void *decl_or_value; |
201 | 202 |
202 /* Structure for passing some other parameters to function | 203 /* Return true if a decl_or_value DV is a DECL or NULL. */ |
203 emit_note_insn_var_location. */ | 204 static inline bool |
204 typedef struct emit_note_data_def | 205 dv_is_decl_p (decl_or_value dv) |
205 { | 206 { |
206 /* The instruction which the note will be emitted before/after. */ | 207 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE; |
207 rtx insn; | 208 } |
208 | 209 |
209 /* Where the note will be emitted (before/after insn)? */ | 210 /* Return true if a decl_or_value is a VALUE rtl. */ |
210 enum emit_note_where where; | 211 static inline bool |
211 | 212 dv_is_value_p (decl_or_value dv) |
212 /* The variables and values active at this point. */ | 213 { |
213 htab_t vars; | 214 return dv && !dv_is_decl_p (dv); |
214 } emit_note_data; | 215 } |
216 | |
217 /* Return the decl in the decl_or_value. */ | |
218 static inline tree | |
219 dv_as_decl (decl_or_value dv) | |
220 { | |
221 gcc_checking_assert (dv_is_decl_p (dv)); | |
222 return (tree) dv; | |
223 } | |
224 | |
225 /* Return the value in the decl_or_value. */ | |
226 static inline rtx | |
227 dv_as_value (decl_or_value dv) | |
228 { | |
229 gcc_checking_assert (dv_is_value_p (dv)); | |
230 return (rtx)dv; | |
231 } | |
232 | |
233 /* Return the opaque pointer in the decl_or_value. */ | |
234 static inline void * | |
235 dv_as_opaque (decl_or_value dv) | |
236 { | |
237 return dv; | |
238 } | |
239 | |
215 | 240 |
216 /* Description of location of a part of a variable. The content of a physical | 241 /* Description of location of a part of a variable. The content of a physical |
217 register is described by a chain of these structures. | 242 register is described by a chain of these structures. |
218 The chains are pretty short (usually 1 or 2 elements) and thus | 243 The chains are pretty short (usually 1 or 2 elements) and thus |
219 chain is the best data structure. */ | 244 chain is the best data structure. */ |
220 typedef struct attrs_def | 245 struct attrs |
221 { | 246 { |
222 /* Pointer to next member of the list. */ | 247 /* Pointer to next member of the list. */ |
223 struct attrs_def *next; | 248 attrs *next; |
224 | 249 |
225 /* The rtx of register. */ | 250 /* The rtx of register. */ |
226 rtx loc; | 251 rtx loc; |
227 | 252 |
228 /* The declaration corresponding to LOC. */ | 253 /* The declaration corresponding to LOC. */ |
229 decl_or_value dv; | 254 decl_or_value dv; |
230 | 255 |
231 /* Offset from start of DECL. */ | 256 /* Offset from start of DECL. */ |
232 HOST_WIDE_INT offset; | 257 HOST_WIDE_INT offset; |
233 } *attrs; | 258 }; |
259 | |
260 /* Structure for chaining the locations. */ | |
261 struct location_chain | |
262 { | |
263 /* Next element in the chain. */ | |
264 location_chain *next; | |
265 | |
266 /* The location (REG, MEM or VALUE). */ | |
267 rtx loc; | |
268 | |
269 /* The "value" stored in this location. */ | |
270 rtx set_src; | |
271 | |
272 /* Initialized? */ | |
273 enum var_init_status init; | |
274 }; | |
275 | |
276 /* A vector of loc_exp_dep holds the active dependencies of a one-part | |
277 DV on VALUEs, i.e., the VALUEs expanded so as to form the current | |
278 location of DV. Each entry is also part of VALUE' s linked-list of | |
279 backlinks back to DV. */ | |
280 struct loc_exp_dep | |
281 { | |
282 /* The dependent DV. */ | |
283 decl_or_value dv; | |
284 /* The dependency VALUE or DECL_DEBUG. */ | |
285 rtx value; | |
286 /* The next entry in VALUE's backlinks list. */ | |
287 struct loc_exp_dep *next; | |
288 /* A pointer to the pointer to this entry (head or prev's next) in | |
289 the doubly-linked list. */ | |
290 struct loc_exp_dep **pprev; | |
291 }; | |
292 | |
293 | |
294 /* This data structure holds information about the depth of a variable | |
295 expansion. */ | |
296 struct expand_depth | |
297 { | |
298 /* This measures the complexity of the expanded expression. It | |
299 grows by one for each level of expansion that adds more than one | |
300 operand. */ | |
301 int complexity; | |
302 /* This counts the number of ENTRY_VALUE expressions in an | |
303 expansion. We want to minimize their use. */ | |
304 int entryvals; | |
305 }; | |
306 | |
307 /* This data structure is allocated for one-part variables at the time | |
308 of emitting notes. */ | |
309 struct onepart_aux | |
310 { | |
311 /* Doubly-linked list of dependent DVs. These are DVs whose cur_loc | |
312 computation used the expansion of this variable, and that ought | |
313 to be notified should this variable change. If the DV's cur_loc | |
314 expanded to NULL, all components of the loc list are regarded as | |
315 active, so that any changes in them give us a chance to get a | |
316 location. Otherwise, only components of the loc that expanded to | |
317 non-NULL are regarded as active dependencies. */ | |
318 loc_exp_dep *backlinks; | |
319 /* This holds the LOC that was expanded into cur_loc. We need only | |
320 mark a one-part variable as changed if the FROM loc is removed, | |
321 or if it has no known location and a loc is added, or if it gets | |
322 a change notification from any of its active dependencies. */ | |
323 rtx from; | |
324 /* The depth of the cur_loc expression. */ | |
325 expand_depth depth; | |
326 /* Dependencies actively used when expand FROM into cur_loc. */ | |
327 vec<loc_exp_dep, va_heap, vl_embed> deps; | |
328 }; | |
329 | |
330 /* Structure describing one part of variable. */ | |
331 struct variable_part | |
332 { | |
333 /* Chain of locations of the part. */ | |
334 location_chain *loc_chain; | |
335 | |
336 /* Location which was last emitted to location list. */ | |
337 rtx cur_loc; | |
338 | |
339 union variable_aux | |
340 { | |
341 /* The offset in the variable, if !var->onepart. */ | |
342 HOST_WIDE_INT offset; | |
343 | |
344 /* Pointer to auxiliary data, if var->onepart and emit_notes. */ | |
345 struct onepart_aux *onepaux; | |
346 } aux; | |
347 }; | |
348 | |
349 /* Maximum number of location parts. */ | |
350 #define MAX_VAR_PARTS 16 | |
351 | |
352 /* Enumeration type used to discriminate various types of one-part | |
353 variables. */ | |
354 enum onepart_enum | |
355 { | |
356 /* Not a one-part variable. */ | |
357 NOT_ONEPART = 0, | |
358 /* A one-part DECL that is not a DEBUG_EXPR_DECL. */ | |
359 ONEPART_VDECL = 1, | |
360 /* A DEBUG_EXPR_DECL. */ | |
361 ONEPART_DEXPR = 2, | |
362 /* A VALUE. */ | |
363 ONEPART_VALUE = 3 | |
364 }; | |
365 | |
366 /* Structure describing where the variable is located. */ | |
367 struct variable | |
368 { | |
369 /* The declaration of the variable, or an RTL value being handled | |
370 like a declaration. */ | |
371 decl_or_value dv; | |
372 | |
373 /* Reference count. */ | |
374 int refcount; | |
375 | |
376 /* Number of variable parts. */ | |
377 char n_var_parts; | |
378 | |
379 /* What type of DV this is, according to enum onepart_enum. */ | |
380 ENUM_BITFIELD (onepart_enum) onepart : CHAR_BIT; | |
381 | |
382 /* True if this variable_def struct is currently in the | |
383 changed_variables hash table. */ | |
384 bool in_changed_variables; | |
385 | |
386 /* The variable parts. */ | |
387 variable_part var_part[1]; | |
388 }; | |
389 | |
390 /* Pointer to the BB's information specific to variable tracking pass. */ | |
391 #define VTI(BB) ((variable_tracking_info *) (BB)->aux) | |
392 | |
393 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */ | |
394 #define INT_MEM_OFFSET(mem) (MEM_OFFSET_KNOWN_P (mem) ? MEM_OFFSET (mem) : 0) | |
395 | |
396 #if CHECKING_P && (GCC_VERSION >= 2007) | |
397 | |
398 /* Access VAR's Ith part's offset, checking that it's not a one-part | |
399 variable. */ | |
400 #define VAR_PART_OFFSET(var, i) __extension__ \ | |
401 (*({ variable *const __v = (var); \ | |
402 gcc_checking_assert (!__v->onepart); \ | |
403 &__v->var_part[(i)].aux.offset; })) | |
404 | |
405 /* Access VAR's one-part auxiliary data, checking that it is a | |
406 one-part variable. */ | |
407 #define VAR_LOC_1PAUX(var) __extension__ \ | |
408 (*({ variable *const __v = (var); \ | |
409 gcc_checking_assert (__v->onepart); \ | |
410 &__v->var_part[0].aux.onepaux; })) | |
411 | |
412 #else | |
413 #define VAR_PART_OFFSET(var, i) ((var)->var_part[(i)].aux.offset) | |
414 #define VAR_LOC_1PAUX(var) ((var)->var_part[0].aux.onepaux) | |
415 #endif | |
416 | |
417 /* These are accessor macros for the one-part auxiliary data. When | |
418 convenient for users, they're guarded by tests that the data was | |
419 allocated. */ | |
420 #define VAR_LOC_DEP_LST(var) (VAR_LOC_1PAUX (var) \ | |
421 ? VAR_LOC_1PAUX (var)->backlinks \ | |
422 : NULL) | |
423 #define VAR_LOC_DEP_LSTP(var) (VAR_LOC_1PAUX (var) \ | |
424 ? &VAR_LOC_1PAUX (var)->backlinks \ | |
425 : NULL) | |
426 #define VAR_LOC_FROM(var) (VAR_LOC_1PAUX (var)->from) | |
427 #define VAR_LOC_DEPTH(var) (VAR_LOC_1PAUX (var)->depth) | |
428 #define VAR_LOC_DEP_VEC(var) (VAR_LOC_1PAUX (var) \ | |
429 ? &VAR_LOC_1PAUX (var)->deps \ | |
430 : NULL) | |
431 | |
432 | |
433 | |
434 typedef unsigned int dvuid; | |
435 | |
436 /* Return the uid of DV. */ | |
437 | |
438 static inline dvuid | |
439 dv_uid (decl_or_value dv) | |
440 { | |
441 if (dv_is_value_p (dv)) | |
442 return CSELIB_VAL_PTR (dv_as_value (dv))->uid; | |
443 else | |
444 return DECL_UID (dv_as_decl (dv)); | |
445 } | |
446 | |
447 /* Compute the hash from the uid. */ | |
448 | |
449 static inline hashval_t | |
450 dv_uid2hash (dvuid uid) | |
451 { | |
452 return uid; | |
453 } | |
454 | |
455 /* The hash function for a mask table in a shared_htab chain. */ | |
456 | |
457 static inline hashval_t | |
458 dv_htab_hash (decl_or_value dv) | |
459 { | |
460 return dv_uid2hash (dv_uid (dv)); | |
461 } | |
462 | |
463 static void variable_htab_free (void *); | |
464 | |
465 /* Variable hashtable helpers. */ | |
466 | |
467 struct variable_hasher : pointer_hash <variable> | |
468 { | |
469 typedef void *compare_type; | |
470 static inline hashval_t hash (const variable *); | |
471 static inline bool equal (const variable *, const void *); | |
472 static inline void remove (variable *); | |
473 }; | |
474 | |
475 /* The hash function for variable_htab, computes the hash value | |
476 from the declaration of variable X. */ | |
477 | |
478 inline hashval_t | |
479 variable_hasher::hash (const variable *v) | |
480 { | |
481 return dv_htab_hash (v->dv); | |
482 } | |
483 | |
484 /* Compare the declaration of variable X with declaration Y. */ | |
485 | |
486 inline bool | |
487 variable_hasher::equal (const variable *v, const void *y) | |
488 { | |
489 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y); | |
490 | |
491 return (dv_as_opaque (v->dv) == dv_as_opaque (dv)); | |
492 } | |
493 | |
494 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */ | |
495 | |
496 inline void | |
497 variable_hasher::remove (variable *var) | |
498 { | |
499 variable_htab_free (var); | |
500 } | |
501 | |
502 typedef hash_table<variable_hasher> variable_table_type; | |
503 typedef variable_table_type::iterator variable_iterator_type; | |
504 | |
505 /* Structure for passing some other parameters to function | |
506 emit_note_insn_var_location. */ | |
507 struct emit_note_data | |
508 { | |
509 /* The instruction which the note will be emitted before/after. */ | |
510 rtx_insn *insn; | |
511 | |
512 /* Where the note will be emitted (before/after insn)? */ | |
513 enum emit_note_where where; | |
514 | |
515 /* The variables and values active at this point. */ | |
516 variable_table_type *vars; | |
517 }; | |
234 | 518 |
235 /* Structure holding a refcounted hash table. If refcount > 1, | 519 /* Structure holding a refcounted hash table. If refcount > 1, |
236 it must be first unshared before modified. */ | 520 it must be first unshared before modified. */ |
237 typedef struct shared_hash_def | 521 struct shared_hash |
238 { | 522 { |
239 /* Reference count. */ | 523 /* Reference count. */ |
240 int refcount; | 524 int refcount; |
241 | 525 |
242 /* Actual hash table. */ | 526 /* Actual hash table. */ |
243 htab_t htab; | 527 variable_table_type *htab; |
244 } *shared_hash; | 528 }; |
245 | 529 |
246 /* Structure holding the IN or OUT set for a basic block. */ | 530 /* Structure holding the IN or OUT set for a basic block. */ |
247 typedef struct dataflow_set_def | 531 struct dataflow_set |
248 { | 532 { |
249 /* Adjustment of stack offset. */ | 533 /* Adjustment of stack offset. */ |
250 HOST_WIDE_INT stack_adjust; | 534 HOST_WIDE_INT stack_adjust; |
251 | 535 |
252 /* Attributes for registers (lists of attrs). */ | 536 /* Attributes for registers (lists of attrs). */ |
253 attrs regs[FIRST_PSEUDO_REGISTER]; | 537 attrs *regs[FIRST_PSEUDO_REGISTER]; |
254 | 538 |
255 /* Variable locations. */ | 539 /* Variable locations. */ |
256 shared_hash vars; | 540 shared_hash *vars; |
257 | 541 |
258 /* Vars that is being traversed. */ | 542 /* Vars that is being traversed. */ |
259 shared_hash traversed_vars; | 543 shared_hash *traversed_vars; |
260 } dataflow_set; | 544 }; |
261 | 545 |
262 /* The structure (one for each basic block) containing the information | 546 /* The structure (one for each basic block) containing the information |
263 needed for variable tracking. */ | 547 needed for variable tracking. */ |
264 typedef struct variable_tracking_info_def | 548 struct variable_tracking_info |
265 { | 549 { |
266 /* The vector of micro operations. */ | 550 /* The vector of micro operations. */ |
267 VEC(micro_operation, heap) *mos; | 551 vec<micro_operation> mos; |
268 | 552 |
269 /* The IN and OUT set for dataflow analysis. */ | 553 /* The IN and OUT set for dataflow analysis. */ |
270 dataflow_set in; | 554 dataflow_set in; |
271 dataflow_set out; | 555 dataflow_set out; |
272 | 556 |
280 bool visited; | 564 bool visited; |
281 | 565 |
282 /* Has the block been flooded in VTA? */ | 566 /* Has the block been flooded in VTA? */ |
283 bool flooded; | 567 bool flooded; |
284 | 568 |
285 } *variable_tracking_info; | 569 }; |
286 | |
287 /* Structure for chaining the locations. */ | |
288 typedef struct location_chain_def | |
289 { | |
290 /* Next element in the chain. */ | |
291 struct location_chain_def *next; | |
292 | |
293 /* The location (REG, MEM or VALUE). */ | |
294 rtx loc; | |
295 | |
296 /* The "value" stored in this location. */ | |
297 rtx set_src; | |
298 | |
299 /* Initialized? */ | |
300 enum var_init_status init; | |
301 } *location_chain; | |
302 | |
303 /* Structure describing one part of variable. */ | |
304 typedef struct variable_part_def | |
305 { | |
306 /* Chain of locations of the part. */ | |
307 location_chain loc_chain; | |
308 | |
309 /* Location which was last emitted to location list. */ | |
310 rtx cur_loc; | |
311 | |
312 /* The offset in the variable. */ | |
313 HOST_WIDE_INT offset; | |
314 } variable_part; | |
315 | |
316 /* Maximum number of location parts. */ | |
317 #define MAX_VAR_PARTS 16 | |
318 | |
319 /* Structure describing where the variable is located. */ | |
320 typedef struct variable_def | |
321 { | |
322 /* The declaration of the variable, or an RTL value being handled | |
323 like a declaration. */ | |
324 decl_or_value dv; | |
325 | |
326 /* Reference count. */ | |
327 int refcount; | |
328 | |
329 /* Number of variable parts. */ | |
330 char n_var_parts; | |
331 | |
332 /* True if this variable changed (any of its) cur_loc fields | |
333 during the current emit_notes_for_changes resp. | |
334 emit_notes_for_differences call. */ | |
335 bool cur_loc_changed; | |
336 | |
337 /* True if this variable_def struct is currently in the | |
338 changed_variables hash table. */ | |
339 bool in_changed_variables; | |
340 | |
341 /* The variable parts. */ | |
342 variable_part var_part[1]; | |
343 } *variable; | |
344 typedef const struct variable_def *const_variable; | |
345 | |
346 /* Structure for chaining backlinks from referenced VALUEs to | |
347 DVs that are referencing them. */ | |
348 typedef struct value_chain_def | |
349 { | |
350 /* Next value_chain entry. */ | |
351 struct value_chain_def *next; | |
352 | |
353 /* The declaration of the variable, or an RTL value | |
354 being handled like a declaration, whose var_parts[0].loc_chain | |
355 references the VALUE owning this value_chain. */ | |
356 decl_or_value dv; | |
357 | |
358 /* Reference count. */ | |
359 int refcount; | |
360 } *value_chain; | |
361 typedef const struct value_chain_def *const_value_chain; | |
362 | |
363 /* Pointer to the BB's information specific to variable tracking pass. */ | |
364 #define VTI(BB) ((variable_tracking_info) (BB)->aux) | |
365 | |
366 /* Macro to access MEM_OFFSET as an HOST_WIDE_INT. Evaluates MEM twice. */ | |
367 #define INT_MEM_OFFSET(mem) (MEM_OFFSET (mem) ? INTVAL (MEM_OFFSET (mem)) : 0) | |
368 | 570 |
369 /* Alloc pool for struct attrs_def. */ | 571 /* Alloc pool for struct attrs_def. */ |
370 static alloc_pool attrs_pool; | 572 object_allocator<attrs> attrs_pool ("attrs pool"); |
371 | 573 |
372 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */ | 574 /* Alloc pool for struct variable_def with MAX_VAR_PARTS entries. */ |
373 static alloc_pool var_pool; | 575 |
576 static pool_allocator var_pool | |
577 ("variable_def pool", sizeof (variable) + | |
578 (MAX_VAR_PARTS - 1) * sizeof (((variable *)NULL)->var_part[0])); | |
374 | 579 |
375 /* Alloc pool for struct variable_def with a single var_part entry. */ | 580 /* Alloc pool for struct variable_def with a single var_part entry. */ |
376 static alloc_pool valvar_pool; | 581 static pool_allocator valvar_pool |
377 | 582 ("small variable_def pool", sizeof (variable)); |
378 /* Alloc pool for struct location_chain_def. */ | 583 |
379 static alloc_pool loc_chain_pool; | 584 /* Alloc pool for struct location_chain. */ |
380 | 585 static object_allocator<location_chain> location_chain_pool |
381 /* Alloc pool for struct shared_hash_def. */ | 586 ("location_chain pool"); |
382 static alloc_pool shared_hash_pool; | 587 |
383 | 588 /* Alloc pool for struct shared_hash. */ |
384 /* Alloc pool for struct value_chain_def. */ | 589 static object_allocator<shared_hash> shared_hash_pool ("shared_hash pool"); |
385 static alloc_pool value_chain_pool; | 590 |
591 /* Alloc pool for struct loc_exp_dep_s for NOT_ONEPART variables. */ | |
592 object_allocator<loc_exp_dep> loc_exp_dep_pool ("loc_exp_dep pool"); | |
386 | 593 |
387 /* Changed variables, notes will be emitted for them. */ | 594 /* Changed variables, notes will be emitted for them. */ |
388 static htab_t changed_variables; | 595 static variable_table_type *changed_variables; |
389 | |
390 /* Links from VALUEs to DVs referencing them in their current loc_chains. */ | |
391 static htab_t value_chains; | |
392 | 596 |
393 /* Shall notes be emitted? */ | 597 /* Shall notes be emitted? */ |
394 static bool emit_notes; | 598 static bool emit_notes; |
395 | 599 |
600 /* Values whose dynamic location lists have gone empty, but whose | |
601 cselib location lists are still usable. Use this to hold the | |
602 current location, the backlinks, etc, during emit_notes. */ | |
603 static variable_table_type *dropped_values; | |
604 | |
396 /* Empty shared hashtable. */ | 605 /* Empty shared hashtable. */ |
397 static shared_hash empty_shared_hash; | 606 static shared_hash *empty_shared_hash; |
398 | 607 |
399 /* Scratch register bitmap used by cselib_expand_value_rtx. */ | 608 /* Scratch register bitmap used by cselib_expand_value_rtx. */ |
400 static bitmap scratch_regs = NULL; | 609 static bitmap scratch_regs = NULL; |
610 | |
611 #ifdef HAVE_window_save | |
612 struct GTY(()) parm_reg { | |
613 rtx outgoing; | |
614 rtx incoming; | |
615 }; | |
616 | |
617 | |
618 /* Vector of windowed parameter registers, if any. */ | |
619 static vec<parm_reg, va_gc> *windowed_parm_regs = NULL; | |
620 #endif | |
401 | 621 |
402 /* Variable used to tell whether cselib_process_insn called our hook. */ | 622 /* Variable used to tell whether cselib_process_insn called our hook. */ |
403 static bool cselib_hook_called; | 623 static bool cselib_hook_called; |
404 | 624 |
405 /* Local function prototypes. */ | 625 /* Local function prototypes. */ |
406 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *, | 626 static void stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *, |
407 HOST_WIDE_INT *); | 627 HOST_WIDE_INT *); |
408 static void insn_stack_adjust_offset_pre_post (rtx, HOST_WIDE_INT *, | 628 static void insn_stack_adjust_offset_pre_post (rtx_insn *, HOST_WIDE_INT *, |
409 HOST_WIDE_INT *); | 629 HOST_WIDE_INT *); |
410 static bool vt_stack_adjustments (void); | 630 static bool vt_stack_adjustments (void); |
411 static hashval_t variable_htab_hash (const void *); | 631 |
412 static int variable_htab_eq (const void *, const void *); | 632 static void init_attrs_list_set (attrs **); |
413 static void variable_htab_free (void *); | 633 static void attrs_list_clear (attrs **); |
414 | 634 static attrs *attrs_list_member (attrs *, decl_or_value, HOST_WIDE_INT); |
415 static void init_attrs_list_set (attrs *); | 635 static void attrs_list_insert (attrs **, decl_or_value, HOST_WIDE_INT, rtx); |
416 static void attrs_list_clear (attrs *); | 636 static void attrs_list_copy (attrs **, attrs *); |
417 static attrs attrs_list_member (attrs, decl_or_value, HOST_WIDE_INT); | 637 static void attrs_list_union (attrs **, attrs *); |
418 static void attrs_list_insert (attrs *, decl_or_value, HOST_WIDE_INT, rtx); | 638 |
419 static void attrs_list_copy (attrs *, attrs); | 639 static variable **unshare_variable (dataflow_set *set, variable **slot, |
420 static void attrs_list_union (attrs *, attrs); | 640 variable *var, enum var_init_status); |
421 | 641 static void vars_copy (variable_table_type *, variable_table_type *); |
422 static void **unshare_variable (dataflow_set *set, void **slot, variable var, | |
423 enum var_init_status); | |
424 static void vars_copy (htab_t, htab_t); | |
425 static tree var_debug_decl (tree); | 642 static tree var_debug_decl (tree); |
426 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx); | 643 static void var_reg_set (dataflow_set *, rtx, enum var_init_status, rtx); |
427 static void var_reg_delete_and_set (dataflow_set *, rtx, bool, | 644 static void var_reg_delete_and_set (dataflow_set *, rtx, bool, |
428 enum var_init_status, rtx); | 645 enum var_init_status, rtx); |
429 static void var_reg_delete (dataflow_set *, rtx, bool); | 646 static void var_reg_delete (dataflow_set *, rtx, bool); |
436 static void dataflow_set_init (dataflow_set *); | 653 static void dataflow_set_init (dataflow_set *); |
437 static void dataflow_set_clear (dataflow_set *); | 654 static void dataflow_set_clear (dataflow_set *); |
438 static void dataflow_set_copy (dataflow_set *, dataflow_set *); | 655 static void dataflow_set_copy (dataflow_set *, dataflow_set *); |
439 static int variable_union_info_cmp_pos (const void *, const void *); | 656 static int variable_union_info_cmp_pos (const void *, const void *); |
440 static void dataflow_set_union (dataflow_set *, dataflow_set *); | 657 static void dataflow_set_union (dataflow_set *, dataflow_set *); |
441 static location_chain find_loc_in_1pdv (rtx, variable, htab_t); | 658 static location_chain *find_loc_in_1pdv (rtx, variable *, |
659 variable_table_type *); | |
442 static bool canon_value_cmp (rtx, rtx); | 660 static bool canon_value_cmp (rtx, rtx); |
443 static int loc_cmp (rtx, rtx); | 661 static int loc_cmp (rtx, rtx); |
444 static bool variable_part_different_p (variable_part *, variable_part *); | 662 static bool variable_part_different_p (variable_part *, variable_part *); |
445 static bool onepart_variable_different_p (variable, variable); | 663 static bool onepart_variable_different_p (variable *, variable *); |
446 static bool variable_different_p (variable, variable); | 664 static bool variable_different_p (variable *, variable *); |
447 static bool dataflow_set_different (dataflow_set *, dataflow_set *); | 665 static bool dataflow_set_different (dataflow_set *, dataflow_set *); |
448 static void dataflow_set_destroy (dataflow_set *); | 666 static void dataflow_set_destroy (dataflow_set *); |
449 | 667 |
450 static bool contains_symbol_ref (rtx); | |
451 static bool track_expr_p (tree, bool); | 668 static bool track_expr_p (tree, bool); |
452 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT); | 669 static bool same_variable_part_p (rtx, tree, HOST_WIDE_INT); |
453 static int add_uses (rtx *, void *); | |
454 static void add_uses_1 (rtx *, void *); | 670 static void add_uses_1 (rtx *, void *); |
455 static void add_stores (rtx, const_rtx, void *); | 671 static void add_stores (rtx, const_rtx, void *); |
456 static bool compute_bb_dataflow (basic_block); | 672 static bool compute_bb_dataflow (basic_block); |
457 static bool vt_find_locations (void); | 673 static bool vt_find_locations (void); |
458 | 674 |
459 static void dump_attrs_list (attrs); | 675 static void dump_attrs_list (attrs *); |
460 static int dump_var_slot (void **, void *); | 676 static void dump_var (variable *); |
461 static void dump_var (variable); | 677 static void dump_vars (variable_table_type *); |
462 static void dump_vars (htab_t); | |
463 static void dump_dataflow_set (dataflow_set *); | 678 static void dump_dataflow_set (dataflow_set *); |
464 static void dump_dataflow_sets (void); | 679 static void dump_dataflow_sets (void); |
465 | 680 |
466 static void variable_was_changed (variable, dataflow_set *); | 681 static void set_dv_changed (decl_or_value, bool); |
467 static void **set_slot_part (dataflow_set *, rtx, void **, | 682 static void variable_was_changed (variable *, dataflow_set *); |
468 decl_or_value, HOST_WIDE_INT, | 683 static variable **set_slot_part (dataflow_set *, rtx, variable **, |
469 enum var_init_status, rtx); | 684 decl_or_value, HOST_WIDE_INT, |
685 enum var_init_status, rtx); | |
470 static void set_variable_part (dataflow_set *, rtx, | 686 static void set_variable_part (dataflow_set *, rtx, |
471 decl_or_value, HOST_WIDE_INT, | 687 decl_or_value, HOST_WIDE_INT, |
472 enum var_init_status, rtx, enum insert_option); | 688 enum var_init_status, rtx, enum insert_option); |
473 static void **clobber_slot_part (dataflow_set *, rtx, | 689 static variable **clobber_slot_part (dataflow_set *, rtx, |
474 void **, HOST_WIDE_INT, rtx); | 690 variable **, HOST_WIDE_INT, rtx); |
475 static void clobber_variable_part (dataflow_set *, rtx, | 691 static void clobber_variable_part (dataflow_set *, rtx, |
476 decl_or_value, HOST_WIDE_INT, rtx); | 692 decl_or_value, HOST_WIDE_INT, rtx); |
477 static void **delete_slot_part (dataflow_set *, rtx, void **, HOST_WIDE_INT); | 693 static variable **delete_slot_part (dataflow_set *, rtx, variable **, |
694 HOST_WIDE_INT); | |
478 static void delete_variable_part (dataflow_set *, rtx, | 695 static void delete_variable_part (dataflow_set *, rtx, |
479 decl_or_value, HOST_WIDE_INT); | 696 decl_or_value, HOST_WIDE_INT); |
480 static int emit_note_insn_var_location (void **, void *); | |
481 static void emit_notes_for_changes (rtx, enum emit_note_where, shared_hash); | |
482 static int emit_notes_for_differences_1 (void **, void *); | |
483 static int emit_notes_for_differences_2 (void **, void *); | |
484 static void emit_notes_for_differences (rtx, dataflow_set *, dataflow_set *); | |
485 static void emit_notes_in_bb (basic_block, dataflow_set *); | 697 static void emit_notes_in_bb (basic_block, dataflow_set *); |
486 static void vt_emit_notes (void); | 698 static void vt_emit_notes (void); |
487 | 699 |
488 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *); | 700 static bool vt_get_decl_and_offset (rtx, tree *, HOST_WIDE_INT *); |
489 static void vt_add_function_parameters (void); | 701 static void vt_add_function_parameters (void); |
490 static bool vt_initialize (void); | 702 static bool vt_initialize (void); |
491 static void vt_finalize (void); | 703 static void vt_finalize (void); |
704 | |
705 /* Callback for stack_adjust_offset_pre_post, called via for_each_inc_dec. */ | |
706 | |
707 static int | |
708 stack_adjust_offset_pre_post_cb (rtx, rtx op, rtx dest, rtx src, rtx srcoff, | |
709 void *arg) | |
710 { | |
711 if (dest != stack_pointer_rtx) | |
712 return 0; | |
713 | |
714 switch (GET_CODE (op)) | |
715 { | |
716 case PRE_INC: | |
717 case PRE_DEC: | |
718 ((HOST_WIDE_INT *)arg)[0] -= INTVAL (srcoff); | |
719 return 0; | |
720 case POST_INC: | |
721 case POST_DEC: | |
722 ((HOST_WIDE_INT *)arg)[1] -= INTVAL (srcoff); | |
723 return 0; | |
724 case PRE_MODIFY: | |
725 case POST_MODIFY: | |
726 /* We handle only adjustments by constant amount. */ | |
727 gcc_assert (GET_CODE (src) == PLUS | |
728 && CONST_INT_P (XEXP (src, 1)) | |
729 && XEXP (src, 0) == stack_pointer_rtx); | |
730 ((HOST_WIDE_INT *)arg)[GET_CODE (op) == POST_MODIFY] | |
731 -= INTVAL (XEXP (src, 1)); | |
732 return 0; | |
733 default: | |
734 gcc_unreachable (); | |
735 } | |
736 } | |
492 | 737 |
493 /* Given a SET, calculate the amount of stack adjustment it contains | 738 /* Given a SET, calculate the amount of stack adjustment it contains |
494 PRE- and POST-modifying stack pointer. | 739 PRE- and POST-modifying stack pointer. |
495 This function is similar to stack_adjust_offset. */ | 740 This function is similar to stack_adjust_offset. */ |
496 | 741 |
513 | 758 |
514 if (code == MINUS) | 759 if (code == MINUS) |
515 *post += INTVAL (XEXP (src, 1)); | 760 *post += INTVAL (XEXP (src, 1)); |
516 else | 761 else |
517 *post -= INTVAL (XEXP (src, 1)); | 762 *post -= INTVAL (XEXP (src, 1)); |
518 } | 763 return; |
519 else if (MEM_P (dest)) | 764 } |
520 { | 765 HOST_WIDE_INT res[2] = { 0, 0 }; |
521 /* (set (mem (pre_dec (reg sp))) (foo)) */ | 766 for_each_inc_dec (pattern, stack_adjust_offset_pre_post_cb, res); |
522 src = XEXP (dest, 0); | 767 *pre += res[0]; |
523 code = GET_CODE (src); | 768 *post += res[1]; |
524 | |
525 switch (code) | |
526 { | |
527 case PRE_MODIFY: | |
528 case POST_MODIFY: | |
529 if (XEXP (src, 0) == stack_pointer_rtx) | |
530 { | |
531 rtx val = XEXP (XEXP (src, 1), 1); | |
532 /* We handle only adjustments by constant amount. */ | |
533 gcc_assert (GET_CODE (XEXP (src, 1)) == PLUS && | |
534 CONST_INT_P (val)); | |
535 | |
536 if (code == PRE_MODIFY) | |
537 *pre -= INTVAL (val); | |
538 else | |
539 *post -= INTVAL (val); | |
540 break; | |
541 } | |
542 return; | |
543 | |
544 case PRE_DEC: | |
545 if (XEXP (src, 0) == stack_pointer_rtx) | |
546 { | |
547 *pre += GET_MODE_SIZE (GET_MODE (dest)); | |
548 break; | |
549 } | |
550 return; | |
551 | |
552 case POST_DEC: | |
553 if (XEXP (src, 0) == stack_pointer_rtx) | |
554 { | |
555 *post += GET_MODE_SIZE (GET_MODE (dest)); | |
556 break; | |
557 } | |
558 return; | |
559 | |
560 case PRE_INC: | |
561 if (XEXP (src, 0) == stack_pointer_rtx) | |
562 { | |
563 *pre -= GET_MODE_SIZE (GET_MODE (dest)); | |
564 break; | |
565 } | |
566 return; | |
567 | |
568 case POST_INC: | |
569 if (XEXP (src, 0) == stack_pointer_rtx) | |
570 { | |
571 *post -= GET_MODE_SIZE (GET_MODE (dest)); | |
572 break; | |
573 } | |
574 return; | |
575 | |
576 default: | |
577 return; | |
578 } | |
579 } | |
580 } | 769 } |
581 | 770 |
582 /* Given an INSN, calculate the amount of stack adjustment it contains | 771 /* Given an INSN, calculate the amount of stack adjustment it contains |
583 PRE- and POST-modifying stack pointer. */ | 772 PRE- and POST-modifying stack pointer. */ |
584 | 773 |
585 static void | 774 static void |
586 insn_stack_adjust_offset_pre_post (rtx insn, HOST_WIDE_INT *pre, | 775 insn_stack_adjust_offset_pre_post (rtx_insn *insn, HOST_WIDE_INT *pre, |
587 HOST_WIDE_INT *post) | 776 HOST_WIDE_INT *post) |
588 { | 777 { |
589 rtx pattern; | 778 rtx pattern; |
590 | 779 |
591 *pre = 0; | 780 *pre = 0; |
623 { | 812 { |
624 edge_iterator *stack; | 813 edge_iterator *stack; |
625 int sp; | 814 int sp; |
626 | 815 |
627 /* Initialize entry block. */ | 816 /* Initialize entry block. */ |
628 VTI (ENTRY_BLOCK_PTR)->visited = true; | 817 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->visited = true; |
629 VTI (ENTRY_BLOCK_PTR)->in.stack_adjust = INCOMING_FRAME_SP_OFFSET; | 818 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->in.stack_adjust |
630 VTI (ENTRY_BLOCK_PTR)->out.stack_adjust = INCOMING_FRAME_SP_OFFSET; | 819 = INCOMING_FRAME_SP_OFFSET; |
820 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out.stack_adjust | |
821 = INCOMING_FRAME_SP_OFFSET; | |
631 | 822 |
632 /* Allocate stack for back-tracking up CFG. */ | 823 /* Allocate stack for back-tracking up CFG. */ |
633 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1); | 824 stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun) + 1); |
634 sp = 0; | 825 sp = 0; |
635 | 826 |
636 /* Push the first edge on to the stack. */ | 827 /* Push the first edge on to the stack. */ |
637 stack[sp++] = ei_start (ENTRY_BLOCK_PTR->succs); | 828 stack[sp++] = ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs); |
638 | 829 |
639 while (sp) | 830 while (sp) |
640 { | 831 { |
641 edge_iterator ei; | 832 edge_iterator ei; |
642 basic_block src; | 833 basic_block src; |
648 dest = ei_edge (ei)->dest; | 839 dest = ei_edge (ei)->dest; |
649 | 840 |
650 /* Check if the edge destination has been visited yet. */ | 841 /* Check if the edge destination has been visited yet. */ |
651 if (!VTI (dest)->visited) | 842 if (!VTI (dest)->visited) |
652 { | 843 { |
653 rtx insn; | 844 rtx_insn *insn; |
654 HOST_WIDE_INT pre, post, offset; | 845 HOST_WIDE_INT pre, post, offset; |
655 VTI (dest)->visited = true; | 846 VTI (dest)->visited = true; |
656 VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust; | 847 VTI (dest)->in.stack_adjust = offset = VTI (src)->out.stack_adjust; |
657 | 848 |
658 if (dest != EXIT_BLOCK_PTR) | 849 if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)) |
659 for (insn = BB_HEAD (dest); | 850 for (insn = BB_HEAD (dest); |
660 insn != NEXT_INSN (BB_END (dest)); | 851 insn != NEXT_INSN (BB_END (dest)); |
661 insn = NEXT_INSN (insn)) | 852 insn = NEXT_INSN (insn)) |
662 if (INSN_P (insn)) | 853 if (INSN_P (insn)) |
663 { | 854 { |
672 time, check its successors. */ | 863 time, check its successors. */ |
673 stack[sp++] = ei_start (dest->succs); | 864 stack[sp++] = ei_start (dest->succs); |
674 } | 865 } |
675 else | 866 else |
676 { | 867 { |
677 /* Check whether the adjustments on the edges are the same. */ | 868 /* We can end up with different stack adjustments for the exit block |
678 if (VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust) | 869 of a shrink-wrapped function if stack_adjust_offset_pre_post |
870 doesn't understand the rtx pattern used to restore the stack | |
871 pointer in the epilogue. For example, on s390(x), the stack | |
872 pointer is often restored via a load-multiple instruction | |
873 and so no stack_adjust offset is recorded for it. This means | |
874 that the stack offset at the end of the epilogue block is the | |
875 same as the offset before the epilogue, whereas other paths | |
876 to the exit block will have the correct stack_adjust. | |
877 | |
878 It is safe to ignore these differences because (a) we never | |
879 use the stack_adjust for the exit block in this pass and | |
880 (b) dwarf2cfi checks whether the CFA notes in a shrink-wrapped | |
881 function are correct. | |
882 | |
883 We must check whether the adjustments on other edges are | |
884 the same though. */ | |
885 if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun) | |
886 && VTI (dest)->in.stack_adjust != VTI (src)->out.stack_adjust) | |
679 { | 887 { |
680 free (stack); | 888 free (stack); |
681 return false; | 889 return false; |
682 } | 890 } |
683 | 891 |
697 /* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or | 905 /* arg_pointer_rtx resp. frame_pointer_rtx if stack_pointer_rtx or |
698 hard_frame_pointer_rtx is being mapped to it and offset for it. */ | 906 hard_frame_pointer_rtx is being mapped to it and offset for it. */ |
699 static rtx cfa_base_rtx; | 907 static rtx cfa_base_rtx; |
700 static HOST_WIDE_INT cfa_base_offset; | 908 static HOST_WIDE_INT cfa_base_offset; |
701 | 909 |
702 /* Compute a CFA-based value for the stack pointer. */ | 910 /* Compute a CFA-based value for an ADJUSTMENT made to stack_pointer_rtx |
911 or hard_frame_pointer_rtx. */ | |
703 | 912 |
704 static inline rtx | 913 static inline rtx |
705 compute_cfa_pointer (HOST_WIDE_INT adjustment) | 914 compute_cfa_pointer (HOST_WIDE_INT adjustment) |
706 { | 915 { |
707 return plus_constant (cfa_base_rtx, adjustment + cfa_base_offset); | 916 return plus_constant (Pmode, cfa_base_rtx, adjustment + cfa_base_offset); |
708 } | 917 } |
709 | 918 |
710 /* Adjustment for hard_frame_pointer_rtx to cfa base reg, | 919 /* Adjustment for hard_frame_pointer_rtx to cfa base reg, |
711 or -1 if the replacement shouldn't be done. */ | 920 or -1 if the replacement shouldn't be done. */ |
712 static HOST_WIDE_INT hard_frame_pointer_adjustment = -1; | 921 static HOST_WIDE_INT hard_frame_pointer_adjustment = -1; |
714 /* Data for adjust_mems callback. */ | 923 /* Data for adjust_mems callback. */ |
715 | 924 |
716 struct adjust_mem_data | 925 struct adjust_mem_data |
717 { | 926 { |
718 bool store; | 927 bool store; |
719 enum machine_mode mem_mode; | 928 machine_mode mem_mode; |
720 HOST_WIDE_INT stack_adjust; | 929 HOST_WIDE_INT stack_adjust; |
721 rtx side_effects; | 930 auto_vec<rtx> side_effects; |
722 }; | 931 }; |
723 | 932 |
724 /* Helper for adjust_mems. Return 1 if *loc is unsuitable for | 933 /* Helper for adjust_mems. Return true if X is suitable for |
725 transformation of wider mode arithmetics to narrower mode, | 934 transformation of wider mode arithmetics to narrower mode. */ |
726 -1 if it is suitable and subexpressions shouldn't be | 935 |
727 traversed and 0 if it is suitable and subexpressions should | 936 static bool |
728 be traversed. Called through for_each_rtx. */ | 937 use_narrower_mode_test (rtx x, const_rtx subreg) |
729 | 938 { |
730 static int | 939 subrtx_var_iterator::array_type array; |
731 use_narrower_mode_test (rtx *loc, void *data) | 940 FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST) |
732 { | 941 { |
733 rtx subreg = (rtx) data; | 942 rtx x = *iter; |
734 | 943 if (CONSTANT_P (x)) |
735 if (CONSTANT_P (*loc)) | 944 iter.skip_subrtxes (); |
736 return -1; | |
737 switch (GET_CODE (*loc)) | |
738 { | |
739 case REG: | |
740 if (cselib_lookup (*loc, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode)) | |
741 return 1; | |
742 return -1; | |
743 case PLUS: | |
744 case MINUS: | |
745 case MULT: | |
746 return 0; | |
747 case ASHIFT: | |
748 if (for_each_rtx (&XEXP (*loc, 0), use_narrower_mode_test, data)) | |
749 return 1; | |
750 else | 945 else |
751 return -1; | 946 switch (GET_CODE (x)) |
752 default: | 947 { |
753 return 1; | 948 case REG: |
754 } | 949 if (cselib_lookup (x, GET_MODE (SUBREG_REG (subreg)), 0, VOIDmode)) |
950 return false; | |
951 if (!validate_subreg (GET_MODE (subreg), GET_MODE (x), x, | |
952 subreg_lowpart_offset (GET_MODE (subreg), | |
953 GET_MODE (x)))) | |
954 return false; | |
955 break; | |
956 case PLUS: | |
957 case MINUS: | |
958 case MULT: | |
959 break; | |
960 case ASHIFT: | |
961 iter.substitute (XEXP (x, 0)); | |
962 break; | |
963 default: | |
964 return false; | |
965 } | |
966 } | |
967 return true; | |
755 } | 968 } |
756 | 969 |
757 /* Transform X into narrower mode MODE from wider mode WMODE. */ | 970 /* Transform X into narrower mode MODE from wider mode WMODE. */ |
758 | 971 |
759 static rtx | 972 static rtx |
760 use_narrower_mode (rtx x, enum machine_mode mode, enum machine_mode wmode) | 973 use_narrower_mode (rtx x, scalar_int_mode mode, scalar_int_mode wmode) |
761 { | 974 { |
762 rtx op0, op1; | 975 rtx op0, op1; |
763 if (CONSTANT_P (x)) | 976 if (CONSTANT_P (x)) |
764 return lowpart_subreg (mode, x, wmode); | 977 return lowpart_subreg (mode, x, wmode); |
765 switch (GET_CODE (x)) | 978 switch (GET_CODE (x)) |
772 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode); | 985 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode); |
773 op1 = use_narrower_mode (XEXP (x, 1), mode, wmode); | 986 op1 = use_narrower_mode (XEXP (x, 1), mode, wmode); |
774 return simplify_gen_binary (GET_CODE (x), mode, op0, op1); | 987 return simplify_gen_binary (GET_CODE (x), mode, op0, op1); |
775 case ASHIFT: | 988 case ASHIFT: |
776 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode); | 989 op0 = use_narrower_mode (XEXP (x, 0), mode, wmode); |
777 return simplify_gen_binary (ASHIFT, mode, op0, XEXP (x, 1)); | 990 op1 = XEXP (x, 1); |
991 /* Ensure shift amount is not wider than mode. */ | |
992 if (GET_MODE (op1) == VOIDmode) | |
993 op1 = lowpart_subreg (mode, op1, wmode); | |
994 else if (GET_MODE_PRECISION (mode) | |
995 < GET_MODE_PRECISION (as_a <scalar_int_mode> (GET_MODE (op1)))) | |
996 op1 = lowpart_subreg (mode, op1, GET_MODE (op1)); | |
997 return simplify_gen_binary (ASHIFT, mode, op0, op1); | |
778 default: | 998 default: |
779 gcc_unreachable (); | 999 gcc_unreachable (); |
780 } | 1000 } |
781 } | 1001 } |
782 | 1002 |
785 static rtx | 1005 static rtx |
786 adjust_mems (rtx loc, const_rtx old_rtx, void *data) | 1006 adjust_mems (rtx loc, const_rtx old_rtx, void *data) |
787 { | 1007 { |
788 struct adjust_mem_data *amd = (struct adjust_mem_data *) data; | 1008 struct adjust_mem_data *amd = (struct adjust_mem_data *) data; |
789 rtx mem, addr = loc, tem; | 1009 rtx mem, addr = loc, tem; |
790 enum machine_mode mem_mode_save; | 1010 machine_mode mem_mode_save; |
791 bool store_save; | 1011 bool store_save; |
1012 scalar_int_mode tem_mode, tem_subreg_mode; | |
792 switch (GET_CODE (loc)) | 1013 switch (GET_CODE (loc)) |
793 { | 1014 { |
794 case REG: | 1015 case REG: |
795 /* Don't do any sp or fp replacements outside of MEM addresses | 1016 /* Don't do any sp or fp replacements outside of MEM addresses |
796 on the LHS. */ | 1017 on the LHS. */ |
832 mem = avoid_constant_pool_reference (mem); | 1053 mem = avoid_constant_pool_reference (mem); |
833 return mem; | 1054 return mem; |
834 case PRE_INC: | 1055 case PRE_INC: |
835 case PRE_DEC: | 1056 case PRE_DEC: |
836 addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0), | 1057 addr = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0), |
837 GEN_INT (GET_CODE (loc) == PRE_INC | 1058 gen_int_mode (GET_CODE (loc) == PRE_INC |
838 ? GET_MODE_SIZE (amd->mem_mode) | 1059 ? GET_MODE_SIZE (amd->mem_mode) |
839 : -GET_MODE_SIZE (amd->mem_mode))); | 1060 : -GET_MODE_SIZE (amd->mem_mode), |
1061 GET_MODE (loc))); | |
1062 /* FALLTHRU */ | |
840 case POST_INC: | 1063 case POST_INC: |
841 case POST_DEC: | 1064 case POST_DEC: |
842 if (addr == loc) | 1065 if (addr == loc) |
843 addr = XEXP (loc, 0); | 1066 addr = XEXP (loc, 0); |
844 gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode); | 1067 gcc_assert (amd->mem_mode != VOIDmode && amd->mem_mode != BLKmode); |
845 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data); | 1068 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data); |
846 tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0), | 1069 tem = gen_rtx_PLUS (GET_MODE (loc), XEXP (loc, 0), |
847 GEN_INT ((GET_CODE (loc) == PRE_INC | 1070 gen_int_mode ((GET_CODE (loc) == PRE_INC |
848 || GET_CODE (loc) == POST_INC) | 1071 || GET_CODE (loc) == POST_INC) |
849 ? GET_MODE_SIZE (amd->mem_mode) | 1072 ? GET_MODE_SIZE (amd->mem_mode) |
850 : -GET_MODE_SIZE (amd->mem_mode))); | 1073 : -GET_MODE_SIZE (amd->mem_mode), |
851 amd->side_effects = alloc_EXPR_LIST (0, | 1074 GET_MODE (loc))); |
852 gen_rtx_SET (VOIDmode, | 1075 store_save = amd->store; |
853 XEXP (loc, 0), | 1076 amd->store = false; |
854 tem), | 1077 tem = simplify_replace_fn_rtx (tem, old_rtx, adjust_mems, data); |
855 amd->side_effects); | 1078 amd->store = store_save; |
1079 amd->side_effects.safe_push (gen_rtx_SET (XEXP (loc, 0), tem)); | |
856 return addr; | 1080 return addr; |
857 case PRE_MODIFY: | 1081 case PRE_MODIFY: |
858 addr = XEXP (loc, 1); | 1082 addr = XEXP (loc, 1); |
1083 /* FALLTHRU */ | |
859 case POST_MODIFY: | 1084 case POST_MODIFY: |
860 if (addr == loc) | 1085 if (addr == loc) |
861 addr = XEXP (loc, 0); | 1086 addr = XEXP (loc, 0); |
862 gcc_assert (amd->mem_mode != VOIDmode); | 1087 gcc_assert (amd->mem_mode != VOIDmode); |
863 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data); | 1088 addr = simplify_replace_fn_rtx (addr, old_rtx, adjust_mems, data); |
864 amd->side_effects = alloc_EXPR_LIST (0, | 1089 store_save = amd->store; |
865 gen_rtx_SET (VOIDmode, | 1090 amd->store = false; |
866 XEXP (loc, 0), | 1091 tem = simplify_replace_fn_rtx (XEXP (loc, 1), old_rtx, |
867 XEXP (loc, 1)), | 1092 adjust_mems, data); |
868 amd->side_effects); | 1093 amd->store = store_save; |
1094 amd->side_effects.safe_push (gen_rtx_SET (XEXP (loc, 0), tem)); | |
869 return addr; | 1095 return addr; |
870 case SUBREG: | 1096 case SUBREG: |
871 /* First try without delegitimization of whole MEMs and | 1097 /* First try without delegitimization of whole MEMs and |
872 avoid_constant_pool_reference, which is more likely to succeed. */ | 1098 avoid_constant_pool_reference, which is more likely to succeed. */ |
873 store_save = amd->store; | 1099 store_save = amd->store; |
896 && GET_CODE (tem) == SUBREG | 1122 && GET_CODE (tem) == SUBREG |
897 && (GET_CODE (SUBREG_REG (tem)) == PLUS | 1123 && (GET_CODE (SUBREG_REG (tem)) == PLUS |
898 || GET_CODE (SUBREG_REG (tem)) == MINUS | 1124 || GET_CODE (SUBREG_REG (tem)) == MINUS |
899 || GET_CODE (SUBREG_REG (tem)) == MULT | 1125 || GET_CODE (SUBREG_REG (tem)) == MULT |
900 || GET_CODE (SUBREG_REG (tem)) == ASHIFT) | 1126 || GET_CODE (SUBREG_REG (tem)) == ASHIFT) |
901 && GET_MODE_CLASS (GET_MODE (tem)) == MODE_INT | 1127 && is_a <scalar_int_mode> (GET_MODE (tem), &tem_mode) |
902 && GET_MODE_CLASS (GET_MODE (SUBREG_REG (tem))) == MODE_INT | 1128 && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (tem)), |
903 && GET_MODE_SIZE (GET_MODE (tem)) | 1129 &tem_subreg_mode) |
904 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (tem))) | 1130 && (GET_MODE_PRECISION (tem_mode) |
1131 < GET_MODE_PRECISION (tem_subreg_mode)) | |
905 && subreg_lowpart_p (tem) | 1132 && subreg_lowpart_p (tem) |
906 && !for_each_rtx (&SUBREG_REG (tem), use_narrower_mode_test, tem)) | 1133 && use_narrower_mode_test (SUBREG_REG (tem), tem)) |
907 return use_narrower_mode (SUBREG_REG (tem), GET_MODE (tem), | 1134 return use_narrower_mode (SUBREG_REG (tem), tem_mode, tem_subreg_mode); |
908 GET_MODE (SUBREG_REG (tem))); | |
909 return tem; | 1135 return tem; |
910 case ASM_OPERANDS: | 1136 case ASM_OPERANDS: |
911 /* Don't do any replacements in second and following | 1137 /* Don't do any replacements in second and following |
912 ASM_OPERANDS of inline-asm with multiple sets. | 1138 ASM_OPERANDS of inline-asm with multiple sets. |
913 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC | 1139 ASM_OPERANDS_INPUT_VEC, ASM_OPERANDS_INPUT_CONSTRAINT_VEC |
953 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes, | 1179 /* Simplify INSN. Remove all {PRE,POST}_{INC,DEC,MODIFY} rtxes, |
954 replace them with their value in the insn and add the side-effects | 1180 replace them with their value in the insn and add the side-effects |
955 as other sets to the insn. */ | 1181 as other sets to the insn. */ |
956 | 1182 |
957 static void | 1183 static void |
958 adjust_insn (basic_block bb, rtx insn) | 1184 adjust_insn (basic_block bb, rtx_insn *insn) |
959 { | 1185 { |
960 struct adjust_mem_data amd; | |
961 rtx set; | 1186 rtx set; |
1187 | |
1188 #ifdef HAVE_window_save | |
1189 /* If the target machine has an explicit window save instruction, the | |
1190 transformation OUTGOING_REGNO -> INCOMING_REGNO is done there. */ | |
1191 if (RTX_FRAME_RELATED_P (insn) | |
1192 && find_reg_note (insn, REG_CFA_WINDOW_SAVE, NULL_RTX)) | |
1193 { | |
1194 unsigned int i, nregs = vec_safe_length (windowed_parm_regs); | |
1195 rtx rtl = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (nregs * 2)); | |
1196 parm_reg *p; | |
1197 | |
1198 FOR_EACH_VEC_SAFE_ELT (windowed_parm_regs, i, p) | |
1199 { | |
1200 XVECEXP (rtl, 0, i * 2) | |
1201 = gen_rtx_SET (p->incoming, p->outgoing); | |
1202 /* Do not clobber the attached DECL, but only the REG. */ | |
1203 XVECEXP (rtl, 0, i * 2 + 1) | |
1204 = gen_rtx_CLOBBER (GET_MODE (p->outgoing), | |
1205 gen_raw_REG (GET_MODE (p->outgoing), | |
1206 REGNO (p->outgoing))); | |
1207 } | |
1208 | |
1209 validate_change (NULL_RTX, &PATTERN (insn), rtl, true); | |
1210 return; | |
1211 } | |
1212 #endif | |
1213 | |
1214 adjust_mem_data amd; | |
962 amd.mem_mode = VOIDmode; | 1215 amd.mem_mode = VOIDmode; |
963 amd.stack_adjust = -VTI (bb)->out.stack_adjust; | 1216 amd.stack_adjust = -VTI (bb)->out.stack_adjust; |
964 amd.side_effects = NULL_RTX; | |
965 | 1217 |
966 amd.store = true; | 1218 amd.store = true; |
967 note_stores (PATTERN (insn), adjust_mem_stores, &amd); | 1219 note_stores (PATTERN (insn), adjust_mem_stores, &amd); |
968 | 1220 |
969 amd.store = false; | 1221 amd.store = false; |
1025 | 1277 |
1026 if (note && CONSTANT_P (XEXP (note, 0))) | 1278 if (note && CONSTANT_P (XEXP (note, 0))) |
1027 validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true); | 1279 validate_change (NULL_RTX, &SET_SRC (set), XEXP (note, 0), true); |
1028 } | 1280 } |
1029 | 1281 |
1030 if (amd.side_effects) | 1282 if (!amd.side_effects.is_empty ()) |
1031 { | 1283 { |
1032 rtx *pat, new_pat, s; | 1284 rtx *pat, new_pat; |
1033 int i, oldn, newn; | 1285 int i, oldn; |
1034 | 1286 |
1035 pat = &PATTERN (insn); | 1287 pat = &PATTERN (insn); |
1036 if (GET_CODE (*pat) == COND_EXEC) | 1288 if (GET_CODE (*pat) == COND_EXEC) |
1037 pat = &COND_EXEC_CODE (*pat); | 1289 pat = &COND_EXEC_CODE (*pat); |
1038 if (GET_CODE (*pat) == PARALLEL) | 1290 if (GET_CODE (*pat) == PARALLEL) |
1039 oldn = XVECLEN (*pat, 0); | 1291 oldn = XVECLEN (*pat, 0); |
1040 else | 1292 else |
1041 oldn = 1; | 1293 oldn = 1; |
1042 for (s = amd.side_effects, newn = 0; s; newn++) | 1294 unsigned int newn = amd.side_effects.length (); |
1043 s = XEXP (s, 1); | |
1044 new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn)); | 1295 new_pat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (oldn + newn)); |
1045 if (GET_CODE (*pat) == PARALLEL) | 1296 if (GET_CODE (*pat) == PARALLEL) |
1046 for (i = 0; i < oldn; i++) | 1297 for (i = 0; i < oldn; i++) |
1047 XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i); | 1298 XVECEXP (new_pat, 0, i) = XVECEXP (*pat, 0, i); |
1048 else | 1299 else |
1049 XVECEXP (new_pat, 0, 0) = *pat; | 1300 XVECEXP (new_pat, 0, 0) = *pat; |
1050 for (s = amd.side_effects, i = oldn; i < oldn + newn; i++, s = XEXP (s, 1)) | 1301 |
1051 XVECEXP (new_pat, 0, i) = XEXP (s, 0); | 1302 rtx effect; |
1052 free_EXPR_LIST_list (&amd.side_effects); | 1303 unsigned int j; |
1304 FOR_EACH_VEC_ELT_REVERSE (amd.side_effects, j, effect) | |
1305 XVECEXP (new_pat, 0, j + oldn) = effect; | |
1053 validate_change (NULL_RTX, pat, new_pat, true); | 1306 validate_change (NULL_RTX, pat, new_pat, true); |
1054 } | 1307 } |
1055 } | 1308 } |
1056 | 1309 |
1057 /* Return true if a decl_or_value DV is a DECL or NULL. */ | 1310 /* Return the DEBUG_EXPR of a DEBUG_EXPR_DECL or the VALUE in DV. */ |
1058 static inline bool | |
1059 dv_is_decl_p (decl_or_value dv) | |
1060 { | |
1061 return !dv || (int) TREE_CODE ((tree) dv) != (int) VALUE; | |
1062 } | |
1063 | |
1064 /* Return true if a decl_or_value is a VALUE rtl. */ | |
1065 static inline bool | |
1066 dv_is_value_p (decl_or_value dv) | |
1067 { | |
1068 return dv && !dv_is_decl_p (dv); | |
1069 } | |
1070 | |
1071 /* Return the decl in the decl_or_value. */ | |
1072 static inline tree | |
1073 dv_as_decl (decl_or_value dv) | |
1074 { | |
1075 gcc_checking_assert (dv_is_decl_p (dv)); | |
1076 return (tree) dv; | |
1077 } | |
1078 | |
1079 /* Return the value in the decl_or_value. */ | |
1080 static inline rtx | 1311 static inline rtx |
1081 dv_as_value (decl_or_value dv) | 1312 dv_as_rtx (decl_or_value dv) |
1082 { | 1313 { |
1083 gcc_checking_assert (dv_is_value_p (dv)); | 1314 tree decl; |
1084 return (rtx)dv; | 1315 |
1085 } | 1316 if (dv_is_value_p (dv)) |
1086 | 1317 return dv_as_value (dv); |
1087 /* Return the opaque pointer in the decl_or_value. */ | 1318 |
1088 static inline void * | 1319 decl = dv_as_decl (dv); |
1089 dv_as_opaque (decl_or_value dv) | 1320 |
1090 { | 1321 gcc_checking_assert (TREE_CODE (decl) == DEBUG_EXPR_DECL); |
1091 return dv; | 1322 return DECL_RTL_KNOWN_SET (decl); |
1092 } | 1323 } |
1093 | 1324 |
1094 /* Return true if a decl_or_value must not have more than one variable | 1325 /* Return nonzero if a decl_or_value must not have more than one |
1095 part. */ | 1326 variable part. The returned value discriminates among various |
1096 static inline bool | 1327 kinds of one-part DVs ccording to enum onepart_enum. */ |
1328 static inline onepart_enum | |
1097 dv_onepart_p (decl_or_value dv) | 1329 dv_onepart_p (decl_or_value dv) |
1098 { | 1330 { |
1099 tree decl; | 1331 tree decl; |
1100 | 1332 |
1101 if (!MAY_HAVE_DEBUG_INSNS) | 1333 if (!MAY_HAVE_DEBUG_INSNS) |
1102 return false; | 1334 return NOT_ONEPART; |
1103 | 1335 |
1104 if (dv_is_value_p (dv)) | 1336 if (dv_is_value_p (dv)) |
1105 return true; | 1337 return ONEPART_VALUE; |
1106 | 1338 |
1107 decl = dv_as_decl (dv); | 1339 decl = dv_as_decl (dv); |
1108 | 1340 |
1109 if (!decl) | |
1110 return true; | |
1111 | |
1112 if (TREE_CODE (decl) == DEBUG_EXPR_DECL) | 1341 if (TREE_CODE (decl) == DEBUG_EXPR_DECL) |
1113 return true; | 1342 return ONEPART_DEXPR; |
1114 | 1343 |
1115 return (target_for_debug_bind (decl) != NULL_TREE); | 1344 if (target_for_debug_bind (decl) != NULL_TREE) |
1116 } | 1345 return ONEPART_VDECL; |
1117 | 1346 |
1118 /* Return the variable pool to be used for dv, depending on whether it | 1347 return NOT_ONEPART; |
1119 can have multiple parts or not. */ | 1348 } |
1120 static inline alloc_pool | 1349 |
1121 dv_pool (decl_or_value dv) | 1350 /* Return the variable pool to be used for a dv of type ONEPART. */ |
1122 { | 1351 static inline pool_allocator & |
1123 return dv_onepart_p (dv) ? valvar_pool : var_pool; | 1352 onepart_pool (onepart_enum onepart) |
1353 { | |
1354 return onepart ? valvar_pool : var_pool; | |
1355 } | |
1356 | |
1357 /* Allocate a variable_def from the corresponding variable pool. */ | |
1358 static inline variable * | |
1359 onepart_pool_allocate (onepart_enum onepart) | |
1360 { | |
1361 return (variable*) onepart_pool (onepart).allocate (); | |
1124 } | 1362 } |
1125 | 1363 |
1126 /* Build a decl_or_value out of a decl. */ | 1364 /* Build a decl_or_value out of a decl. */ |
1127 static inline decl_or_value | 1365 static inline decl_or_value |
1128 dv_from_decl (tree decl) | 1366 dv_from_decl (tree decl) |
1141 dv = value; | 1379 dv = value; |
1142 gcc_checking_assert (dv_is_value_p (dv)); | 1380 gcc_checking_assert (dv_is_value_p (dv)); |
1143 return dv; | 1381 return dv; |
1144 } | 1382 } |
1145 | 1383 |
1384 /* Return a value or the decl of a debug_expr as a decl_or_value. */ | |
1385 static inline decl_or_value | |
1386 dv_from_rtx (rtx x) | |
1387 { | |
1388 decl_or_value dv; | |
1389 | |
1390 switch (GET_CODE (x)) | |
1391 { | |
1392 case DEBUG_EXPR: | |
1393 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x)); | |
1394 gcc_checking_assert (DECL_RTL_KNOWN_SET (DEBUG_EXPR_TREE_DECL (x)) == x); | |
1395 break; | |
1396 | |
1397 case VALUE: | |
1398 dv = dv_from_value (x); | |
1399 break; | |
1400 | |
1401 default: | |
1402 gcc_unreachable (); | |
1403 } | |
1404 | |
1405 return dv; | |
1406 } | |
1407 | |
1146 extern void debug_dv (decl_or_value dv); | 1408 extern void debug_dv (decl_or_value dv); |
1147 | 1409 |
1148 DEBUG_FUNCTION void | 1410 DEBUG_FUNCTION void |
1149 debug_dv (decl_or_value dv) | 1411 debug_dv (decl_or_value dv) |
1150 { | 1412 { |
1152 debug_rtx (dv_as_value (dv)); | 1414 debug_rtx (dv_as_value (dv)); |
1153 else | 1415 else |
1154 debug_generic_stmt (dv_as_decl (dv)); | 1416 debug_generic_stmt (dv_as_decl (dv)); |
1155 } | 1417 } |
1156 | 1418 |
1157 typedef unsigned int dvuid; | 1419 static void loc_exp_dep_clear (variable *var); |
1158 | |
1159 /* Return the uid of DV. */ | |
1160 | |
1161 static inline dvuid | |
1162 dv_uid (decl_or_value dv) | |
1163 { | |
1164 if (dv_is_value_p (dv)) | |
1165 return CSELIB_VAL_PTR (dv_as_value (dv))->uid; | |
1166 else | |
1167 return DECL_UID (dv_as_decl (dv)); | |
1168 } | |
1169 | |
1170 /* Compute the hash from the uid. */ | |
1171 | |
1172 static inline hashval_t | |
1173 dv_uid2hash (dvuid uid) | |
1174 { | |
1175 return uid; | |
1176 } | |
1177 | |
1178 /* The hash function for a mask table in a shared_htab chain. */ | |
1179 | |
1180 static inline hashval_t | |
1181 dv_htab_hash (decl_or_value dv) | |
1182 { | |
1183 return dv_uid2hash (dv_uid (dv)); | |
1184 } | |
1185 | |
1186 /* The hash function for variable_htab, computes the hash value | |
1187 from the declaration of variable X. */ | |
1188 | |
1189 static hashval_t | |
1190 variable_htab_hash (const void *x) | |
1191 { | |
1192 const_variable const v = (const_variable) x; | |
1193 | |
1194 return dv_htab_hash (v->dv); | |
1195 } | |
1196 | |
1197 /* Compare the declaration of variable X with declaration Y. */ | |
1198 | |
1199 static int | |
1200 variable_htab_eq (const void *x, const void *y) | |
1201 { | |
1202 const_variable const v = (const_variable) x; | |
1203 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y); | |
1204 | |
1205 return (dv_as_opaque (v->dv) == dv_as_opaque (dv)); | |
1206 } | |
1207 | 1420 |
1208 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */ | 1421 /* Free the element of VARIABLE_HTAB (its type is struct variable_def). */ |
1209 | 1422 |
1210 static void | 1423 static void |
1211 variable_htab_free (void *elem) | 1424 variable_htab_free (void *elem) |
1212 { | 1425 { |
1213 int i; | 1426 int i; |
1214 variable var = (variable) elem; | 1427 variable *var = (variable *) elem; |
1215 location_chain node, next; | 1428 location_chain *node, *next; |
1216 | 1429 |
1217 gcc_checking_assert (var->refcount > 0); | 1430 gcc_checking_assert (var->refcount > 0); |
1218 | 1431 |
1219 var->refcount--; | 1432 var->refcount--; |
1220 if (var->refcount > 0) | 1433 if (var->refcount > 0) |
1223 for (i = 0; i < var->n_var_parts; i++) | 1436 for (i = 0; i < var->n_var_parts; i++) |
1224 { | 1437 { |
1225 for (node = var->var_part[i].loc_chain; node; node = next) | 1438 for (node = var->var_part[i].loc_chain; node; node = next) |
1226 { | 1439 { |
1227 next = node->next; | 1440 next = node->next; |
1228 pool_free (loc_chain_pool, node); | 1441 delete node; |
1229 } | 1442 } |
1230 var->var_part[i].loc_chain = NULL; | 1443 var->var_part[i].loc_chain = NULL; |
1231 } | 1444 } |
1232 pool_free (dv_pool (var->dv), var); | 1445 if (var->onepart && VAR_LOC_1PAUX (var)) |
1233 } | 1446 { |
1234 | 1447 loc_exp_dep_clear (var); |
1235 /* The hash function for value_chains htab, computes the hash value | 1448 if (VAR_LOC_DEP_LST (var)) |
1236 from the VALUE. */ | 1449 VAR_LOC_DEP_LST (var)->pprev = NULL; |
1237 | 1450 XDELETE (VAR_LOC_1PAUX (var)); |
1238 static hashval_t | 1451 /* These may be reused across functions, so reset |
1239 value_chain_htab_hash (const void *x) | 1452 e.g. NO_LOC_P. */ |
1240 { | 1453 if (var->onepart == ONEPART_DEXPR) |
1241 const_value_chain const v = (const_value_chain) x; | 1454 set_dv_changed (var->dv, true); |
1242 | 1455 } |
1243 return dv_htab_hash (v->dv); | 1456 onepart_pool (var->onepart).remove (var); |
1244 } | |
1245 | |
1246 /* Compare the VALUE X with VALUE Y. */ | |
1247 | |
1248 static int | |
1249 value_chain_htab_eq (const void *x, const void *y) | |
1250 { | |
1251 const_value_chain const v = (const_value_chain) x; | |
1252 decl_or_value dv = CONST_CAST2 (decl_or_value, const void *, y); | |
1253 | |
1254 return dv_as_opaque (v->dv) == dv_as_opaque (dv); | |
1255 } | 1457 } |
1256 | 1458 |
1257 /* Initialize the set (array) SET of attrs to empty lists. */ | 1459 /* Initialize the set (array) SET of attrs to empty lists. */ |
1258 | 1460 |
1259 static void | 1461 static void |
1260 init_attrs_list_set (attrs *set) | 1462 init_attrs_list_set (attrs **set) |
1261 { | 1463 { |
1262 int i; | 1464 int i; |
1263 | 1465 |
1264 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) | 1466 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
1265 set[i] = NULL; | 1467 set[i] = NULL; |
1266 } | 1468 } |
1267 | 1469 |
1268 /* Make the list *LISTP empty. */ | 1470 /* Make the list *LISTP empty. */ |
1269 | 1471 |
1270 static void | 1472 static void |
1271 attrs_list_clear (attrs *listp) | 1473 attrs_list_clear (attrs **listp) |
1272 { | 1474 { |
1273 attrs list, next; | 1475 attrs *list, *next; |
1274 | 1476 |
1275 for (list = *listp; list; list = next) | 1477 for (list = *listp; list; list = next) |
1276 { | 1478 { |
1277 next = list->next; | 1479 next = list->next; |
1278 pool_free (attrs_pool, list); | 1480 delete list; |
1279 } | 1481 } |
1280 *listp = NULL; | 1482 *listp = NULL; |
1281 } | 1483 } |
1282 | 1484 |
1283 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */ | 1485 /* Return true if the pair of DECL and OFFSET is the member of the LIST. */ |
1284 | 1486 |
1285 static attrs | 1487 static attrs * |
1286 attrs_list_member (attrs list, decl_or_value dv, HOST_WIDE_INT offset) | 1488 attrs_list_member (attrs *list, decl_or_value dv, HOST_WIDE_INT offset) |
1287 { | 1489 { |
1288 for (; list; list = list->next) | 1490 for (; list; list = list->next) |
1289 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset) | 1491 if (dv_as_opaque (list->dv) == dv_as_opaque (dv) && list->offset == offset) |
1290 return list; | 1492 return list; |
1291 return NULL; | 1493 return NULL; |
1292 } | 1494 } |
1293 | 1495 |
1294 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */ | 1496 /* Insert the triplet DECL, OFFSET, LOC to the list *LISTP. */ |
1295 | 1497 |
1296 static void | 1498 static void |
1297 attrs_list_insert (attrs *listp, decl_or_value dv, | 1499 attrs_list_insert (attrs **listp, decl_or_value dv, |
1298 HOST_WIDE_INT offset, rtx loc) | 1500 HOST_WIDE_INT offset, rtx loc) |
1299 { | 1501 { |
1300 attrs list; | 1502 attrs *list = new attrs; |
1301 | |
1302 list = (attrs) pool_alloc (attrs_pool); | |
1303 list->loc = loc; | 1503 list->loc = loc; |
1304 list->dv = dv; | 1504 list->dv = dv; |
1305 list->offset = offset; | 1505 list->offset = offset; |
1306 list->next = *listp; | 1506 list->next = *listp; |
1307 *listp = list; | 1507 *listp = list; |
1308 } | 1508 } |
1309 | 1509 |
1310 /* Copy all nodes from SRC and create a list *DSTP of the copies. */ | 1510 /* Copy all nodes from SRC and create a list *DSTP of the copies. */ |
1311 | 1511 |
1312 static void | 1512 static void |
1313 attrs_list_copy (attrs *dstp, attrs src) | 1513 attrs_list_copy (attrs **dstp, attrs *src) |
1314 { | 1514 { |
1315 attrs n; | |
1316 | |
1317 attrs_list_clear (dstp); | 1515 attrs_list_clear (dstp); |
1318 for (; src; src = src->next) | 1516 for (; src; src = src->next) |
1319 { | 1517 { |
1320 n = (attrs) pool_alloc (attrs_pool); | 1518 attrs *n = new attrs; |
1321 n->loc = src->loc; | 1519 n->loc = src->loc; |
1322 n->dv = src->dv; | 1520 n->dv = src->dv; |
1323 n->offset = src->offset; | 1521 n->offset = src->offset; |
1324 n->next = *dstp; | 1522 n->next = *dstp; |
1325 *dstp = n; | 1523 *dstp = n; |
1327 } | 1525 } |
1328 | 1526 |
1329 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */ | 1527 /* Add all nodes from SRC which are not in *DSTP to *DSTP. */ |
1330 | 1528 |
1331 static void | 1529 static void |
1332 attrs_list_union (attrs *dstp, attrs src) | 1530 attrs_list_union (attrs **dstp, attrs *src) |
1333 { | 1531 { |
1334 for (; src; src = src->next) | 1532 for (; src; src = src->next) |
1335 { | 1533 { |
1336 if (!attrs_list_member (*dstp, src->dv, src->offset)) | 1534 if (!attrs_list_member (*dstp, src->dv, src->offset)) |
1337 attrs_list_insert (dstp, src->dv, src->offset, src->loc); | 1535 attrs_list_insert (dstp, src->dv, src->offset, src->loc); |
1340 | 1538 |
1341 /* Combine nodes that are not onepart nodes from SRC and SRC2 into | 1539 /* Combine nodes that are not onepart nodes from SRC and SRC2 into |
1342 *DSTP. */ | 1540 *DSTP. */ |
1343 | 1541 |
1344 static void | 1542 static void |
1345 attrs_list_mpdv_union (attrs *dstp, attrs src, attrs src2) | 1543 attrs_list_mpdv_union (attrs **dstp, attrs *src, attrs *src2) |
1346 { | 1544 { |
1347 gcc_assert (!*dstp); | 1545 gcc_assert (!*dstp); |
1348 for (; src; src = src->next) | 1546 for (; src; src = src->next) |
1349 { | 1547 { |
1350 if (!dv_onepart_p (src->dv)) | 1548 if (!dv_onepart_p (src->dv)) |
1361 /* Shared hashtable support. */ | 1559 /* Shared hashtable support. */ |
1362 | 1560 |
1363 /* Return true if VARS is shared. */ | 1561 /* Return true if VARS is shared. */ |
1364 | 1562 |
1365 static inline bool | 1563 static inline bool |
1366 shared_hash_shared (shared_hash vars) | 1564 shared_hash_shared (shared_hash *vars) |
1367 { | 1565 { |
1368 return vars->refcount > 1; | 1566 return vars->refcount > 1; |
1369 } | 1567 } |
1370 | 1568 |
1371 /* Return the hash table for VARS. */ | 1569 /* Return the hash table for VARS. */ |
1372 | 1570 |
1373 static inline htab_t | 1571 static inline variable_table_type * |
1374 shared_hash_htab (shared_hash vars) | 1572 shared_hash_htab (shared_hash *vars) |
1375 { | 1573 { |
1376 return vars->htab; | 1574 return vars->htab; |
1377 } | 1575 } |
1378 | 1576 |
1379 /* Return true if VAR is shared, or maybe because VARS is shared. */ | 1577 /* Return true if VAR is shared, or maybe because VARS is shared. */ |
1380 | 1578 |
1381 static inline bool | 1579 static inline bool |
1382 shared_var_p (variable var, shared_hash vars) | 1580 shared_var_p (variable *var, shared_hash *vars) |
1383 { | 1581 { |
1384 /* Don't count an entry in the changed_variables table as a duplicate. */ | 1582 /* Don't count an entry in the changed_variables table as a duplicate. */ |
1385 return ((var->refcount > 1 + (int) var->in_changed_variables) | 1583 return ((var->refcount > 1 + (int) var->in_changed_variables) |
1386 || shared_hash_shared (vars)); | 1584 || shared_hash_shared (vars)); |
1387 } | 1585 } |
1388 | 1586 |
1389 /* Copy variables into a new hash table. */ | 1587 /* Copy variables into a new hash table. */ |
1390 | 1588 |
1391 static shared_hash | 1589 static shared_hash * |
1392 shared_hash_unshare (shared_hash vars) | 1590 shared_hash_unshare (shared_hash *vars) |
1393 { | 1591 { |
1394 shared_hash new_vars = (shared_hash) pool_alloc (shared_hash_pool); | 1592 shared_hash *new_vars = new shared_hash; |
1395 gcc_assert (vars->refcount > 1); | 1593 gcc_assert (vars->refcount > 1); |
1396 new_vars->refcount = 1; | 1594 new_vars->refcount = 1; |
1397 new_vars->htab | 1595 new_vars->htab = new variable_table_type (vars->htab->elements () + 3); |
1398 = htab_create (htab_elements (vars->htab) + 3, variable_htab_hash, | |
1399 variable_htab_eq, variable_htab_free); | |
1400 vars_copy (new_vars->htab, vars->htab); | 1596 vars_copy (new_vars->htab, vars->htab); |
1401 vars->refcount--; | 1597 vars->refcount--; |
1402 return new_vars; | 1598 return new_vars; |
1403 } | 1599 } |
1404 | 1600 |
1405 /* Increment reference counter on VARS and return it. */ | 1601 /* Increment reference counter on VARS and return it. */ |
1406 | 1602 |
1407 static inline shared_hash | 1603 static inline shared_hash * |
1408 shared_hash_copy (shared_hash vars) | 1604 shared_hash_copy (shared_hash *vars) |
1409 { | 1605 { |
1410 vars->refcount++; | 1606 vars->refcount++; |
1411 return vars; | 1607 return vars; |
1412 } | 1608 } |
1413 | 1609 |
1414 /* Decrement reference counter and destroy hash table if not shared | 1610 /* Decrement reference counter and destroy hash table if not shared |
1415 anymore. */ | 1611 anymore. */ |
1416 | 1612 |
1417 static void | 1613 static void |
1418 shared_hash_destroy (shared_hash vars) | 1614 shared_hash_destroy (shared_hash *vars) |
1419 { | 1615 { |
1420 gcc_checking_assert (vars->refcount > 0); | 1616 gcc_checking_assert (vars->refcount > 0); |
1421 if (--vars->refcount == 0) | 1617 if (--vars->refcount == 0) |
1422 { | 1618 { |
1423 htab_delete (vars->htab); | 1619 delete vars->htab; |
1424 pool_free (shared_hash_pool, vars); | 1620 delete vars; |
1425 } | 1621 } |
1426 } | 1622 } |
1427 | 1623 |
1428 /* Unshare *PVARS if shared and return slot for DV. If INS is | 1624 /* Unshare *PVARS if shared and return slot for DV. If INS is |
1429 INSERT, insert it if not already present. */ | 1625 INSERT, insert it if not already present. */ |
1430 | 1626 |
1431 static inline void ** | 1627 static inline variable ** |
1432 shared_hash_find_slot_unshare_1 (shared_hash *pvars, decl_or_value dv, | 1628 shared_hash_find_slot_unshare_1 (shared_hash **pvars, decl_or_value dv, |
1433 hashval_t dvhash, enum insert_option ins) | 1629 hashval_t dvhash, enum insert_option ins) |
1434 { | 1630 { |
1435 if (shared_hash_shared (*pvars)) | 1631 if (shared_hash_shared (*pvars)) |
1436 *pvars = shared_hash_unshare (*pvars); | 1632 *pvars = shared_hash_unshare (*pvars); |
1437 return htab_find_slot_with_hash (shared_hash_htab (*pvars), dv, dvhash, ins); | 1633 return shared_hash_htab (*pvars)->find_slot_with_hash (dv, dvhash, ins); |
1438 } | 1634 } |
1439 | 1635 |
1440 static inline void ** | 1636 static inline variable ** |
1441 shared_hash_find_slot_unshare (shared_hash *pvars, decl_or_value dv, | 1637 shared_hash_find_slot_unshare (shared_hash **pvars, decl_or_value dv, |
1442 enum insert_option ins) | 1638 enum insert_option ins) |
1443 { | 1639 { |
1444 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins); | 1640 return shared_hash_find_slot_unshare_1 (pvars, dv, dv_htab_hash (dv), ins); |
1445 } | 1641 } |
1446 | 1642 |
1447 /* Return slot for DV, if it is already present in the hash table. | 1643 /* Return slot for DV, if it is already present in the hash table. |
1448 If it is not present, insert it only VARS is not shared, otherwise | 1644 If it is not present, insert it only VARS is not shared, otherwise |
1449 return NULL. */ | 1645 return NULL. */ |
1450 | 1646 |
1451 static inline void ** | 1647 static inline variable ** |
1452 shared_hash_find_slot_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash) | 1648 shared_hash_find_slot_1 (shared_hash *vars, decl_or_value dv, hashval_t dvhash) |
1453 { | 1649 { |
1454 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash, | 1650 return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, |
1455 shared_hash_shared (vars) | 1651 shared_hash_shared (vars) |
1456 ? NO_INSERT : INSERT); | 1652 ? NO_INSERT : INSERT); |
1457 } | 1653 } |
1458 | 1654 |
1459 static inline void ** | 1655 static inline variable ** |
1460 shared_hash_find_slot (shared_hash vars, decl_or_value dv) | 1656 shared_hash_find_slot (shared_hash *vars, decl_or_value dv) |
1461 { | 1657 { |
1462 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv)); | 1658 return shared_hash_find_slot_1 (vars, dv, dv_htab_hash (dv)); |
1463 } | 1659 } |
1464 | 1660 |
1465 /* Return slot for DV only if it is already present in the hash table. */ | 1661 /* Return slot for DV only if it is already present in the hash table. */ |
1466 | 1662 |
1467 static inline void ** | 1663 static inline variable ** |
1468 shared_hash_find_slot_noinsert_1 (shared_hash vars, decl_or_value dv, | 1664 shared_hash_find_slot_noinsert_1 (shared_hash *vars, decl_or_value dv, |
1469 hashval_t dvhash) | 1665 hashval_t dvhash) |
1470 { | 1666 { |
1471 return htab_find_slot_with_hash (shared_hash_htab (vars), dv, dvhash, | 1667 return shared_hash_htab (vars)->find_slot_with_hash (dv, dvhash, NO_INSERT); |
1472 NO_INSERT); | 1668 } |
1473 } | 1669 |
1474 | 1670 static inline variable ** |
1475 static inline void ** | 1671 shared_hash_find_slot_noinsert (shared_hash *vars, decl_or_value dv) |
1476 shared_hash_find_slot_noinsert (shared_hash vars, decl_or_value dv) | |
1477 { | 1672 { |
1478 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv)); | 1673 return shared_hash_find_slot_noinsert_1 (vars, dv, dv_htab_hash (dv)); |
1479 } | 1674 } |
1480 | 1675 |
1481 /* Return variable for DV or NULL if not already present in the hash | 1676 /* Return variable for DV or NULL if not already present in the hash |
1482 table. */ | 1677 table. */ |
1483 | 1678 |
1484 static inline variable | 1679 static inline variable * |
1485 shared_hash_find_1 (shared_hash vars, decl_or_value dv, hashval_t dvhash) | 1680 shared_hash_find_1 (shared_hash *vars, decl_or_value dv, hashval_t dvhash) |
1486 { | 1681 { |
1487 return (variable) htab_find_with_hash (shared_hash_htab (vars), dv, dvhash); | 1682 return shared_hash_htab (vars)->find_with_hash (dv, dvhash); |
1488 } | 1683 } |
1489 | 1684 |
1490 static inline variable | 1685 static inline variable * |
1491 shared_hash_find (shared_hash vars, decl_or_value dv) | 1686 shared_hash_find (shared_hash *vars, decl_or_value dv) |
1492 { | 1687 { |
1493 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv)); | 1688 return shared_hash_find_1 (vars, dv, dv_htab_hash (dv)); |
1494 } | 1689 } |
1495 | 1690 |
1496 /* Return true if TVAL is better than CVAL as a canonival value. We | 1691 /* Return true if TVAL is better than CVAL as a canonival value. We |
1511 | 1706 |
1512 static bool dst_can_be_shared; | 1707 static bool dst_can_be_shared; |
1513 | 1708 |
1514 /* Return a copy of a variable VAR and insert it to dataflow set SET. */ | 1709 /* Return a copy of a variable VAR and insert it to dataflow set SET. */ |
1515 | 1710 |
1516 static void ** | 1711 static variable ** |
1517 unshare_variable (dataflow_set *set, void **slot, variable var, | 1712 unshare_variable (dataflow_set *set, variable **slot, variable *var, |
1518 enum var_init_status initialized) | 1713 enum var_init_status initialized) |
1519 { | 1714 { |
1520 variable new_var; | 1715 variable *new_var; |
1521 int i; | 1716 int i; |
1522 | 1717 |
1523 new_var = (variable) pool_alloc (dv_pool (var->dv)); | 1718 new_var = onepart_pool_allocate (var->onepart); |
1524 new_var->dv = var->dv; | 1719 new_var->dv = var->dv; |
1525 new_var->refcount = 1; | 1720 new_var->refcount = 1; |
1526 var->refcount--; | 1721 var->refcount--; |
1527 new_var->n_var_parts = var->n_var_parts; | 1722 new_var->n_var_parts = var->n_var_parts; |
1528 new_var->cur_loc_changed = var->cur_loc_changed; | 1723 new_var->onepart = var->onepart; |
1529 var->cur_loc_changed = false; | |
1530 new_var->in_changed_variables = false; | 1724 new_var->in_changed_variables = false; |
1531 | 1725 |
1532 if (! flag_var_tracking_uninit) | 1726 if (! flag_var_tracking_uninit) |
1533 initialized = VAR_INIT_STATUS_INITIALIZED; | 1727 initialized = VAR_INIT_STATUS_INITIALIZED; |
1534 | 1728 |
1535 for (i = 0; i < var->n_var_parts; i++) | 1729 for (i = 0; i < var->n_var_parts; i++) |
1536 { | 1730 { |
1537 location_chain node; | 1731 location_chain *node; |
1538 location_chain *nextp; | 1732 location_chain **nextp; |
1539 | 1733 |
1540 new_var->var_part[i].offset = var->var_part[i].offset; | 1734 if (i == 0 && var->onepart) |
1735 { | |
1736 /* One-part auxiliary data is only used while emitting | |
1737 notes, so propagate it to the new variable in the active | |
1738 dataflow set. If we're not emitting notes, this will be | |
1739 a no-op. */ | |
1740 gcc_checking_assert (!VAR_LOC_1PAUX (var) || emit_notes); | |
1741 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (var); | |
1742 VAR_LOC_1PAUX (var) = NULL; | |
1743 } | |
1744 else | |
1745 VAR_PART_OFFSET (new_var, i) = VAR_PART_OFFSET (var, i); | |
1541 nextp = &new_var->var_part[i].loc_chain; | 1746 nextp = &new_var->var_part[i].loc_chain; |
1542 for (node = var->var_part[i].loc_chain; node; node = node->next) | 1747 for (node = var->var_part[i].loc_chain; node; node = node->next) |
1543 { | 1748 { |
1544 location_chain new_lc; | 1749 location_chain *new_lc; |
1545 | 1750 |
1546 new_lc = (location_chain) pool_alloc (loc_chain_pool); | 1751 new_lc = new location_chain; |
1547 new_lc->next = NULL; | 1752 new_lc->next = NULL; |
1548 if (node->init > initialized) | 1753 if (node->init > initialized) |
1549 new_lc->init = node->init; | 1754 new_lc->init = node->init; |
1550 else | 1755 else |
1551 new_lc->init = initialized; | 1756 new_lc->init = initialized; |
1568 else if (set->traversed_vars && set->vars != set->traversed_vars) | 1773 else if (set->traversed_vars && set->vars != set->traversed_vars) |
1569 slot = shared_hash_find_slot_noinsert (set->vars, var->dv); | 1774 slot = shared_hash_find_slot_noinsert (set->vars, var->dv); |
1570 *slot = new_var; | 1775 *slot = new_var; |
1571 if (var->in_changed_variables) | 1776 if (var->in_changed_variables) |
1572 { | 1777 { |
1573 void **cslot | 1778 variable **cslot |
1574 = htab_find_slot_with_hash (changed_variables, var->dv, | 1779 = changed_variables->find_slot_with_hash (var->dv, |
1575 dv_htab_hash (var->dv), NO_INSERT); | 1780 dv_htab_hash (var->dv), |
1781 NO_INSERT); | |
1576 gcc_assert (*cslot == (void *) var); | 1782 gcc_assert (*cslot == (void *) var); |
1577 var->in_changed_variables = false; | 1783 var->in_changed_variables = false; |
1578 variable_htab_free (var); | 1784 variable_htab_free (var); |
1579 *cslot = new_var; | 1785 *cslot = new_var; |
1580 new_var->in_changed_variables = true; | 1786 new_var->in_changed_variables = true; |
1583 } | 1789 } |
1584 | 1790 |
1585 /* Copy all variables from hash table SRC to hash table DST. */ | 1791 /* Copy all variables from hash table SRC to hash table DST. */ |
1586 | 1792 |
1587 static void | 1793 static void |
1588 vars_copy (htab_t dst, htab_t src) | 1794 vars_copy (variable_table_type *dst, variable_table_type *src) |
1589 { | 1795 { |
1590 htab_iterator hi; | 1796 variable_iterator_type hi; |
1591 variable var; | 1797 variable *var; |
1592 | 1798 |
1593 FOR_EACH_HTAB_ELEMENT (src, var, variable, hi) | 1799 FOR_EACH_HASH_TABLE_ELEMENT (*src, var, variable, hi) |
1594 { | 1800 { |
1595 void **dstp; | 1801 variable **dstp; |
1596 var->refcount++; | 1802 var->refcount++; |
1597 dstp = htab_find_slot_with_hash (dst, var->dv, | 1803 dstp = dst->find_slot_with_hash (var->dv, dv_htab_hash (var->dv), |
1598 dv_htab_hash (var->dv), | |
1599 INSERT); | 1804 INSERT); |
1600 *dstp = var; | 1805 *dstp = var; |
1601 } | 1806 } |
1602 } | 1807 } |
1603 | 1808 |
1604 /* Map a decl to its main debug decl. */ | 1809 /* Map a decl to its main debug decl. */ |
1605 | 1810 |
1606 static inline tree | 1811 static inline tree |
1607 var_debug_decl (tree decl) | 1812 var_debug_decl (tree decl) |
1608 { | 1813 { |
1609 if (decl && DECL_P (decl) | 1814 if (decl && VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl)) |
1610 && DECL_DEBUG_EXPR_IS_FROM (decl)) | |
1611 { | 1815 { |
1612 tree debugdecl = DECL_DEBUG_EXPR (decl); | 1816 tree debugdecl = DECL_DEBUG_EXPR (decl); |
1613 if (debugdecl && DECL_P (debugdecl)) | 1817 if (DECL_P (debugdecl)) |
1614 decl = debugdecl; | 1818 decl = debugdecl; |
1615 } | 1819 } |
1616 | 1820 |
1617 return decl; | 1821 return decl; |
1618 } | 1822 } |
1622 static void | 1826 static void |
1623 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized, | 1827 var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized, |
1624 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src, | 1828 decl_or_value dv, HOST_WIDE_INT offset, rtx set_src, |
1625 enum insert_option iopt) | 1829 enum insert_option iopt) |
1626 { | 1830 { |
1627 attrs node; | 1831 attrs *node; |
1628 bool decl_p = dv_is_decl_p (dv); | 1832 bool decl_p = dv_is_decl_p (dv); |
1629 | 1833 |
1630 if (decl_p) | 1834 if (decl_p) |
1631 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv))); | 1835 dv = dv_from_decl (var_debug_decl (dv_as_decl (dv))); |
1632 | 1836 |
1653 } | 1857 } |
1654 | 1858 |
1655 static enum var_init_status | 1859 static enum var_init_status |
1656 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv) | 1860 get_init_value (dataflow_set *set, rtx loc, decl_or_value dv) |
1657 { | 1861 { |
1658 variable var; | 1862 variable *var; |
1659 int i; | 1863 int i; |
1660 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN; | 1864 enum var_init_status ret_val = VAR_INIT_STATUS_UNKNOWN; |
1661 | 1865 |
1662 if (! flag_var_tracking_uninit) | 1866 if (! flag_var_tracking_uninit) |
1663 return VAR_INIT_STATUS_INITIALIZED; | 1867 return VAR_INIT_STATUS_INITIALIZED; |
1665 var = shared_hash_find (set->vars, dv); | 1869 var = shared_hash_find (set->vars, dv); |
1666 if (var) | 1870 if (var) |
1667 { | 1871 { |
1668 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++) | 1872 for (i = 0; i < var->n_var_parts && ret_val == VAR_INIT_STATUS_UNKNOWN; i++) |
1669 { | 1873 { |
1670 location_chain nextp; | 1874 location_chain *nextp; |
1671 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next) | 1875 for (nextp = var->var_part[i].loc_chain; nextp; nextp = nextp->next) |
1672 if (rtx_equal_p (nextp->loc, loc)) | 1876 if (rtx_equal_p (nextp->loc, loc)) |
1673 { | 1877 { |
1674 ret_val = nextp->init; | 1878 ret_val = nextp->init; |
1675 break; | 1879 break; |
1691 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify, | 1895 var_reg_delete_and_set (dataflow_set *set, rtx loc, bool modify, |
1692 enum var_init_status initialized, rtx set_src) | 1896 enum var_init_status initialized, rtx set_src) |
1693 { | 1897 { |
1694 tree decl = REG_EXPR (loc); | 1898 tree decl = REG_EXPR (loc); |
1695 HOST_WIDE_INT offset = REG_OFFSET (loc); | 1899 HOST_WIDE_INT offset = REG_OFFSET (loc); |
1696 attrs node, next; | 1900 attrs *node, *next; |
1697 attrs *nextp; | 1901 attrs **nextp; |
1698 | 1902 |
1699 decl = var_debug_decl (decl); | 1903 decl = var_debug_decl (decl); |
1700 | 1904 |
1701 if (initialized == VAR_INIT_STATUS_UNKNOWN) | 1905 if (initialized == VAR_INIT_STATUS_UNKNOWN) |
1702 initialized = get_init_value (set, loc, dv_from_decl (decl)); | 1906 initialized = get_init_value (set, loc, dv_from_decl (decl)); |
1706 { | 1910 { |
1707 next = node->next; | 1911 next = node->next; |
1708 if (dv_as_opaque (node->dv) != decl || node->offset != offset) | 1912 if (dv_as_opaque (node->dv) != decl || node->offset != offset) |
1709 { | 1913 { |
1710 delete_variable_part (set, node->loc, node->dv, node->offset); | 1914 delete_variable_part (set, node->loc, node->dv, node->offset); |
1711 pool_free (attrs_pool, node); | 1915 delete node; |
1712 *nextp = next; | 1916 *nextp = next; |
1713 } | 1917 } |
1714 else | 1918 else |
1715 { | 1919 { |
1716 node->loc = loc; | 1920 node->loc = loc; |
1728 association with onepart dvs too. */ | 1932 association with onepart dvs too. */ |
1729 | 1933 |
1730 static void | 1934 static void |
1731 var_reg_delete (dataflow_set *set, rtx loc, bool clobber) | 1935 var_reg_delete (dataflow_set *set, rtx loc, bool clobber) |
1732 { | 1936 { |
1733 attrs *nextp = &set->regs[REGNO (loc)]; | 1937 attrs **nextp = &set->regs[REGNO (loc)]; |
1734 attrs node, next; | 1938 attrs *node, *next; |
1735 | 1939 |
1736 if (clobber) | 1940 if (clobber) |
1737 { | 1941 { |
1738 tree decl = REG_EXPR (loc); | 1942 tree decl = REG_EXPR (loc); |
1739 HOST_WIDE_INT offset = REG_OFFSET (loc); | 1943 HOST_WIDE_INT offset = REG_OFFSET (loc); |
1747 { | 1951 { |
1748 next = node->next; | 1952 next = node->next; |
1749 if (clobber || !dv_onepart_p (node->dv)) | 1953 if (clobber || !dv_onepart_p (node->dv)) |
1750 { | 1954 { |
1751 delete_variable_part (set, node->loc, node->dv, node->offset); | 1955 delete_variable_part (set, node->loc, node->dv, node->offset); |
1752 pool_free (attrs_pool, node); | 1956 delete node; |
1753 *nextp = next; | 1957 *nextp = next; |
1754 } | 1958 } |
1755 else | 1959 else |
1756 nextp = &node->next; | 1960 nextp = &node->next; |
1757 } | 1961 } |
1760 /* Delete content of register with number REGNO in dataflow set SET. */ | 1964 /* Delete content of register with number REGNO in dataflow set SET. */ |
1761 | 1965 |
1762 static void | 1966 static void |
1763 var_regno_delete (dataflow_set *set, int regno) | 1967 var_regno_delete (dataflow_set *set, int regno) |
1764 { | 1968 { |
1765 attrs *reg = &set->regs[regno]; | 1969 attrs **reg = &set->regs[regno]; |
1766 attrs node, next; | 1970 attrs *node, *next; |
1767 | 1971 |
1768 for (node = *reg; node; node = next) | 1972 for (node = *reg; node; node = next) |
1769 { | 1973 { |
1770 next = node->next; | 1974 next = node->next; |
1771 delete_variable_part (set, node->loc, node->dv, node->offset); | 1975 delete_variable_part (set, node->loc, node->dv, node->offset); |
1772 pool_free (attrs_pool, node); | 1976 delete node; |
1773 } | 1977 } |
1774 *reg = NULL; | 1978 *reg = NULL; |
1979 } | |
1980 | |
1981 /* Return true if I is the negated value of a power of two. */ | |
1982 static bool | |
1983 negative_power_of_two_p (HOST_WIDE_INT i) | |
1984 { | |
1985 unsigned HOST_WIDE_INT x = -(unsigned HOST_WIDE_INT)i; | |
1986 return pow2_or_zerop (x); | |
1987 } | |
1988 | |
1989 /* Strip constant offsets and alignments off of LOC. Return the base | |
1990 expression. */ | |
1991 | |
1992 static rtx | |
1993 vt_get_canonicalize_base (rtx loc) | |
1994 { | |
1995 while ((GET_CODE (loc) == PLUS | |
1996 || GET_CODE (loc) == AND) | |
1997 && GET_CODE (XEXP (loc, 1)) == CONST_INT | |
1998 && (GET_CODE (loc) != AND | |
1999 || negative_power_of_two_p (INTVAL (XEXP (loc, 1))))) | |
2000 loc = XEXP (loc, 0); | |
2001 | |
2002 return loc; | |
2003 } | |
2004 | |
2005 /* This caches canonicalized addresses for VALUEs, computed using | |
2006 information in the global cselib table. */ | |
2007 static hash_map<rtx, rtx> *global_get_addr_cache; | |
2008 | |
2009 /* This caches canonicalized addresses for VALUEs, computed using | |
2010 information from the global cache and information pertaining to a | |
2011 basic block being analyzed. */ | |
2012 static hash_map<rtx, rtx> *local_get_addr_cache; | |
2013 | |
2014 static rtx vt_canonicalize_addr (dataflow_set *, rtx); | |
2015 | |
2016 /* Return the canonical address for LOC, that must be a VALUE, using a | |
2017 cached global equivalence or computing it and storing it in the | |
2018 global cache. */ | |
2019 | |
2020 static rtx | |
2021 get_addr_from_global_cache (rtx const loc) | |
2022 { | |
2023 rtx x; | |
2024 | |
2025 gcc_checking_assert (GET_CODE (loc) == VALUE); | |
2026 | |
2027 bool existed; | |
2028 rtx *slot = &global_get_addr_cache->get_or_insert (loc, &existed); | |
2029 if (existed) | |
2030 return *slot; | |
2031 | |
2032 x = canon_rtx (get_addr (loc)); | |
2033 | |
2034 /* Tentative, avoiding infinite recursion. */ | |
2035 *slot = x; | |
2036 | |
2037 if (x != loc) | |
2038 { | |
2039 rtx nx = vt_canonicalize_addr (NULL, x); | |
2040 if (nx != x) | |
2041 { | |
2042 /* The table may have moved during recursion, recompute | |
2043 SLOT. */ | |
2044 *global_get_addr_cache->get (loc) = x = nx; | |
2045 } | |
2046 } | |
2047 | |
2048 return x; | |
2049 } | |
2050 | |
2051 /* Return the canonical address for LOC, that must be a VALUE, using a | |
2052 cached local equivalence or computing it and storing it in the | |
2053 local cache. */ | |
2054 | |
2055 static rtx | |
2056 get_addr_from_local_cache (dataflow_set *set, rtx const loc) | |
2057 { | |
2058 rtx x; | |
2059 decl_or_value dv; | |
2060 variable *var; | |
2061 location_chain *l; | |
2062 | |
2063 gcc_checking_assert (GET_CODE (loc) == VALUE); | |
2064 | |
2065 bool existed; | |
2066 rtx *slot = &local_get_addr_cache->get_or_insert (loc, &existed); | |
2067 if (existed) | |
2068 return *slot; | |
2069 | |
2070 x = get_addr_from_global_cache (loc); | |
2071 | |
2072 /* Tentative, avoiding infinite recursion. */ | |
2073 *slot = x; | |
2074 | |
2075 /* Recurse to cache local expansion of X, or if we need to search | |
2076 for a VALUE in the expansion. */ | |
2077 if (x != loc) | |
2078 { | |
2079 rtx nx = vt_canonicalize_addr (set, x); | |
2080 if (nx != x) | |
2081 { | |
2082 slot = local_get_addr_cache->get (loc); | |
2083 *slot = x = nx; | |
2084 } | |
2085 return x; | |
2086 } | |
2087 | |
2088 dv = dv_from_rtx (x); | |
2089 var = shared_hash_find (set->vars, dv); | |
2090 if (!var) | |
2091 return x; | |
2092 | |
2093 /* Look for an improved equivalent expression. */ | |
2094 for (l = var->var_part[0].loc_chain; l; l = l->next) | |
2095 { | |
2096 rtx base = vt_get_canonicalize_base (l->loc); | |
2097 if (GET_CODE (base) == VALUE | |
2098 && canon_value_cmp (base, loc)) | |
2099 { | |
2100 rtx nx = vt_canonicalize_addr (set, l->loc); | |
2101 if (x != nx) | |
2102 { | |
2103 slot = local_get_addr_cache->get (loc); | |
2104 *slot = x = nx; | |
2105 } | |
2106 break; | |
2107 } | |
2108 } | |
2109 | |
2110 return x; | |
2111 } | |
2112 | |
/* Canonicalize LOC using equivalences from SET in addition to those
   in the cselib static table.  It expects a VALUE-based expression,
   and it will only substitute VALUEs with other VALUEs or
   function-global equivalences, so that, if two addresses have base
   VALUEs that are locally or globally related in ways that
   memrefs_conflict_p cares about, they will both canonicalize to
   expressions that have the same base VALUE.

   The use of VALUEs as canonical base addresses enables the canonical
   RTXs to remain unchanged globally, if they resolve to a constant,
   or throughout a basic block otherwise, so that they can be cached
   and the cache needs not be invalidated when REGs, MEMs or such
   change.  */

static rtx
vt_canonicalize_addr (dataflow_set *set, rtx oloc)
{
  /* Accumulated constant offset, peeled off LOC and re-added at the
     end so the base is canonicalized independently of it.  */
  HOST_WIDE_INT ofst = 0;
  machine_mode mode = GET_MODE (oloc);
  rtx loc = oloc;
  rtx x;
  bool retry = true;

  while (retry)
    {
      /* Strip (plus base const) wrappers, folding the constants
	 into OFST.  */
      while (GET_CODE (loc) == PLUS
	     && GET_CODE (XEXP (loc, 1)) == CONST_INT)
	{
	  ofst += INTVAL (XEXP (loc, 1));
	  loc = XEXP (loc, 0);
	}

      /* Alignment operations can't normally be combined, so just
	 canonicalize the base and we're done.  We'll normally have
	 only one stack alignment anyway.  */
      if (GET_CODE (loc) == AND
	  && GET_CODE (XEXP (loc, 1)) == CONST_INT
	  && negative_power_of_two_p (INTVAL (XEXP (loc, 1))))
	{
	  x = vt_canonicalize_addr (set, XEXP (loc, 0));
	  if (x != XEXP (loc, 0))
	    loc = gen_rtx_AND (mode, x, XEXP (loc, 1));
	  retry = false;
	}

      if (GET_CODE (loc) == VALUE)
	{
	  /* Expand the VALUE through the local cache when we have a
	     dataflow set, otherwise only through the global one.  */
	  if (set)
	    loc = get_addr_from_local_cache (set, loc);
	  else
	    loc = get_addr_from_global_cache (loc);

	  /* Consolidate plus_constants.  */
	  while (ofst && GET_CODE (loc) == PLUS
		 && GET_CODE (XEXP (loc, 1)) == CONST_INT)
	    {
	      ofst += INTVAL (XEXP (loc, 1));
	      loc = XEXP (loc, 0);
	    }

	  /* Cached expansions are already canonical; stop here.  */
	  retry = false;
	}
      else
	{
	  x = canon_rtx (loc);
	  /* Iterate only while canon_rtx keeps making progress, and
	     never after the AND case above cleared RETRY.  */
	  if (retry)
	    retry = (x != loc);
	  loc = x;
	}
    }

  /* Add OFST back in.  */
  if (ofst)
    {
      /* Don't build new RTL if we can help it.  */
      if (GET_CODE (oloc) == PLUS
	  && XEXP (oloc, 0) == loc
	  && INTVAL (XEXP (oloc, 1)) == ofst)
	return oloc;

      loc = plus_constant (mode, loc, ofst);
    }

  return loc;
}
2198 | |
2199 /* Return true iff there's a true dependence between MLOC and LOC. | |
2200 MADDR must be a canonicalized version of MLOC's address. */ | |
2201 | |
2202 static inline bool | |
2203 vt_canon_true_dep (dataflow_set *set, rtx mloc, rtx maddr, rtx loc) | |
2204 { | |
2205 if (GET_CODE (loc) != MEM) | |
2206 return false; | |
2207 | |
2208 rtx addr = vt_canonicalize_addr (set, XEXP (loc, 0)); | |
2209 if (!canon_true_dependence (mloc, GET_MODE (mloc), maddr, loc, addr)) | |
2210 return false; | |
2211 | |
2212 return true; | |
2213 } | |
2214 | |
/* Hold parameters for the hashtab traversal function
   drop_overlapping_mem_locs, see below.  */

struct overlapping_mems
{
  /* The dataflow set being updated.  */
  dataflow_set *set;
  /* LOC is the canonicalized MEM being stored into; ADDR is the
     canonicalized form of its address.  */
  rtx loc, addr;
};
2223 | |
/* Remove all MEMs that overlap with COMS->LOC from the location list
   of a hash table entry for a onepart variable.  COMS->ADDR must be a
   canonicalized form of COMS->LOC's address, and COMS->LOC must be
   canonicalized itself.  Returns 1 so the hash table traversal
   continues over all entries.  */

int
drop_overlapping_mem_locs (variable **slot, overlapping_mems *coms)
{
  dataflow_set *set = coms->set;
  rtx mloc = coms->loc, addr = coms->addr;
  variable *var = *slot;

  if (var->onepart != NOT_ONEPART)
    {
      location_chain *loc, **locp;
      bool changed = false;
      rtx cur_loc;

      gcc_assert (var->n_var_parts == 1);

      if (shared_var_p (var, set->vars))
	{
	  /* Scan first: only unshare (copy) the variable if there is
	     actually a conflicting MEM to drop.  */
	  for (loc = var->var_part[0].loc_chain; loc; loc = loc->next)
	    if (vt_canon_true_dep (set, mloc, addr, loc->loc))
	      break;

	  if (!loc)
	    return 1;

	  slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN);
	  var = *slot;
	  gcc_assert (var->n_var_parts == 1);
	}

      if (VAR_LOC_1PAUX (var))
	cur_loc = VAR_LOC_FROM (var);
      else
	cur_loc = var->var_part[0].cur_loc;

      /* Walk the location chain with a pointer-to-link so nodes can
	 be unlinked in place.  */
      for (locp = &var->var_part[0].loc_chain, loc = *locp;
	   loc; loc = *locp)
	{
	  if (!vt_canon_true_dep (set, mloc, addr, loc->loc))
	    {
	      locp = &loc->next;
	      continue;
	    }

	  *locp = loc->next;
	  /* If we have deleted the location which was last emitted
	     we have to emit new location so add the variable to set
	     of changed variables.  */
	  if (cur_loc == loc->loc)
	    {
	      changed = true;
	      var->var_part[0].cur_loc = NULL;
	      if (VAR_LOC_1PAUX (var))
		VAR_LOC_FROM (var) = NULL;
	    }
	  delete loc;
	}

      /* An emptied chain means the variable part is gone; record
	 that so a new (empty) location note gets emitted.  */
      if (!var->var_part[0].loc_chain)
	{
	  var->n_var_parts--;
	  changed = true;
	}
      if (changed)
	variable_was_changed (var, set);
    }

  return 1;
}
2297 | |
2298 /* Remove from SET all VALUE bindings to MEMs that overlap with LOC. */ | |
2299 | |
2300 static void | |
2301 clobber_overlapping_mems (dataflow_set *set, rtx loc) | |
2302 { | |
2303 struct overlapping_mems coms; | |
2304 | |
2305 gcc_checking_assert (GET_CODE (loc) == MEM); | |
2306 | |
2307 coms.set = set; | |
2308 coms.loc = canon_rtx (loc); | |
2309 coms.addr = vt_canonicalize_addr (set, XEXP (loc, 0)); | |
2310 | |
2311 set->traversed_vars = set->vars; | |
2312 shared_hash_htab (set->vars) | |
2313 ->traverse <overlapping_mems*, drop_overlapping_mem_locs> (&coms); | |
2314 set->traversed_vars = NULL; | |
1775 } | 2315 } |
1776 | 2316 |
1777 /* Set the location of DV, OFFSET as the MEM LOC. */ | 2317 /* Set the location of DV, OFFSET as the MEM LOC. */ |
1778 | 2318 |
1779 static void | 2319 static void |
1814 enum var_init_status initialized, rtx set_src) | 2354 enum var_init_status initialized, rtx set_src) |
1815 { | 2355 { |
1816 tree decl = MEM_EXPR (loc); | 2356 tree decl = MEM_EXPR (loc); |
1817 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc); | 2357 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc); |
1818 | 2358 |
2359 clobber_overlapping_mems (set, loc); | |
1819 decl = var_debug_decl (decl); | 2360 decl = var_debug_decl (decl); |
1820 | 2361 |
1821 if (initialized == VAR_INIT_STATUS_UNKNOWN) | 2362 if (initialized == VAR_INIT_STATUS_UNKNOWN) |
1822 initialized = get_init_value (set, loc, dv_from_decl (decl)); | 2363 initialized = get_init_value (set, loc, dv_from_decl (decl)); |
1823 | 2364 |
1834 var_mem_delete (dataflow_set *set, rtx loc, bool clobber) | 2375 var_mem_delete (dataflow_set *set, rtx loc, bool clobber) |
1835 { | 2376 { |
1836 tree decl = MEM_EXPR (loc); | 2377 tree decl = MEM_EXPR (loc); |
1837 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc); | 2378 HOST_WIDE_INT offset = INT_MEM_OFFSET (loc); |
1838 | 2379 |
2380 clobber_overlapping_mems (set, loc); | |
1839 decl = var_debug_decl (decl); | 2381 decl = var_debug_decl (decl); |
1840 if (clobber) | 2382 if (clobber) |
1841 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL); | 2383 clobber_variable_part (set, NULL, dv_from_decl (decl), offset, NULL); |
1842 delete_variable_part (set, loc, dv_from_decl (decl), offset); | 2384 delete_variable_part (set, loc, dv_from_decl (decl), offset); |
1843 } | 2385 } |
1844 | 2386 |
2387 /* Return true if LOC should not be expanded for location expressions, | |
2388 or used in them. */ | |
2389 | |
2390 static inline bool | |
2391 unsuitable_loc (rtx loc) | |
2392 { | |
2393 switch (GET_CODE (loc)) | |
2394 { | |
2395 case PC: | |
2396 case SCRATCH: | |
2397 case CC0: | |
2398 case ASM_INPUT: | |
2399 case ASM_OPERANDS: | |
2400 return true; | |
2401 | |
2402 default: | |
2403 return false; | |
2404 } | |
2405 } | |
2406 | |
/* Bind VAL to LOC in SET.  If MODIFIED, detach LOC from any values
   bound to it.  LOC must be a REG or a MEM; any other kind of
   equivalence is handled statically and must not reach here.  */

static inline void
val_bind (dataflow_set *set, rtx val, rtx loc, bool modified)
{
  if (REG_P (loc))
    {
      /* The register was just written: drop all previous bindings
	 for this regno before recording the new one.  */
      if (modified)
	var_regno_delete (set, REGNO (loc));
      var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
			dv_from_value (val), 0, NULL_RTX, INSERT);
    }
  else if (MEM_P (loc))
    {
      struct elt_loc_list *l = CSELIB_VAL_PTR (val)->locs;

      if (modified)
	clobber_overlapping_mems (set, loc);

      /* Follow a leading VALUE in the cselib location list to its
	 canonical value's own list.  */
      if (l && GET_CODE (l->loc) == VALUE)
	l = canonical_cselib_val (CSELIB_VAL_PTR (l->loc))->locs;

      /* If this MEM is a global constant, we don't need it in the
	 dynamic tables.  ??? We should test this before emitting the
	 micro-op in the first place.  */
      while (l)
	if (GET_CODE (l->loc) == MEM && XEXP (l->loc, 0) == XEXP (loc, 0))
	  break;
	else
	  l = l->next;

      /* Only record the MEM binding when cselib doesn't already know
	 this MEM statically (L scan above found no match).  */
      if (!l)
	var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED,
			  dv_from_value (val), 0, NULL_RTX, INSERT);
    }
  else
    {
      /* Other kinds of equivalences are necessarily static, at least
	 so long as we do not perform substitutions while merging
	 expressions.  */
      /* NOTE: the call below is intentionally dead code, kept as a
	 hint of what would be done were this branch ever reachable.  */
      gcc_unreachable ();
      set_variable_part (set, loc, dv_from_value (val), 0,
			 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT);
    }
}
2453 | |
1845 /* Bind a value to a location it was just stored in. If MODIFIED | 2454 /* Bind a value to a location it was just stored in. If MODIFIED |
1846 holds, assume the location was modified, detaching it from any | 2455 holds, assume the location was modified, detaching it from any |
1847 values bound to it. */ | 2456 values bound to it. */ |
1848 | 2457 |
1849 static void | 2458 static void |
1850 val_store (dataflow_set *set, rtx val, rtx loc, rtx insn, bool modified) | 2459 val_store (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn, |
2460 bool modified) | |
1851 { | 2461 { |
1852 cselib_val *v = CSELIB_VAL_PTR (val); | 2462 cselib_val *v = CSELIB_VAL_PTR (val); |
1853 | 2463 |
1854 gcc_assert (cselib_preserved_value_p (v)); | 2464 gcc_assert (cselib_preserved_value_p (v)); |
1855 | 2465 |
1856 if (dump_file) | 2466 if (dump_file) |
1857 { | 2467 { |
1858 fprintf (dump_file, "%i: ", INSN_UID (insn)); | 2468 fprintf (dump_file, "%i: ", insn ? INSN_UID (insn) : 0); |
2469 print_inline_rtx (dump_file, loc, 0); | |
2470 fprintf (dump_file, " evaluates to "); | |
1859 print_inline_rtx (dump_file, val, 0); | 2471 print_inline_rtx (dump_file, val, 0); |
1860 fprintf (dump_file, " stored in "); | |
1861 print_inline_rtx (dump_file, loc, 0); | |
1862 if (v->locs) | 2472 if (v->locs) |
1863 { | 2473 { |
1864 struct elt_loc_list *l; | 2474 struct elt_loc_list *l; |
1865 for (l = v->locs; l; l = l->next) | 2475 for (l = v->locs; l; l = l->next) |
1866 { | 2476 { |
1869 } | 2479 } |
1870 } | 2480 } |
1871 fprintf (dump_file, "\n"); | 2481 fprintf (dump_file, "\n"); |
1872 } | 2482 } |
1873 | 2483 |
1874 if (REG_P (loc)) | 2484 gcc_checking_assert (!unsuitable_loc (loc)); |
1875 { | 2485 |
1876 if (modified) | 2486 val_bind (set, val, loc, modified); |
1877 var_regno_delete (set, REGNO (loc)); | 2487 } |
1878 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED, | 2488 |
1879 dv_from_value (val), 0, NULL_RTX, INSERT); | 2489 /* Clear (canonical address) slots that reference X. */ |
1880 } | 2490 |
1881 else if (MEM_P (loc)) | 2491 bool |
1882 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED, | 2492 local_get_addr_clear_given_value (rtx const &, rtx *slot, rtx x) |
1883 dv_from_value (val), 0, NULL_RTX, INSERT); | 2493 { |
1884 else | 2494 if (vt_get_canonicalize_base (*slot) == x) |
1885 set_variable_part (set, loc, dv_from_value (val), 0, | 2495 *slot = NULL; |
1886 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT); | 2496 return true; |
1887 } | 2497 } |
1888 | 2498 |
1889 /* Reset this node, detaching all its equivalences. Return the slot | 2499 /* Reset this node, detaching all its equivalences. Return the slot |
1890 in the variable hash table that holds dv, if there is one. */ | 2500 in the variable hash table that holds dv, if there is one. */ |
1891 | 2501 |
1892 static void | 2502 static void |
1893 val_reset (dataflow_set *set, decl_or_value dv) | 2503 val_reset (dataflow_set *set, decl_or_value dv) |
1894 { | 2504 { |
1895 variable var = shared_hash_find (set->vars, dv) ; | 2505 variable *var = shared_hash_find (set->vars, dv) ; |
1896 location_chain node; | 2506 location_chain *node; |
1897 rtx cval; | 2507 rtx cval; |
1898 | 2508 |
1899 if (!var || !var->n_var_parts) | 2509 if (!var || !var->n_var_parts) |
1900 return; | 2510 return; |
1901 | 2511 |
1902 gcc_assert (var->n_var_parts == 1); | 2512 gcc_assert (var->n_var_parts == 1); |
2513 | |
2514 if (var->onepart == ONEPART_VALUE) | |
2515 { | |
2516 rtx x = dv_as_value (dv); | |
2517 | |
2518 /* Relationships in the global cache don't change, so reset the | |
2519 local cache entry only. */ | |
2520 rtx *slot = local_get_addr_cache->get (x); | |
2521 if (slot) | |
2522 { | |
2523 /* If the value resolved back to itself, odds are that other | |
2524 values may have cached it too. These entries now refer | |
2525 to the old X, so detach them too. Entries that used the | |
2526 old X but resolved to something else remain ok as long as | |
2527 that something else isn't also reset. */ | |
2528 if (*slot == x) | |
2529 local_get_addr_cache | |
2530 ->traverse<rtx, local_get_addr_clear_given_value> (x); | |
2531 *slot = NULL; | |
2532 } | |
2533 } | |
1903 | 2534 |
1904 cval = NULL; | 2535 cval = NULL; |
1905 for (node = var->var_part[0].loc_chain; node; node = node->next) | 2536 for (node = var->var_part[0].loc_chain; node; node = node->next) |
1906 if (GET_CODE (node->loc) == VALUE | 2537 if (GET_CODE (node->loc) == VALUE |
1907 && canon_value_cmp (node->loc, cval)) | 2538 && canon_value_cmp (node->loc, cval)) |
1922 | 2553 |
1923 if (cval) | 2554 if (cval) |
1924 { | 2555 { |
1925 decl_or_value cdv = dv_from_value (cval); | 2556 decl_or_value cdv = dv_from_value (cval); |
1926 | 2557 |
1927 /* Keep the remaining values connected, accummulating links | 2558 /* Keep the remaining values connected, accumulating links |
1928 in the canonical value. */ | 2559 in the canonical value. */ |
1929 for (node = var->var_part[0].loc_chain; node; node = node->next) | 2560 for (node = var->var_part[0].loc_chain; node; node = node->next) |
1930 { | 2561 { |
1931 if (node->loc == cval) | 2562 if (node->loc == cval) |
1932 continue; | 2563 continue; |
1946 removed to the point of requiring reinsertion. */ | 2577 removed to the point of requiring reinsertion. */ |
1947 if (cval) | 2578 if (cval) |
1948 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0); | 2579 delete_variable_part (set, dv_as_value (dv), dv_from_value (cval), 0); |
1949 | 2580 |
1950 clobber_variable_part (set, NULL, dv, 0, NULL); | 2581 clobber_variable_part (set, NULL, dv, 0, NULL); |
1951 | |
1952 /* ??? Should we make sure there aren't other available values or | |
1953 variables whose values involve this one other than by | |
1954 equivalence? E.g., at the very least we should reset MEMs, those | |
1955 shouldn't be too hard to find cselib-looking up the value as an | |
1956 address, then locating the resulting value in our own hash | |
1957 table. */ | |
1958 } | 2582 } |
1959 | 2583 |
1960 /* Find the values in a given location and map the val to another | 2584 /* Find the values in a given location and map the val to another |
1961 value, if it is unique, or add the location as one holding the | 2585 value, if it is unique, or add the location as one holding the |
1962 value. */ | 2586 value. */ |
1963 | 2587 |
1964 static void | 2588 static void |
1965 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx insn) | 2589 val_resolve (dataflow_set *set, rtx val, rtx loc, rtx_insn *insn) |
1966 { | 2590 { |
1967 decl_or_value dv = dv_from_value (val); | 2591 decl_or_value dv = dv_from_value (val); |
1968 | 2592 |
1969 if (dump_file && (dump_flags & TDF_DETAILS)) | 2593 if (dump_file && (dump_flags & TDF_DETAILS)) |
1970 { | 2594 { |
1978 fputc ('\n', dump_file); | 2602 fputc ('\n', dump_file); |
1979 } | 2603 } |
1980 | 2604 |
1981 val_reset (set, dv); | 2605 val_reset (set, dv); |
1982 | 2606 |
2607 gcc_checking_assert (!unsuitable_loc (loc)); | |
2608 | |
1983 if (REG_P (loc)) | 2609 if (REG_P (loc)) |
1984 { | 2610 { |
1985 attrs node, found = NULL; | 2611 attrs *node, *found = NULL; |
1986 | 2612 |
1987 for (node = set->regs[REGNO (loc)]; node; node = node->next) | 2613 for (node = set->regs[REGNO (loc)]; node; node = node->next) |
1988 if (dv_is_value_p (node->dv) | 2614 if (dv_is_value_p (node->dv) |
1989 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc)) | 2615 && GET_MODE (dv_as_value (node->dv)) == GET_MODE (loc)) |
1990 { | 2616 { |
2001 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT); | 2627 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT); |
2002 } | 2628 } |
2003 | 2629 |
2004 /* If we didn't find any equivalence, we need to remember that | 2630 /* If we didn't find any equivalence, we need to remember that |
2005 this value is held in the named register. */ | 2631 this value is held in the named register. */ |
2006 if (!found) | 2632 if (found) |
2007 var_reg_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED, | 2633 return; |
2008 dv_from_value (val), 0, NULL_RTX, INSERT); | 2634 } |
2009 } | 2635 /* ??? Attempt to find and merge equivalent MEMs or other |
2010 else if (MEM_P (loc)) | 2636 expressions too. */ |
2011 /* ??? Merge equivalent MEMs. */ | 2637 |
2012 var_mem_decl_set (set, loc, VAR_INIT_STATUS_INITIALIZED, | 2638 val_bind (set, val, loc, false); |
2013 dv_from_value (val), 0, NULL_RTX, INSERT); | |
2014 else | |
2015 /* ??? Merge equivalent expressions. */ | |
2016 set_variable_part (set, loc, dv_from_value (val), 0, | |
2017 VAR_INIT_STATUS_INITIALIZED, NULL_RTX, INSERT); | |
2018 } | 2639 } |
2019 | 2640 |
2020 /* Initialize dataflow set SET to be empty. | 2641 /* Initialize dataflow set SET to be empty. |
2021 VARS_SIZE is the initial size of hash table VARS. */ | 2642 VARS_SIZE is the initial size of hash table VARS. */ |
2022 | 2643 |
2061 /* Information for merging lists of locations for a given offset of variable. | 2682 /* Information for merging lists of locations for a given offset of variable. |
2062 */ | 2683 */ |
2063 struct variable_union_info | 2684 struct variable_union_info |
2064 { | 2685 { |
2065 /* Node of the location chain. */ | 2686 /* Node of the location chain. */ |
2066 location_chain lc; | 2687 location_chain *lc; |
2067 | 2688 |
2068 /* The sum of positions in the input chains. */ | 2689 /* The sum of positions in the input chains. */ |
2069 int pos; | 2690 int pos; |
2070 | 2691 |
2071 /* The position in the chain of DST dataflow set. */ | 2692 /* The position in the chain of DST dataflow set. */ |
2101 When we are updating the location parts the newest location is in the | 2722 When we are updating the location parts the newest location is in the |
2102 beginning of the chain, so when we do the described "sorted" union | 2723 beginning of the chain, so when we do the described "sorted" union |
2103 we keep the newest locations in the beginning. */ | 2724 we keep the newest locations in the beginning. */ |
2104 | 2725 |
2105 static int | 2726 static int |
2106 variable_union (variable src, dataflow_set *set) | 2727 variable_union (variable *src, dataflow_set *set) |
2107 { | 2728 { |
2108 variable dst; | 2729 variable *dst; |
2109 void **dstp; | 2730 variable **dstp; |
2110 int i, j, k; | 2731 int i, j, k; |
2111 | 2732 |
2112 dstp = shared_hash_find_slot (set->vars, src->dv); | 2733 dstp = shared_hash_find_slot (set->vars, src->dv); |
2113 if (!dstp || !*dstp) | 2734 if (!dstp || !*dstp) |
2114 { | 2735 { |
2122 | 2743 |
2123 /* Continue traversing the hash table. */ | 2744 /* Continue traversing the hash table. */ |
2124 return 1; | 2745 return 1; |
2125 } | 2746 } |
2126 else | 2747 else |
2127 dst = (variable) *dstp; | 2748 dst = *dstp; |
2128 | 2749 |
2129 gcc_assert (src->n_var_parts); | 2750 gcc_assert (src->n_var_parts); |
2751 gcc_checking_assert (src->onepart == dst->onepart); | |
2130 | 2752 |
2131 /* We can combine one-part variables very efficiently, because their | 2753 /* We can combine one-part variables very efficiently, because their |
2132 entries are in canonical order. */ | 2754 entries are in canonical order. */ |
2133 if (dv_onepart_p (src->dv)) | 2755 if (src->onepart) |
2134 { | 2756 { |
2135 location_chain *nodep, dnode, snode; | 2757 location_chain **nodep, *dnode, *snode; |
2136 | 2758 |
2137 gcc_assert (src->n_var_parts == 1 | 2759 gcc_assert (src->n_var_parts == 1 |
2138 && dst->n_var_parts == 1); | 2760 && dst->n_var_parts == 1); |
2139 | 2761 |
2140 snode = src->var_part[0].loc_chain; | 2762 snode = src->var_part[0].loc_chain; |
2149 { | 2771 { |
2150 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1; | 2772 int r = dnode ? loc_cmp (dnode->loc, snode->loc) : 1; |
2151 | 2773 |
2152 if (r > 0) | 2774 if (r > 0) |
2153 { | 2775 { |
2154 location_chain nnode; | 2776 location_chain *nnode; |
2155 | 2777 |
2156 if (shared_var_p (dst, set->vars)) | 2778 if (shared_var_p (dst, set->vars)) |
2157 { | 2779 { |
2158 dstp = unshare_variable (set, dstp, dst, | 2780 dstp = unshare_variable (set, dstp, dst, |
2159 VAR_INIT_STATUS_INITIALIZED); | 2781 VAR_INIT_STATUS_INITIALIZED); |
2160 dst = (variable)*dstp; | 2782 dst = *dstp; |
2161 goto restart_onepart_unshared; | 2783 goto restart_onepart_unshared; |
2162 } | 2784 } |
2163 | 2785 |
2164 *nodep = nnode = (location_chain) pool_alloc (loc_chain_pool); | 2786 *nodep = nnode = new location_chain; |
2165 nnode->loc = snode->loc; | 2787 nnode->loc = snode->loc; |
2166 nnode->init = snode->init; | 2788 nnode->init = snode->init; |
2167 if (!snode->set_src || MEM_P (snode->set_src)) | 2789 if (!snode->set_src || MEM_P (snode->set_src)) |
2168 nnode->set_src = NULL; | 2790 nnode->set_src = NULL; |
2169 else | 2791 else |
2182 } | 2804 } |
2183 | 2805 |
2184 return 1; | 2806 return 1; |
2185 } | 2807 } |
2186 | 2808 |
2809 gcc_checking_assert (!src->onepart); | |
2810 | |
2187 /* Count the number of location parts, result is K. */ | 2811 /* Count the number of location parts, result is K. */ |
2188 for (i = 0, j = 0, k = 0; | 2812 for (i = 0, j = 0, k = 0; |
2189 i < src->n_var_parts && j < dst->n_var_parts; k++) | 2813 i < src->n_var_parts && j < dst->n_var_parts; k++) |
2190 { | 2814 { |
2191 if (src->var_part[i].offset == dst->var_part[j].offset) | 2815 if (VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j)) |
2192 { | 2816 { |
2193 i++; | 2817 i++; |
2194 j++; | 2818 j++; |
2195 } | 2819 } |
2196 else if (src->var_part[i].offset < dst->var_part[j].offset) | 2820 else if (VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j)) |
2197 i++; | 2821 i++; |
2198 else | 2822 else |
2199 j++; | 2823 j++; |
2200 } | 2824 } |
2201 k += src->n_var_parts - i; | 2825 k += src->n_var_parts - i; |
2202 k += dst->n_var_parts - j; | 2826 k += dst->n_var_parts - j; |
2203 | 2827 |
2204 /* We track only variables whose size is <= MAX_VAR_PARTS bytes | 2828 /* We track only variables whose size is <= MAX_VAR_PARTS bytes |
2205 thus there are at most MAX_VAR_PARTS different offsets. */ | 2829 thus there are at most MAX_VAR_PARTS different offsets. */ |
2206 gcc_assert (dv_onepart_p (dst->dv) ? k == 1 : k <= MAX_VAR_PARTS); | 2830 gcc_checking_assert (dst->onepart ? k == 1 : k <= MAX_VAR_PARTS); |
2207 | 2831 |
2208 if (dst->n_var_parts != k && shared_var_p (dst, set->vars)) | 2832 if (dst->n_var_parts != k && shared_var_p (dst, set->vars)) |
2209 { | 2833 { |
2210 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN); | 2834 dstp = unshare_variable (set, dstp, dst, VAR_INIT_STATUS_UNKNOWN); |
2211 dst = (variable)*dstp; | 2835 dst = *dstp; |
2212 } | 2836 } |
2213 | 2837 |
2214 i = src->n_var_parts - 1; | 2838 i = src->n_var_parts - 1; |
2215 j = dst->n_var_parts - 1; | 2839 j = dst->n_var_parts - 1; |
2216 dst->n_var_parts = k; | 2840 dst->n_var_parts = k; |
2217 | 2841 |
2218 for (k--; k >= 0; k--) | 2842 for (k--; k >= 0; k--) |
2219 { | 2843 { |
2220 location_chain node, node2; | 2844 location_chain *node, *node2; |
2221 | 2845 |
2222 if (i >= 0 && j >= 0 | 2846 if (i >= 0 && j >= 0 |
2223 && src->var_part[i].offset == dst->var_part[j].offset) | 2847 && VAR_PART_OFFSET (src, i) == VAR_PART_OFFSET (dst, j)) |
2224 { | 2848 { |
2225 /* Compute the "sorted" union of the chains, i.e. the locations which | 2849 /* Compute the "sorted" union of the chains, i.e. the locations which |
2226 are in both chains go first, they are sorted by the sum of | 2850 are in both chains go first, they are sorted by the sum of |
2227 positions in the chains. */ | 2851 positions in the chains. */ |
2228 int dst_l, src_l; | 2852 int dst_l, src_l; |
2250 } | 2874 } |
2251 if (node || node2) | 2875 if (node || node2) |
2252 { | 2876 { |
2253 dstp = unshare_variable (set, dstp, dst, | 2877 dstp = unshare_variable (set, dstp, dst, |
2254 VAR_INIT_STATUS_UNKNOWN); | 2878 VAR_INIT_STATUS_UNKNOWN); |
2255 dst = (variable)*dstp; | 2879 dst = (variable *)*dstp; |
2256 } | 2880 } |
2257 } | 2881 } |
2258 | 2882 |
2259 src_l = 0; | 2883 src_l = 0; |
2260 for (node = src->var_part[i].loc_chain; node; node = node->next) | 2884 for (node = src->var_part[i].loc_chain; node; node = node->next) |
2264 dst_l++; | 2888 dst_l++; |
2265 | 2889 |
2266 if (dst_l == 1) | 2890 if (dst_l == 1) |
2267 { | 2891 { |
2268 /* The most common case, much simpler, no qsort is needed. */ | 2892 /* The most common case, much simpler, no qsort is needed. */ |
2269 location_chain dstnode = dst->var_part[j].loc_chain; | 2893 location_chain *dstnode = dst->var_part[j].loc_chain; |
2270 dst->var_part[k].loc_chain = dstnode; | 2894 dst->var_part[k].loc_chain = dstnode; |
2271 dst->var_part[k].offset = dst->var_part[j].offset; | 2895 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j); |
2272 node2 = dstnode; | 2896 node2 = dstnode; |
2273 for (node = src->var_part[i].loc_chain; node; node = node->next) | 2897 for (node = src->var_part[i].loc_chain; node; node = node->next) |
2274 if (!((REG_P (dstnode->loc) | 2898 if (!((REG_P (dstnode->loc) |
2275 && REG_P (node->loc) | 2899 && REG_P (node->loc) |
2276 && REGNO (dstnode->loc) == REGNO (node->loc)) | 2900 && REGNO (dstnode->loc) == REGNO (node->loc)) |
2277 || rtx_equal_p (dstnode->loc, node->loc))) | 2901 || rtx_equal_p (dstnode->loc, node->loc))) |
2278 { | 2902 { |
2279 location_chain new_node; | 2903 location_chain *new_node; |
2280 | 2904 |
2281 /* Copy the location from SRC. */ | 2905 /* Copy the location from SRC. */ |
2282 new_node = (location_chain) pool_alloc (loc_chain_pool); | 2906 new_node = new location_chain; |
2283 new_node->loc = node->loc; | 2907 new_node->loc = node->loc; |
2284 new_node->init = node->init; | 2908 new_node->init = node->init; |
2285 if (!node->set_src || MEM_P (node->set_src)) | 2909 if (!node->set_src || MEM_P (node->set_src)) |
2286 new_node->set_src = NULL; | 2910 new_node->set_src = NULL; |
2287 else | 2911 else |
2329 break; | 2953 break; |
2330 } | 2954 } |
2331 } | 2955 } |
2332 if (jj >= dst_l) /* The location has not been found. */ | 2956 if (jj >= dst_l) /* The location has not been found. */ |
2333 { | 2957 { |
2334 location_chain new_node; | 2958 location_chain *new_node; |
2335 | 2959 |
2336 /* Copy the location from SRC. */ | 2960 /* Copy the location from SRC. */ |
2337 new_node = (location_chain) pool_alloc (loc_chain_pool); | 2961 new_node = new location_chain; |
2338 new_node->loc = node->loc; | 2962 new_node->loc = node->loc; |
2339 new_node->init = node->init; | 2963 new_node->init = node->init; |
2340 if (!node->set_src || MEM_P (node->set_src)) | 2964 if (!node->set_src || MEM_P (node->set_src)) |
2341 new_node->set_src = NULL; | 2965 new_node->set_src = NULL; |
2342 else | 2966 else |
2404 vui[ii - 1].lc->next = vui[ii].lc; | 3028 vui[ii - 1].lc->next = vui[ii].lc; |
2405 vui[n - 1].lc->next = NULL; | 3029 vui[n - 1].lc->next = NULL; |
2406 dst->var_part[k].loc_chain = vui[0].lc; | 3030 dst->var_part[k].loc_chain = vui[0].lc; |
2407 } | 3031 } |
2408 | 3032 |
2409 dst->var_part[k].offset = dst->var_part[j].offset; | 3033 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (dst, j); |
2410 } | 3034 } |
2411 i--; | 3035 i--; |
2412 j--; | 3036 j--; |
2413 } | 3037 } |
2414 else if ((i >= 0 && j >= 0 | 3038 else if ((i >= 0 && j >= 0 |
2415 && src->var_part[i].offset < dst->var_part[j].offset) | 3039 && VAR_PART_OFFSET (src, i) < VAR_PART_OFFSET (dst, j)) |
2416 || i < 0) | 3040 || i < 0) |
2417 { | 3041 { |
2418 dst->var_part[k] = dst->var_part[j]; | 3042 dst->var_part[k] = dst->var_part[j]; |
2419 j--; | 3043 j--; |
2420 } | 3044 } |
2421 else if ((i >= 0 && j >= 0 | 3045 else if ((i >= 0 && j >= 0 |
2422 && src->var_part[i].offset > dst->var_part[j].offset) | 3046 && VAR_PART_OFFSET (src, i) > VAR_PART_OFFSET (dst, j)) |
2423 || j < 0) | 3047 || j < 0) |
2424 { | 3048 { |
2425 location_chain *nextp; | 3049 location_chain **nextp; |
2426 | 3050 |
2427 /* Copy the chain from SRC. */ | 3051 /* Copy the chain from SRC. */ |
2428 nextp = &dst->var_part[k].loc_chain; | 3052 nextp = &dst->var_part[k].loc_chain; |
2429 for (node = src->var_part[i].loc_chain; node; node = node->next) | 3053 for (node = src->var_part[i].loc_chain; node; node = node->next) |
2430 { | 3054 { |
2431 location_chain new_lc; | 3055 location_chain *new_lc; |
2432 | 3056 |
2433 new_lc = (location_chain) pool_alloc (loc_chain_pool); | 3057 new_lc = new location_chain; |
2434 new_lc->next = NULL; | 3058 new_lc->next = NULL; |
2435 new_lc->init = node->init; | 3059 new_lc->init = node->init; |
2436 if (!node->set_src || MEM_P (node->set_src)) | 3060 if (!node->set_src || MEM_P (node->set_src)) |
2437 new_lc->set_src = NULL; | 3061 new_lc->set_src = NULL; |
2438 else | 3062 else |
2441 | 3065 |
2442 *nextp = new_lc; | 3066 *nextp = new_lc; |
2443 nextp = &new_lc->next; | 3067 nextp = &new_lc->next; |
2444 } | 3068 } |
2445 | 3069 |
2446 dst->var_part[k].offset = src->var_part[i].offset; | 3070 VAR_PART_OFFSET (dst, k) = VAR_PART_OFFSET (src, i); |
2447 i--; | 3071 i--; |
2448 } | 3072 } |
2449 dst->var_part[k].cur_loc = NULL; | 3073 dst->var_part[k].cur_loc = NULL; |
2450 } | 3074 } |
2451 | 3075 |
2452 if (flag_var_tracking_uninit) | 3076 if (flag_var_tracking_uninit) |
2453 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++) | 3077 for (i = 0; i < src->n_var_parts && i < dst->n_var_parts; i++) |
2454 { | 3078 { |
2455 location_chain node, node2; | 3079 location_chain *node, *node2; |
2456 for (node = src->var_part[i].loc_chain; node; node = node->next) | 3080 for (node = src->var_part[i].loc_chain; node; node = node->next) |
2457 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next) | 3081 for (node2 = dst->var_part[i].loc_chain; node2; node2 = node2->next) |
2458 if (rtx_equal_p (node->loc, node2->loc)) | 3082 if (rtx_equal_p (node->loc, node2->loc)) |
2459 { | 3083 { |
2460 if (node->init > node2->init) | 3084 if (node->init > node2->init) |
2481 shared_hash_destroy (dst->vars); | 3105 shared_hash_destroy (dst->vars); |
2482 dst->vars = shared_hash_copy (src->vars); | 3106 dst->vars = shared_hash_copy (src->vars); |
2483 } | 3107 } |
2484 else | 3108 else |
2485 { | 3109 { |
2486 htab_iterator hi; | 3110 variable_iterator_type hi; |
2487 variable var; | 3111 variable *var; |
2488 | 3112 |
2489 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (src->vars), var, variable, hi) | 3113 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (src->vars), |
3114 var, variable, hi) | |
2490 variable_union (var, dst); | 3115 variable_union (var, dst); |
2491 } | 3116 } |
2492 } | 3117 } |
2493 | 3118 |
2494 /* Whether the value is currently being expanded. */ | 3119 /* Whether the value is currently being expanded. */ |
2495 #define VALUE_RECURSED_INTO(x) \ | 3120 #define VALUE_RECURSED_INTO(x) \ |
2496 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used) | 3121 (RTL_FLAG_CHECK2 ("VALUE_RECURSED_INTO", (x), VALUE, DEBUG_EXPR)->used) |
2497 /* Whether the value is in changed_variables hash table. */ | 3122 |
3123 /* Whether no expansion was found, saving useless lookups. | |
3124 It must only be set when VALUE_CHANGED is clear. */ | |
3125 #define NO_LOC_P(x) \ | |
3126 (RTL_FLAG_CHECK2 ("NO_LOC_P", (x), VALUE, DEBUG_EXPR)->return_val) | |
3127 | |
3128 /* Whether cur_loc in the value needs to be (re)computed. */ | |
2498 #define VALUE_CHANGED(x) \ | 3129 #define VALUE_CHANGED(x) \ |
2499 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related) | 3130 (RTL_FLAG_CHECK1 ("VALUE_CHANGED", (x), VALUE)->frame_related) |
2500 /* Whether the decl is in changed_variables hash table. */ | 3131 /* Whether cur_loc in the decl needs to be (re)computed. */ |
2501 #define DECL_CHANGED(x) TREE_VISITED (x) | 3132 #define DECL_CHANGED(x) TREE_VISITED (x) |
2502 | 3133 |
2503 /* Record that DV has been added into resp. removed from changed_variables | 3134 /* Record (if NEWV) that DV needs to have its cur_loc recomputed. For |
2504 hashtable. */ | 3135 user DECLs, this means they're in changed_variables. Values and |
3136 debug exprs may be left with this flag set if no user variable | |
3137 requires them to be evaluated. */ | |
2505 | 3138 |
2506 static inline void | 3139 static inline void |
2507 set_dv_changed (decl_or_value dv, bool newv) | 3140 set_dv_changed (decl_or_value dv, bool newv) |
2508 { | 3141 { |
2509 if (dv_is_value_p (dv)) | 3142 switch (dv_onepart_p (dv)) |
2510 VALUE_CHANGED (dv_as_value (dv)) = newv; | 3143 { |
2511 else | 3144 case ONEPART_VALUE: |
2512 DECL_CHANGED (dv_as_decl (dv)) = newv; | 3145 if (newv) |
2513 } | 3146 NO_LOC_P (dv_as_value (dv)) = false; |
2514 | 3147 VALUE_CHANGED (dv_as_value (dv)) = newv; |
2515 /* Return true if DV is present in changed_variables hash table. */ | 3148 break; |
3149 | |
3150 case ONEPART_DEXPR: | |
3151 if (newv) | |
3152 NO_LOC_P (DECL_RTL_KNOWN_SET (dv_as_decl (dv))) = false; | |
3153 /* Fall through. */ | |
3154 | |
3155 default: | |
3156 DECL_CHANGED (dv_as_decl (dv)) = newv; | |
3157 break; | |
3158 } | |
3159 } | |
3160 | |
3161 /* Return true if DV needs to have its cur_loc recomputed. */ | |
2516 | 3162 |
2517 static inline bool | 3163 static inline bool |
2518 dv_changed_p (decl_or_value dv) | 3164 dv_changed_p (decl_or_value dv) |
2519 { | 3165 { |
2520 return (dv_is_value_p (dv) | 3166 return (dv_is_value_p (dv) |
2525 /* Return a location list node whose loc is rtx_equal to LOC, in the | 3171 /* Return a location list node whose loc is rtx_equal to LOC, in the |
2526 location list of a one-part variable or value VAR, or in that of | 3172 location list of a one-part variable or value VAR, or in that of |
2527 any values recursively mentioned in the location lists. VARS must | 3173 any values recursively mentioned in the location lists. VARS must |
2528 be in star-canonical form. */ | 3174 be in star-canonical form. */ |
2529 | 3175 |
2530 static location_chain | 3176 static location_chain * |
2531 find_loc_in_1pdv (rtx loc, variable var, htab_t vars) | 3177 find_loc_in_1pdv (rtx loc, variable *var, variable_table_type *vars) |
2532 { | 3178 { |
2533 location_chain node; | 3179 location_chain *node; |
2534 enum rtx_code loc_code; | 3180 enum rtx_code loc_code; |
2535 | 3181 |
2536 if (!var) | 3182 if (!var) |
2537 return NULL; | 3183 return NULL; |
2538 | 3184 |
2539 gcc_checking_assert (dv_onepart_p (var->dv)); | 3185 gcc_checking_assert (var->onepart); |
2540 | 3186 |
2541 if (!var->n_var_parts) | 3187 if (!var->n_var_parts) |
2542 return NULL; | 3188 return NULL; |
2543 | 3189 |
2544 gcc_checking_assert (var->var_part[0].offset == 0); | |
2545 gcc_checking_assert (loc != dv_as_opaque (var->dv)); | 3190 gcc_checking_assert (loc != dv_as_opaque (var->dv)); |
2546 | 3191 |
2547 loc_code = GET_CODE (loc); | 3192 loc_code = GET_CODE (loc); |
2548 for (node = var->var_part[0].loc_chain; node; node = node->next) | 3193 for (node = var->var_part[0].loc_chain; node; node = node->next) |
2549 { | 3194 { |
2550 decl_or_value dv; | 3195 decl_or_value dv; |
2551 variable rvar; | 3196 variable *rvar; |
2552 | 3197 |
2553 if (GET_CODE (node->loc) != loc_code) | 3198 if (GET_CODE (node->loc) != loc_code) |
2554 { | 3199 { |
2555 if (GET_CODE (node->loc) != VALUE) | 3200 if (GET_CODE (node->loc) != VALUE) |
2556 continue; | 3201 continue; |
2584 | 3229 |
2585 gcc_checking_assert (node == var->var_part[0].loc_chain); | 3230 gcc_checking_assert (node == var->var_part[0].loc_chain); |
2586 gcc_checking_assert (!node->next); | 3231 gcc_checking_assert (!node->next); |
2587 | 3232 |
2588 dv = dv_from_value (node->loc); | 3233 dv = dv_from_value (node->loc); |
2589 rvar = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv)); | 3234 rvar = vars->find_with_hash (dv, dv_htab_hash (dv)); |
2590 return find_loc_in_1pdv (loc, rvar, vars); | 3235 return find_loc_in_1pdv (loc, rvar, vars); |
2591 } | 3236 } |
3237 | |
3238 /* ??? Gotta look in cselib_val locations too. */ | |
2592 | 3239 |
2593 return NULL; | 3240 return NULL; |
2594 } | 3241 } |
2595 | 3242 |
2596 /* Hash table iteration argument passed to variable_merge. */ | 3243 /* Hash table iteration argument passed to variable_merge. */ |
2608 | 3255 |
2609 /* Insert LOC in *DNODE, if it's not there yet. The list must be in | 3256 /* Insert LOC in *DNODE, if it's not there yet. The list must be in |
2610 loc_cmp order, and it is maintained as such. */ | 3257 loc_cmp order, and it is maintained as such. */ |
2611 | 3258 |
2612 static void | 3259 static void |
2613 insert_into_intersection (location_chain *nodep, rtx loc, | 3260 insert_into_intersection (location_chain **nodep, rtx loc, |
2614 enum var_init_status status) | 3261 enum var_init_status status) |
2615 { | 3262 { |
2616 location_chain node; | 3263 location_chain *node; |
2617 int r; | 3264 int r; |
2618 | 3265 |
2619 for (node = *nodep; node; nodep = &node->next, node = *nodep) | 3266 for (node = *nodep; node; nodep = &node->next, node = *nodep) |
2620 if ((r = loc_cmp (node->loc, loc)) == 0) | 3267 if ((r = loc_cmp (node->loc, loc)) == 0) |
2621 { | 3268 { |
2623 return; | 3270 return; |
2624 } | 3271 } |
2625 else if (r > 0) | 3272 else if (r > 0) |
2626 break; | 3273 break; |
2627 | 3274 |
2628 node = (location_chain) pool_alloc (loc_chain_pool); | 3275 node = new location_chain; |
2629 | 3276 |
2630 node->loc = loc; | 3277 node->loc = loc; |
2631 node->set_src = NULL; | 3278 node->set_src = NULL; |
2632 node->init = status; | 3279 node->init = status; |
2633 node->next = *nodep; | 3280 node->next = *nodep; |
2634 *nodep = node; | 3281 *nodep = node; |
2635 } | 3282 } |
2636 | 3283 |
2637 /* Insert in DEST the intersection the locations present in both | 3284 /* Insert in DEST the intersection of the locations present in both |
2638 S1NODE and S2VAR, directly or indirectly. S1NODE is from a | 3285 S1NODE and S2VAR, directly or indirectly. S1NODE is from a |
2639 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in | 3286 variable in DSM->cur, whereas S2VAR is from DSM->src. dvar is in |
2640 DSM->dst. */ | 3287 DSM->dst. */ |
2641 | 3288 |
2642 static void | 3289 static void |
2643 intersect_loc_chains (rtx val, location_chain *dest, struct dfset_merge *dsm, | 3290 intersect_loc_chains (rtx val, location_chain **dest, struct dfset_merge *dsm, |
2644 location_chain s1node, variable s2var) | 3291 location_chain *s1node, variable *s2var) |
2645 { | 3292 { |
2646 dataflow_set *s1set = dsm->cur; | 3293 dataflow_set *s1set = dsm->cur; |
2647 dataflow_set *s2set = dsm->src; | 3294 dataflow_set *s2set = dsm->src; |
2648 location_chain found; | 3295 location_chain *found; |
2649 | 3296 |
2650 if (s2var) | 3297 if (s2var) |
2651 { | 3298 { |
2652 location_chain s2node; | 3299 location_chain *s2node; |
2653 | 3300 |
2654 gcc_checking_assert (dv_onepart_p (s2var->dv)); | 3301 gcc_checking_assert (s2var->onepart); |
2655 | 3302 |
2656 if (s2var->n_var_parts) | 3303 if (s2var->n_var_parts) |
2657 { | 3304 { |
2658 gcc_checking_assert (s2var->var_part[0].offset == 0); | |
2659 s2node = s2var->var_part[0].loc_chain; | 3305 s2node = s2var->var_part[0].loc_chain; |
2660 | 3306 |
2661 for (; s1node && s2node; | 3307 for (; s1node && s2node; |
2662 s1node = s1node->next, s2node = s2node->next) | 3308 s1node = s1node->next, s2node = s2node->next) |
2663 if (s1node->loc != s2node->loc) | 3309 if (s1node->loc != s2node->loc) |
2685 | 3331 |
2686 if (GET_CODE (s1node->loc) == VALUE | 3332 if (GET_CODE (s1node->loc) == VALUE |
2687 && !VALUE_RECURSED_INTO (s1node->loc)) | 3333 && !VALUE_RECURSED_INTO (s1node->loc)) |
2688 { | 3334 { |
2689 decl_or_value dv = dv_from_value (s1node->loc); | 3335 decl_or_value dv = dv_from_value (s1node->loc); |
2690 variable svar = shared_hash_find (s1set->vars, dv); | 3336 variable *svar = shared_hash_find (s1set->vars, dv); |
2691 if (svar) | 3337 if (svar) |
2692 { | 3338 { |
2693 if (svar->n_var_parts == 1) | 3339 if (svar->n_var_parts == 1) |
2694 { | 3340 { |
2695 VALUE_RECURSED_INTO (s1node->loc) = true; | 3341 VALUE_RECURSED_INTO (s1node->loc) = true; |
2699 VALUE_RECURSED_INTO (s1node->loc) = false; | 3345 VALUE_RECURSED_INTO (s1node->loc) = false; |
2700 } | 3346 } |
2701 } | 3347 } |
2702 } | 3348 } |
2703 | 3349 |
3350 /* ??? gotta look in cselib_val locations too. */ | |
3351 | |
2704 /* ??? if the location is equivalent to any location in src, | 3352 /* ??? if the location is equivalent to any location in src, |
2705 searched recursively | 3353 searched recursively |
2706 | 3354 |
2707 add to dst the values needed to represent the equivalence | 3355 add to dst the values needed to represent the equivalence |
2708 | 3356 |
2787 return 1; | 3435 return 1; |
2788 } | 3436 } |
2789 | 3437 |
2790 if (GET_CODE (y) == VALUE) | 3438 if (GET_CODE (y) == VALUE) |
2791 return 1; | 3439 return 1; |
3440 | |
3441 /* Entry value is the least preferable kind of expression. */ | |
3442 if (GET_CODE (x) == ENTRY_VALUE) | |
3443 { | |
3444 if (GET_CODE (y) != ENTRY_VALUE) | |
3445 return 1; | |
3446 gcc_assert (GET_MODE (x) == GET_MODE (y)); | |
3447 return loc_cmp (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y)); | |
3448 } | |
3449 | |
3450 if (GET_CODE (y) == ENTRY_VALUE) | |
3451 return -1; | |
2792 | 3452 |
2793 if (GET_CODE (x) == GET_CODE (y)) | 3453 if (GET_CODE (x) == GET_CODE (y)) |
2794 /* Compare operands below. */; | 3454 /* Compare operands below. */; |
2795 else if (GET_CODE (x) < GET_CODE (y)) | 3455 else if (GET_CODE (x) < GET_CODE (y)) |
2796 return -1; | 3456 return -1; |
2878 contain anything but integers and other rtx's, | 3538 contain anything but integers and other rtx's, |
2879 except for within LABEL_REFs and SYMBOL_REFs. */ | 3539 except for within LABEL_REFs and SYMBOL_REFs. */ |
2880 default: | 3540 default: |
2881 gcc_unreachable (); | 3541 gcc_unreachable (); |
2882 } | 3542 } |
3543 if (CONST_WIDE_INT_P (x)) | |
3544 { | |
3545 /* Compare the vector length first. */ | |
3546 if (CONST_WIDE_INT_NUNITS (x) >= CONST_WIDE_INT_NUNITS (y)) | |
3547 return 1; | |
3548 else if (CONST_WIDE_INT_NUNITS (x) < CONST_WIDE_INT_NUNITS (y)) | |
3549 return -1; | |
3550 | |
3551 /* Compare the vectors elements. */; | |
3552 for (j = CONST_WIDE_INT_NUNITS (x) - 1; j >= 0 ; j--) | |
3553 { | |
3554 if (CONST_WIDE_INT_ELT (x, j) < CONST_WIDE_INT_ELT (y, j)) | |
3555 return -1; | |
3556 if (CONST_WIDE_INT_ELT (x, j) > CONST_WIDE_INT_ELT (y, j)) | |
3557 return 1; | |
3558 } | |
3559 } | |
2883 | 3560 |
2884 return 0; | 3561 return 0; |
2885 } | 3562 } |
2886 | 3563 |
2887 /* If decl or value DVP refers to VALUE from *LOC, add backlinks | |
2888 from VALUE to DVP. */ | |
2889 | |
2890 static int | |
2891 add_value_chain (rtx *loc, void *dvp) | |
2892 { | |
2893 decl_or_value dv, ldv; | |
2894 value_chain vc, nvc; | |
2895 void **slot; | |
2896 | |
2897 if (GET_CODE (*loc) == VALUE) | |
2898 ldv = dv_from_value (*loc); | |
2899 else if (GET_CODE (*loc) == DEBUG_EXPR) | |
2900 ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc)); | |
2901 else | |
2902 return 0; | |
2903 | |
2904 if (dv_as_opaque (ldv) == dvp) | |
2905 return 0; | |
2906 | |
2907 dv = (decl_or_value) dvp; | |
2908 slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv), | |
2909 INSERT); | |
2910 if (!*slot) | |
2911 { | |
2912 vc = (value_chain) pool_alloc (value_chain_pool); | |
2913 vc->dv = ldv; | |
2914 vc->next = NULL; | |
2915 vc->refcount = 0; | |
2916 *slot = (void *) vc; | |
2917 } | |
2918 else | |
2919 { | |
2920 for (vc = ((value_chain) *slot)->next; vc; vc = vc->next) | |
2921 if (dv_as_opaque (vc->dv) == dv_as_opaque (dv)) | |
2922 break; | |
2923 if (vc) | |
2924 { | |
2925 vc->refcount++; | |
2926 return 0; | |
2927 } | |
2928 } | |
2929 vc = (value_chain) *slot; | |
2930 nvc = (value_chain) pool_alloc (value_chain_pool); | |
2931 nvc->dv = dv; | |
2932 nvc->next = vc->next; | |
2933 nvc->refcount = 1; | |
2934 vc->next = nvc; | |
2935 return 0; | |
2936 } | |
2937 | |
2938 /* If decl or value DVP refers to VALUEs from within LOC, add backlinks | |
2939 from those VALUEs to DVP. */ | |
2940 | |
2941 static void | |
2942 add_value_chains (decl_or_value dv, rtx loc) | |
2943 { | |
2944 if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR) | |
2945 { | |
2946 add_value_chain (&loc, dv_as_opaque (dv)); | |
2947 return; | |
2948 } | |
2949 if (REG_P (loc)) | |
2950 return; | |
2951 if (MEM_P (loc)) | |
2952 loc = XEXP (loc, 0); | |
2953 for_each_rtx (&loc, add_value_chain, dv_as_opaque (dv)); | |
2954 } | |
2955 | |
2956 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, add backlinks from those | |
2957 VALUEs to DV. Add the same time get rid of ASM_OPERANDS from locs list, | |
2958 that is something we never can express in .debug_info and can prevent | |
2959 reverse ops from being used. */ | |
2960 | |
2961 static void | |
2962 add_cselib_value_chains (decl_or_value dv) | |
2963 { | |
2964 struct elt_loc_list **l; | |
2965 | |
2966 for (l = &CSELIB_VAL_PTR (dv_as_value (dv))->locs; *l;) | |
2967 if (GET_CODE ((*l)->loc) == ASM_OPERANDS) | |
2968 *l = (*l)->next; | |
2969 else | |
2970 { | |
2971 for_each_rtx (&(*l)->loc, add_value_chain, dv_as_opaque (dv)); | |
2972 l = &(*l)->next; | |
2973 } | |
2974 } | |
2975 | |
2976 /* If decl or value DVP refers to VALUE from *LOC, remove backlinks | |
2977 from VALUE to DVP. */ | |
2978 | |
2979 static int | |
2980 remove_value_chain (rtx *loc, void *dvp) | |
2981 { | |
2982 decl_or_value dv, ldv; | |
2983 value_chain vc; | |
2984 void **slot; | |
2985 | |
2986 if (GET_CODE (*loc) == VALUE) | |
2987 ldv = dv_from_value (*loc); | |
2988 else if (GET_CODE (*loc) == DEBUG_EXPR) | |
2989 ldv = dv_from_decl (DEBUG_EXPR_TREE_DECL (*loc)); | |
2990 else | |
2991 return 0; | |
2992 | |
2993 if (dv_as_opaque (ldv) == dvp) | |
2994 return 0; | |
2995 | |
2996 dv = (decl_or_value) dvp; | |
2997 slot = htab_find_slot_with_hash (value_chains, ldv, dv_htab_hash (ldv), | |
2998 NO_INSERT); | |
2999 for (vc = (value_chain) *slot; vc->next; vc = vc->next) | |
3000 if (dv_as_opaque (vc->next->dv) == dv_as_opaque (dv)) | |
3001 { | |
3002 value_chain dvc = vc->next; | |
3003 gcc_assert (dvc->refcount > 0); | |
3004 if (--dvc->refcount == 0) | |
3005 { | |
3006 vc->next = dvc->next; | |
3007 pool_free (value_chain_pool, dvc); | |
3008 if (vc->next == NULL && vc == (value_chain) *slot) | |
3009 { | |
3010 pool_free (value_chain_pool, vc); | |
3011 htab_clear_slot (value_chains, slot); | |
3012 } | |
3013 } | |
3014 return 0; | |
3015 } | |
3016 gcc_unreachable (); | |
3017 } | |
3018 | |
3019 /* If decl or value DVP refers to VALUEs from within LOC, remove backlinks | |
3020 from those VALUEs to DVP. */ | |
3021 | |
3022 static void | |
3023 remove_value_chains (decl_or_value dv, rtx loc) | |
3024 { | |
3025 if (GET_CODE (loc) == VALUE || GET_CODE (loc) == DEBUG_EXPR) | |
3026 { | |
3027 remove_value_chain (&loc, dv_as_opaque (dv)); | |
3028 return; | |
3029 } | |
3030 if (REG_P (loc)) | |
3031 return; | |
3032 if (MEM_P (loc)) | |
3033 loc = XEXP (loc, 0); | |
3034 for_each_rtx (&loc, remove_value_chain, dv_as_opaque (dv)); | |
3035 } | |
3036 | |
3037 #if ENABLE_CHECKING | |
3038 /* If CSELIB_VAL_PTR of value DV refer to VALUEs, remove backlinks from those | |
3039 VALUEs to DV. */ | |
3040 | |
3041 static void | |
3042 remove_cselib_value_chains (decl_or_value dv) | |
3043 { | |
3044 struct elt_loc_list *l; | |
3045 | |
3046 for (l = CSELIB_VAL_PTR (dv_as_value (dv))->locs; l; l = l->next) | |
3047 for_each_rtx (&l->loc, remove_value_chain, dv_as_opaque (dv)); | |
3048 } | |
3049 | |
3050 /* Check the order of entries in one-part variables. */ | 3564 /* Check the order of entries in one-part variables. */ |
3051 | 3565 |
3052 static int | 3566 int |
3053 canonicalize_loc_order_check (void **slot, void *data ATTRIBUTE_UNUSED) | 3567 canonicalize_loc_order_check (variable **slot, |
3054 { | 3568 dataflow_set *data ATTRIBUTE_UNUSED) |
3055 variable var = (variable) *slot; | 3569 { |
3056 decl_or_value dv = var->dv; | 3570 variable *var = *slot; |
3057 location_chain node, next; | 3571 location_chain *node, *next; |
3058 | 3572 |
3059 #ifdef ENABLE_RTL_CHECKING | 3573 #ifdef ENABLE_RTL_CHECKING |
3060 int i; | 3574 int i; |
3061 for (i = 0; i < var->n_var_parts; i++) | 3575 for (i = 0; i < var->n_var_parts; i++) |
3062 gcc_assert (var->var_part[0].cur_loc == NULL); | 3576 gcc_assert (var->var_part[0].cur_loc == NULL); |
3063 gcc_assert (!var->cur_loc_changed && !var->in_changed_variables); | 3577 gcc_assert (!var->in_changed_variables); |
3064 #endif | 3578 #endif |
3065 | 3579 |
3066 if (!dv_onepart_p (dv)) | 3580 if (!var->onepart) |
3067 return 1; | 3581 return 1; |
3068 | 3582 |
3069 gcc_assert (var->n_var_parts == 1); | 3583 gcc_assert (var->n_var_parts == 1); |
3070 node = var->var_part[0].loc_chain; | 3584 node = var->var_part[0].loc_chain; |
3071 gcc_assert (node); | 3585 gcc_assert (node); |
3076 node = next; | 3590 node = next; |
3077 } | 3591 } |
3078 | 3592 |
3079 return 1; | 3593 return 1; |
3080 } | 3594 } |
3081 #endif | |
3082 | 3595 |
3083 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are | 3596 /* Mark with VALUE_RECURSED_INTO values that have neighbors that are |
3084 more likely to be chosen as canonical for an equivalence set. | 3597 more likely to be chosen as canonical for an equivalence set. |
3085 Ensure less likely values can reach more likely neighbors, making | 3598 Ensure less likely values can reach more likely neighbors, making |
3086 the connections bidirectional. */ | 3599 the connections bidirectional. */ |
3087 | 3600 |
3088 static int | 3601 int |
3089 canonicalize_values_mark (void **slot, void *data) | 3602 canonicalize_values_mark (variable **slot, dataflow_set *set) |
3090 { | 3603 { |
3091 dataflow_set *set = (dataflow_set *)data; | 3604 variable *var = *slot; |
3092 variable var = (variable) *slot; | |
3093 decl_or_value dv = var->dv; | 3605 decl_or_value dv = var->dv; |
3094 rtx val; | 3606 rtx val; |
3095 location_chain node; | 3607 location_chain *node; |
3096 | 3608 |
3097 if (!dv_is_value_p (dv)) | 3609 if (!dv_is_value_p (dv)) |
3098 return 1; | 3610 return 1; |
3099 | 3611 |
3100 gcc_checking_assert (var->n_var_parts == 1); | 3612 gcc_checking_assert (var->n_var_parts == 1); |
3107 if (canon_value_cmp (node->loc, val)) | 3619 if (canon_value_cmp (node->loc, val)) |
3108 VALUE_RECURSED_INTO (val) = true; | 3620 VALUE_RECURSED_INTO (val) = true; |
3109 else | 3621 else |
3110 { | 3622 { |
3111 decl_or_value odv = dv_from_value (node->loc); | 3623 decl_or_value odv = dv_from_value (node->loc); |
3112 void **oslot = shared_hash_find_slot_noinsert (set->vars, odv); | 3624 variable **oslot; |
3113 | 3625 oslot = shared_hash_find_slot_noinsert (set->vars, odv); |
3114 oslot = set_slot_part (set, val, oslot, odv, 0, | 3626 |
3115 node->init, NULL_RTX); | 3627 set_slot_part (set, val, oslot, odv, 0, |
3628 node->init, NULL_RTX); | |
3116 | 3629 |
3117 VALUE_RECURSED_INTO (node->loc) = true; | 3630 VALUE_RECURSED_INTO (node->loc) = true; |
3118 } | 3631 } |
3119 } | 3632 } |
3120 | 3633 |
3122 } | 3635 } |
3123 | 3636 |
3124 /* Remove redundant entries from equivalence lists in onepart | 3637 /* Remove redundant entries from equivalence lists in onepart |
3125 variables, canonicalizing equivalence sets into star shapes. */ | 3638 variables, canonicalizing equivalence sets into star shapes. */ |
3126 | 3639 |
3127 static int | 3640 int |
3128 canonicalize_values_star (void **slot, void *data) | 3641 canonicalize_values_star (variable **slot, dataflow_set *set) |
3129 { | 3642 { |
3130 dataflow_set *set = (dataflow_set *)data; | 3643 variable *var = *slot; |
3131 variable var = (variable) *slot; | |
3132 decl_or_value dv = var->dv; | 3644 decl_or_value dv = var->dv; |
3133 location_chain node; | 3645 location_chain *node; |
3134 decl_or_value cdv; | 3646 decl_or_value cdv; |
3135 rtx val, cval; | 3647 rtx val, cval; |
3136 void **cslot; | 3648 variable **cslot; |
3137 bool has_value; | 3649 bool has_value; |
3138 bool has_marks; | 3650 bool has_marks; |
3139 | 3651 |
3140 if (!dv_onepart_p (dv)) | 3652 if (!var->onepart) |
3141 return 1; | 3653 return 1; |
3142 | 3654 |
3143 gcc_checking_assert (var->n_var_parts == 1); | 3655 gcc_checking_assert (var->n_var_parts == 1); |
3144 | 3656 |
3145 if (dv_is_value_p (dv)) | 3657 if (dv_is_value_p (dv)) |
3197 /* The canonical value was reset and dropped. | 3709 /* The canonical value was reset and dropped. |
3198 Remove it. */ | 3710 Remove it. */ |
3199 clobber_variable_part (set, NULL, var->dv, 0, NULL); | 3711 clobber_variable_part (set, NULL, var->dv, 0, NULL); |
3200 return 1; | 3712 return 1; |
3201 } | 3713 } |
3202 var = (variable)*slot; | 3714 var = *slot; |
3203 gcc_assert (dv_is_value_p (var->dv)); | 3715 gcc_assert (dv_is_value_p (var->dv)); |
3204 if (var->n_var_parts == 0) | 3716 if (var->n_var_parts == 0) |
3205 return 1; | 3717 return 1; |
3206 gcc_assert (var->n_var_parts == 1); | 3718 gcc_assert (var->n_var_parts == 1); |
3207 goto restart; | 3719 goto restart; |
3248 parent. */ | 3760 parent. */ |
3249 clobber_variable_part (set, cval, ndv, 0, NULL); | 3761 clobber_variable_part (set, cval, ndv, 0, NULL); |
3250 } | 3762 } |
3251 else if (GET_CODE (node->loc) == REG) | 3763 else if (GET_CODE (node->loc) == REG) |
3252 { | 3764 { |
3253 attrs list = set->regs[REGNO (node->loc)], *listp; | 3765 attrs *list = set->regs[REGNO (node->loc)], **listp; |
3254 | 3766 |
3255 /* Change an existing attribute referring to dv so that it | 3767 /* Change an existing attribute referring to dv so that it |
3256 refers to cdv, removing any duplicate this might | 3768 refers to cdv, removing any duplicate this might |
3257 introduce, and checking that no previous duplicates | 3769 introduce, and checking that no previous duplicates |
3258 existed, all in a single pass. */ | 3770 existed, all in a single pass. */ |
3277 continue; | 3789 continue; |
3278 | 3790 |
3279 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv)) | 3791 if (dv_as_opaque (list->dv) == dv_as_opaque (cdv)) |
3280 { | 3792 { |
3281 *listp = list->next; | 3793 *listp = list->next; |
3282 pool_free (attrs_pool, list); | 3794 delete list; |
3283 list = *listp; | 3795 list = *listp; |
3284 break; | 3796 break; |
3285 } | 3797 } |
3286 | 3798 |
3287 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv)); | 3799 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (dv)); |
3295 continue; | 3807 continue; |
3296 | 3808 |
3297 if (dv_as_opaque (list->dv) == dv_as_opaque (dv)) | 3809 if (dv_as_opaque (list->dv) == dv_as_opaque (dv)) |
3298 { | 3810 { |
3299 *listp = list->next; | 3811 *listp = list->next; |
3300 pool_free (attrs_pool, list); | 3812 delete list; |
3301 list = *listp; | 3813 list = *listp; |
3302 break; | 3814 break; |
3303 } | 3815 } |
3304 | 3816 |
3305 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv)); | 3817 gcc_assert (dv_as_opaque (list->dv) != dv_as_opaque (cdv)); |
3306 } | 3818 } |
3307 } | 3819 } |
3308 else | 3820 else |
3309 gcc_unreachable (); | 3821 gcc_unreachable (); |
3310 | 3822 |
3311 #if ENABLE_CHECKING | 3823 if (flag_checking) |
3312 while (list) | 3824 while (list) |
3313 { | 3825 { |
3314 if (list->offset == 0 | 3826 if (list->offset == 0 |
3315 && (dv_as_opaque (list->dv) == dv_as_opaque (dv) | 3827 && (dv_as_opaque (list->dv) == dv_as_opaque (dv) |
3316 || dv_as_opaque (list->dv) == dv_as_opaque (cdv))) | 3828 || dv_as_opaque (list->dv) == dv_as_opaque (cdv))) |
3317 gcc_unreachable (); | 3829 gcc_unreachable (); |
3318 | 3830 |
3319 list = list->next; | 3831 list = list->next; |
3320 } | 3832 } |
3321 #endif | |
3322 } | 3833 } |
3323 } | 3834 } |
3324 | 3835 |
3325 if (val) | 3836 if (val) |
3326 cslot = set_slot_part (set, val, cslot, cdv, 0, | 3837 set_slot_part (set, val, cslot, cdv, 0, |
3327 VAR_INIT_STATUS_INITIALIZED, NULL_RTX); | 3838 VAR_INIT_STATUS_INITIALIZED, NULL_RTX); |
3328 | 3839 |
3329 slot = clobber_slot_part (set, cval, slot, 0, NULL); | 3840 slot = clobber_slot_part (set, cval, slot, 0, NULL); |
3330 | 3841 |
3331 /* Variable may have been unshared. */ | 3842 /* Variable may have been unshared. */ |
3332 var = (variable)*slot; | 3843 var = *slot; |
3333 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval | 3844 gcc_checking_assert (var->n_var_parts && var->var_part[0].loc_chain->loc == cval |
3334 && var->var_part[0].loc_chain->next == NULL); | 3845 && var->var_part[0].loc_chain->next == NULL); |
3335 | 3846 |
3336 if (VALUE_RECURSED_INTO (cval)) | 3847 if (VALUE_RECURSED_INTO (cval)) |
3337 goto restart_with_cval; | 3848 goto restart_with_cval; |
3344 circumstances, see PR42873. Unfortunately we can't do this | 3855 circumstances, see PR42873. Unfortunately we can't do this |
3345 efficiently as part of canonicalize_values_star, since we may not | 3856 efficiently as part of canonicalize_values_star, since we may not |
3346 have determined or even seen the canonical value of a set when we | 3857 have determined or even seen the canonical value of a set when we |
3347 get to a variable that references another member of the set. */ | 3858 get to a variable that references another member of the set. */ |
3348 | 3859 |
3349 static int | 3860 int |
3350 canonicalize_vars_star (void **slot, void *data) | 3861 canonicalize_vars_star (variable **slot, dataflow_set *set) |
3351 { | 3862 { |
3352 dataflow_set *set = (dataflow_set *)data; | 3863 variable *var = *slot; |
3353 variable var = (variable) *slot; | |
3354 decl_or_value dv = var->dv; | 3864 decl_or_value dv = var->dv; |
3355 location_chain node; | 3865 location_chain *node; |
3356 rtx cval; | 3866 rtx cval; |
3357 decl_or_value cdv; | 3867 decl_or_value cdv; |
3358 void **cslot; | 3868 variable **cslot; |
3359 variable cvar; | 3869 variable *cvar; |
3360 location_chain cnode; | 3870 location_chain *cnode; |
3361 | 3871 |
3362 if (!dv_onepart_p (dv) || dv_is_value_p (dv)) | 3872 if (!var->onepart || var->onepart == ONEPART_VALUE) |
3363 return 1; | 3873 return 1; |
3364 | 3874 |
3365 gcc_assert (var->n_var_parts == 1); | 3875 gcc_assert (var->n_var_parts == 1); |
3366 | 3876 |
3367 node = var->var_part[0].loc_chain; | 3877 node = var->var_part[0].loc_chain; |
3375 /* Push values to the canonical one. */ | 3885 /* Push values to the canonical one. */ |
3376 cdv = dv_from_value (cval); | 3886 cdv = dv_from_value (cval); |
3377 cslot = shared_hash_find_slot_noinsert (set->vars, cdv); | 3887 cslot = shared_hash_find_slot_noinsert (set->vars, cdv); |
3378 if (!cslot) | 3888 if (!cslot) |
3379 return 1; | 3889 return 1; |
3380 cvar = (variable)*cslot; | 3890 cvar = *cslot; |
3381 gcc_assert (cvar->n_var_parts == 1); | 3891 gcc_assert (cvar->n_var_parts == 1); |
3382 | 3892 |
3383 cnode = cvar->var_part[0].loc_chain; | 3893 cnode = cvar->var_part[0].loc_chain; |
3384 | 3894 |
3385 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs | 3895 /* CVAL is canonical if its value list contains non-VALUEs or VALUEs |
3393 gcc_assert (!cnode->next); | 3903 gcc_assert (!cnode->next); |
3394 cval = cnode->loc; | 3904 cval = cnode->loc; |
3395 | 3905 |
3396 slot = set_slot_part (set, cval, slot, dv, 0, | 3906 slot = set_slot_part (set, cval, slot, dv, 0, |
3397 node->init, node->set_src); | 3907 node->init, node->set_src); |
3398 slot = clobber_slot_part (set, cval, slot, 0, node->set_src); | 3908 clobber_slot_part (set, cval, slot, 0, node->set_src); |
3399 | 3909 |
3400 return 1; | 3910 return 1; |
3401 } | 3911 } |
3402 | 3912 |
3403 /* Combine variable or value in *S1SLOT (in DSM->cur) with the | 3913 /* Combine variable or value in *S1SLOT (in DSM->cur) with the |
3404 corresponding entry in DSM->src. Multi-part variables are combined | 3914 corresponding entry in DSM->src. Multi-part variables are combined |
3405 with variable_union, whereas onepart dvs are combined with | 3915 with variable_union, whereas onepart dvs are combined with |
3406 intersection. */ | 3916 intersection. */ |
3407 | 3917 |
3408 static int | 3918 static int |
3409 variable_merge_over_cur (variable s1var, struct dfset_merge *dsm) | 3919 variable_merge_over_cur (variable *s1var, struct dfset_merge *dsm) |
3410 { | 3920 { |
3411 dataflow_set *dst = dsm->dst; | 3921 dataflow_set *dst = dsm->dst; |
3412 void **dstslot; | 3922 variable **dstslot; |
3413 variable s2var, dvar = NULL; | 3923 variable *s2var, *dvar = NULL; |
3414 decl_or_value dv = s1var->dv; | 3924 decl_or_value dv = s1var->dv; |
3415 bool onepart = dv_onepart_p (dv); | 3925 onepart_enum onepart = s1var->onepart; |
3416 rtx val; | 3926 rtx val; |
3417 hashval_t dvhash; | 3927 hashval_t dvhash; |
3418 location_chain node, *nodep; | 3928 location_chain *node, **nodep; |
3419 | 3929 |
3420 /* If the incoming onepart variable has an empty location list, then | 3930 /* If the incoming onepart variable has an empty location list, then |
3421 the intersection will be just as empty. For other variables, | 3931 the intersection will be just as empty. For other variables, |
3422 it's always union. */ | 3932 it's always union. */ |
3423 gcc_checking_assert (s1var->n_var_parts | 3933 gcc_checking_assert (s1var->n_var_parts |
3424 && s1var->var_part[0].loc_chain); | 3934 && s1var->var_part[0].loc_chain); |
3425 | 3935 |
3426 if (!onepart) | 3936 if (!onepart) |
3427 return variable_union (s1var, dst); | 3937 return variable_union (s1var, dst); |
3428 | 3938 |
3429 gcc_checking_assert (s1var->n_var_parts == 1 | 3939 gcc_checking_assert (s1var->n_var_parts == 1); |
3430 && s1var->var_part[0].offset == 0); | |
3431 | 3940 |
3432 dvhash = dv_htab_hash (dv); | 3941 dvhash = dv_htab_hash (dv); |
3433 if (dv_is_value_p (dv)) | 3942 if (dv_is_value_p (dv)) |
3434 val = dv_as_value (dv); | 3943 val = dv_as_value (dv); |
3435 else | 3944 else |
3442 return 1; | 3951 return 1; |
3443 } | 3952 } |
3444 | 3953 |
3445 dsm->src_onepart_cnt--; | 3954 dsm->src_onepart_cnt--; |
3446 gcc_assert (s2var->var_part[0].loc_chain | 3955 gcc_assert (s2var->var_part[0].loc_chain |
3447 && s2var->n_var_parts == 1 | 3956 && s2var->onepart == onepart |
3448 && s2var->var_part[0].offset == 0); | 3957 && s2var->n_var_parts == 1); |
3449 | 3958 |
3450 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash); | 3959 dstslot = shared_hash_find_slot_noinsert_1 (dst->vars, dv, dvhash); |
3451 if (dstslot) | 3960 if (dstslot) |
3452 { | 3961 { |
3453 dvar = (variable)*dstslot; | 3962 dvar = *dstslot; |
3454 gcc_assert (dvar->refcount == 1 | 3963 gcc_assert (dvar->refcount == 1 |
3455 && dvar->n_var_parts == 1 | 3964 && dvar->onepart == onepart |
3456 && dvar->var_part[0].offset == 0); | 3965 && dvar->n_var_parts == 1); |
3457 nodep = &dvar->var_part[0].loc_chain; | 3966 nodep = &dvar->var_part[0].loc_chain; |
3458 } | 3967 } |
3459 else | 3968 else |
3460 { | 3969 { |
3461 nodep = &node; | 3970 nodep = &node; |
3478 | 3987 |
3479 if (!dstslot) | 3988 if (!dstslot) |
3480 { | 3989 { |
3481 if (node) | 3990 if (node) |
3482 { | 3991 { |
3483 dvar = (variable) pool_alloc (dv_pool (dv)); | 3992 dvar = onepart_pool_allocate (onepart); |
3484 dvar->dv = dv; | 3993 dvar->dv = dv; |
3485 dvar->refcount = 1; | 3994 dvar->refcount = 1; |
3486 dvar->n_var_parts = 1; | 3995 dvar->n_var_parts = 1; |
3487 dvar->cur_loc_changed = false; | 3996 dvar->onepart = onepart; |
3488 dvar->in_changed_variables = false; | 3997 dvar->in_changed_variables = false; |
3489 dvar->var_part[0].offset = 0; | |
3490 dvar->var_part[0].loc_chain = node; | 3998 dvar->var_part[0].loc_chain = node; |
3491 dvar->var_part[0].cur_loc = NULL; | 3999 dvar->var_part[0].cur_loc = NULL; |
4000 if (onepart) | |
4001 VAR_LOC_1PAUX (dvar) = NULL; | |
4002 else | |
4003 VAR_PART_OFFSET (dvar, 0) = 0; | |
3492 | 4004 |
3493 dstslot | 4005 dstslot |
3494 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash, | 4006 = shared_hash_find_slot_unshare_1 (&dst->vars, dv, dvhash, |
3495 INSERT); | 4007 INSERT); |
3496 gcc_assert (!*dstslot); | 4008 gcc_assert (!*dstslot); |
3502 } | 4014 } |
3503 | 4015 |
3504 nodep = &dvar->var_part[0].loc_chain; | 4016 nodep = &dvar->var_part[0].loc_chain; |
3505 while ((node = *nodep)) | 4017 while ((node = *nodep)) |
3506 { | 4018 { |
3507 location_chain *nextp = &node->next; | 4019 location_chain **nextp = &node->next; |
3508 | 4020 |
3509 if (GET_CODE (node->loc) == REG) | 4021 if (GET_CODE (node->loc) == REG) |
3510 { | 4022 { |
3511 attrs list; | 4023 attrs *list; |
3512 | 4024 |
3513 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next) | 4025 for (list = dst->regs[REGNO (node->loc)]; list; list = list->next) |
3514 if (GET_MODE (node->loc) == GET_MODE (list->loc) | 4026 if (GET_MODE (node->loc) == GET_MODE (list->loc) |
3515 && dv_is_value_p (list->dv)) | 4027 && dv_is_value_p (list->dv)) |
3516 break; | 4028 break; |
3543 walk it all. */ | 4055 walk it all. */ |
3544 break; | 4056 break; |
3545 nodep = nextp; | 4057 nodep = nextp; |
3546 } | 4058 } |
3547 | 4059 |
3548 if (dvar != (variable)*dstslot) | 4060 if (dvar != *dstslot) |
3549 dvar = (variable)*dstslot; | 4061 dvar = *dstslot; |
3550 nodep = &dvar->var_part[0].loc_chain; | 4062 nodep = &dvar->var_part[0].loc_chain; |
3551 | 4063 |
3552 if (val) | 4064 if (val) |
3553 { | 4065 { |
3554 /* Mark all referenced nodes for canonicalization, and make sure | 4066 /* Mark all referenced nodes for canonicalization, and make sure |
3566 gcc_assert (*dstslot == dvar); | 4078 gcc_assert (*dstslot == dvar); |
3567 canonicalize_values_star (dstslot, dst); | 4079 canonicalize_values_star (dstslot, dst); |
3568 gcc_checking_assert (dstslot | 4080 gcc_checking_assert (dstslot |
3569 == shared_hash_find_slot_noinsert_1 (dst->vars, | 4081 == shared_hash_find_slot_noinsert_1 (dst->vars, |
3570 dv, dvhash)); | 4082 dv, dvhash)); |
3571 dvar = (variable)*dstslot; | 4083 dvar = *dstslot; |
3572 } | 4084 } |
3573 else | 4085 else |
3574 { | 4086 { |
3575 bool has_value = false, has_other = false; | 4087 bool has_value = false, has_other = false; |
3576 | 4088 |
3602 for (node = *nodep; node; node = node->next) | 4114 for (node = *nodep; node; node = node->next) |
3603 { | 4115 { |
3604 if (GET_CODE (node->loc) == VALUE) | 4116 if (GET_CODE (node->loc) == VALUE) |
3605 { | 4117 { |
3606 decl_or_value dv = dv_from_value (node->loc); | 4118 decl_or_value dv = dv_from_value (node->loc); |
3607 void **slot = NULL; | 4119 variable **slot = NULL; |
3608 | 4120 |
3609 if (shared_hash_shared (dst->vars)) | 4121 if (shared_hash_shared (dst->vars)) |
3610 slot = shared_hash_find_slot_noinsert (dst->vars, dv); | 4122 slot = shared_hash_find_slot_noinsert (dst->vars, dv); |
3611 if (!slot) | 4123 if (!slot) |
3612 slot = shared_hash_find_slot_unshare (&dst->vars, dv, | 4124 slot = shared_hash_find_slot_unshare (&dst->vars, dv, |
3613 INSERT); | 4125 INSERT); |
3614 if (!*slot) | 4126 if (!*slot) |
3615 { | 4127 { |
3616 variable var = (variable) pool_alloc (dv_pool (dv)); | 4128 variable *var = onepart_pool_allocate (ONEPART_VALUE); |
3617 var->dv = dv; | 4129 var->dv = dv; |
3618 var->refcount = 1; | 4130 var->refcount = 1; |
3619 var->n_var_parts = 1; | 4131 var->n_var_parts = 1; |
3620 var->cur_loc_changed = false; | 4132 var->onepart = ONEPART_VALUE; |
3621 var->in_changed_variables = false; | 4133 var->in_changed_variables = false; |
3622 var->var_part[0].offset = 0; | |
3623 var->var_part[0].loc_chain = NULL; | 4134 var->var_part[0].loc_chain = NULL; |
3624 var->var_part[0].cur_loc = NULL; | 4135 var->var_part[0].cur_loc = NULL; |
4136 VAR_LOC_1PAUX (var) = NULL; | |
3625 *slot = var; | 4137 *slot = var; |
3626 } | 4138 } |
3627 | 4139 |
3628 VALUE_RECURSED_INTO (node->loc) = true; | 4140 VALUE_RECURSED_INTO (node->loc) = true; |
3629 } | 4141 } |
3633 gcc_assert (*dstslot == dvar); | 4145 gcc_assert (*dstslot == dvar); |
3634 canonicalize_values_star (dstslot, dst); | 4146 canonicalize_values_star (dstslot, dst); |
3635 gcc_checking_assert (dstslot | 4147 gcc_checking_assert (dstslot |
3636 == shared_hash_find_slot_noinsert_1 (dst->vars, | 4148 == shared_hash_find_slot_noinsert_1 (dst->vars, |
3637 dv, dvhash)); | 4149 dv, dvhash)); |
3638 dvar = (variable)*dstslot; | 4150 dvar = *dstslot; |
3639 } | 4151 } |
3640 } | 4152 } |
3641 | 4153 |
3642 if (!onepart_variable_different_p (dvar, s2var)) | 4154 if (!onepart_variable_different_p (dvar, s2var)) |
3643 { | 4155 { |
3662 multi-part variable. Unions of multi-part variables and | 4174 multi-part variable. Unions of multi-part variables and |
3663 intersections of one-part ones will be handled in | 4175 intersections of one-part ones will be handled in |
3664 variable_merge_over_cur(). */ | 4176 variable_merge_over_cur(). */ |
3665 | 4177 |
3666 static int | 4178 static int |
3667 variable_merge_over_src (variable s2var, struct dfset_merge *dsm) | 4179 variable_merge_over_src (variable *s2var, struct dfset_merge *dsm) |
3668 { | 4180 { |
3669 dataflow_set *dst = dsm->dst; | 4181 dataflow_set *dst = dsm->dst; |
3670 decl_or_value dv = s2var->dv; | 4182 decl_or_value dv = s2var->dv; |
3671 bool onepart = dv_onepart_p (dv); | 4183 |
3672 | 4184 if (!s2var->onepart) |
3673 if (!onepart) | 4185 { |
3674 { | 4186 variable **dstp = shared_hash_find_slot (dst->vars, dv); |
3675 void **dstp = shared_hash_find_slot (dst->vars, dv); | |
3676 *dstp = s2var; | 4187 *dstp = s2var; |
3677 s2var->refcount++; | 4188 s2var->refcount++; |
3678 return 1; | 4189 return 1; |
3679 } | 4190 } |
3680 | 4191 |
3691 dataflow_set cur = *dst; | 4202 dataflow_set cur = *dst; |
3692 dataflow_set *src1 = &cur; | 4203 dataflow_set *src1 = &cur; |
3693 struct dfset_merge dsm; | 4204 struct dfset_merge dsm; |
3694 int i; | 4205 int i; |
3695 size_t src1_elems, src2_elems; | 4206 size_t src1_elems, src2_elems; |
3696 htab_iterator hi; | 4207 variable_iterator_type hi; |
3697 variable var; | 4208 variable *var; |
3698 | 4209 |
3699 src1_elems = htab_elements (shared_hash_htab (src1->vars)); | 4210 src1_elems = shared_hash_htab (src1->vars)->elements (); |
3700 src2_elems = htab_elements (shared_hash_htab (src2->vars)); | 4211 src2_elems = shared_hash_htab (src2->vars)->elements (); |
3701 dataflow_set_init (dst); | 4212 dataflow_set_init (dst); |
3702 dst->stack_adjust = cur.stack_adjust; | 4213 dst->stack_adjust = cur.stack_adjust; |
3703 shared_hash_destroy (dst->vars); | 4214 shared_hash_destroy (dst->vars); |
3704 dst->vars = (shared_hash) pool_alloc (shared_hash_pool); | 4215 dst->vars = new shared_hash; |
3705 dst->vars->refcount = 1; | 4216 dst->vars->refcount = 1; |
3706 dst->vars->htab | 4217 dst->vars->htab = new variable_table_type (MAX (src1_elems, src2_elems)); |
3707 = htab_create (MAX (src1_elems, src2_elems), variable_htab_hash, | |
3708 variable_htab_eq, variable_htab_free); | |
3709 | 4218 |
3710 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) | 4219 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
3711 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]); | 4220 attrs_list_mpdv_union (&dst->regs[i], src1->regs[i], src2->regs[i]); |
3712 | 4221 |
3713 dsm.dst = dst; | 4222 dsm.dst = dst; |
3714 dsm.src = src2; | 4223 dsm.src = src2; |
3715 dsm.cur = src1; | 4224 dsm.cur = src1; |
3716 dsm.src_onepart_cnt = 0; | 4225 dsm.src_onepart_cnt = 0; |
3717 | 4226 |
3718 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.src->vars), var, variable, hi) | 4227 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.src->vars), |
4228 var, variable, hi) | |
3719 variable_merge_over_src (var, &dsm); | 4229 variable_merge_over_src (var, &dsm); |
3720 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (dsm.cur->vars), var, variable, hi) | 4230 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (dsm.cur->vars), |
4231 var, variable, hi) | |
3721 variable_merge_over_cur (var, &dsm); | 4232 variable_merge_over_cur (var, &dsm); |
3722 | 4233 |
3723 if (dsm.src_onepart_cnt) | 4234 if (dsm.src_onepart_cnt) |
3724 dst_can_be_shared = false; | 4235 dst_can_be_shared = false; |
3725 | 4236 |
3730 | 4241 |
3731 static void | 4242 static void |
3732 dataflow_set_equiv_regs (dataflow_set *set) | 4243 dataflow_set_equiv_regs (dataflow_set *set) |
3733 { | 4244 { |
3734 int i; | 4245 int i; |
3735 attrs list, *listp; | 4246 attrs *list, **listp; |
3736 | 4247 |
3737 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) | 4248 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) |
3738 { | 4249 { |
3739 rtx canon[NUM_MACHINE_MODES]; | 4250 rtx canon[NUM_MACHINE_MODES]; |
3740 | 4251 |
3785 for (listp = &set->regs[i]; (list = *listp); | 4296 for (listp = &set->regs[i]; (list = *listp); |
3786 listp = list ? &list->next : listp) | 4297 listp = list ? &list->next : listp) |
3787 if (list->offset == 0 && dv_onepart_p (list->dv)) | 4298 if (list->offset == 0 && dv_onepart_p (list->dv)) |
3788 { | 4299 { |
3789 rtx cval = canon[(int)GET_MODE (list->loc)]; | 4300 rtx cval = canon[(int)GET_MODE (list->loc)]; |
3790 void **slot; | 4301 variable **slot; |
3791 | 4302 |
3792 if (!cval) | 4303 if (!cval) |
3793 continue; | 4304 continue; |
3794 | 4305 |
3795 if (dv_is_value_p (list->dv)) | 4306 if (dv_is_value_p (list->dv)) |
3809 | 4320 |
3810 /* Remove any redundant values in the location list of VAR, which must | 4321 /* Remove any redundant values in the location list of VAR, which must |
3811 be unshared and 1-part. */ | 4322 be unshared and 1-part. */ |
3812 | 4323 |
3813 static void | 4324 static void |
3814 remove_duplicate_values (variable var) | 4325 remove_duplicate_values (variable *var) |
3815 { | 4326 { |
3816 location_chain node, *nodep; | 4327 location_chain *node, **nodep; |
3817 | 4328 |
3818 gcc_assert (dv_onepart_p (var->dv)); | 4329 gcc_assert (var->onepart); |
3819 gcc_assert (var->n_var_parts == 1); | 4330 gcc_assert (var->n_var_parts == 1); |
3820 gcc_assert (var->refcount == 1); | 4331 gcc_assert (var->refcount == 1); |
3821 | 4332 |
3822 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); ) | 4333 for (nodep = &var->var_part[0].loc_chain; (node = *nodep); ) |
3823 { | 4334 { |
3825 { | 4336 { |
3826 if (VALUE_RECURSED_INTO (node->loc)) | 4337 if (VALUE_RECURSED_INTO (node->loc)) |
3827 { | 4338 { |
3828 /* Remove duplicate value node. */ | 4339 /* Remove duplicate value node. */ |
3829 *nodep = node->next; | 4340 *nodep = node->next; |
3830 pool_free (loc_chain_pool, node); | 4341 delete node; |
3831 continue; | 4342 continue; |
3832 } | 4343 } |
3833 else | 4344 else |
3834 VALUE_RECURSED_INTO (node->loc) = true; | 4345 VALUE_RECURSED_INTO (node->loc) = true; |
3835 } | 4346 } |
3856 }; | 4367 }; |
3857 | 4368 |
3858 /* Create values for incoming expressions associated with one-part | 4369 /* Create values for incoming expressions associated with one-part |
3859 variables that don't have value numbers for them. */ | 4370 variables that don't have value numbers for them. */ |
3860 | 4371 |
3861 static int | 4372 int |
3862 variable_post_merge_new_vals (void **slot, void *info) | 4373 variable_post_merge_new_vals (variable **slot, dfset_post_merge *dfpm) |
3863 { | 4374 { |
3864 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info; | |
3865 dataflow_set *set = dfpm->set; | 4375 dataflow_set *set = dfpm->set; |
3866 variable var = (variable)*slot; | 4376 variable *var = *slot; |
3867 location_chain node; | 4377 location_chain *node; |
3868 | 4378 |
3869 if (!dv_onepart_p (var->dv) || !var->n_var_parts) | 4379 if (!var->onepart || !var->n_var_parts) |
3870 return 1; | 4380 return 1; |
3871 | 4381 |
3872 gcc_assert (var->n_var_parts == 1); | 4382 gcc_assert (var->n_var_parts == 1); |
3873 | 4383 |
3874 if (dv_is_decl_p (var->dv)) | 4384 if (dv_is_decl_p (var->dv)) |
3880 { | 4390 { |
3881 if (GET_CODE (node->loc) == VALUE) | 4391 if (GET_CODE (node->loc) == VALUE) |
3882 gcc_assert (!VALUE_RECURSED_INTO (node->loc)); | 4392 gcc_assert (!VALUE_RECURSED_INTO (node->loc)); |
3883 else if (GET_CODE (node->loc) == REG) | 4393 else if (GET_CODE (node->loc) == REG) |
3884 { | 4394 { |
3885 attrs att, *attp, *curp = NULL; | 4395 attrs *att, **attp, **curp = NULL; |
3886 | 4396 |
3887 if (var->refcount != 1) | 4397 if (var->refcount != 1) |
3888 { | 4398 { |
3889 slot = unshare_variable (set, slot, var, | 4399 slot = unshare_variable (set, slot, var, |
3890 VAR_INIT_STATUS_INITIALIZED); | 4400 VAR_INIT_STATUS_INITIALIZED); |
3891 var = (variable)*slot; | 4401 var = *slot; |
3892 goto restart; | 4402 goto restart; |
3893 } | 4403 } |
3894 | 4404 |
3895 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp); | 4405 for (attp = &set->regs[REGNO (node->loc)]; (att = *attp); |
3896 attp = &att->next) | 4406 attp = &att->next) |
3979 /* Remove attribute referring to the decl, which now | 4489 /* Remove attribute referring to the decl, which now |
3980 uses the value for the register, already existing or | 4490 uses the value for the register, already existing or |
3981 to be added when we bring perm in. */ | 4491 to be added when we bring perm in. */ |
3982 att = *curp; | 4492 att = *curp; |
3983 *curp = att->next; | 4493 *curp = att->next; |
3984 pool_free (attrs_pool, att); | 4494 delete att; |
3985 } | 4495 } |
3986 } | 4496 } |
3987 | 4497 |
3988 if (check_dupes) | 4498 if (check_dupes) |
3989 remove_duplicate_values (var); | 4499 remove_duplicate_values (var); |
3993 } | 4503 } |
3994 | 4504 |
3995 /* Reset values in the permanent set that are not associated with the | 4505 /* Reset values in the permanent set that are not associated with the |
3996 chosen expression. */ | 4506 chosen expression. */ |
3997 | 4507 |
3998 static int | 4508 int |
3999 variable_post_merge_perm_vals (void **pslot, void *info) | 4509 variable_post_merge_perm_vals (variable **pslot, dfset_post_merge *dfpm) |
4000 { | 4510 { |
4001 struct dfset_post_merge *dfpm = (struct dfset_post_merge *)info; | |
4002 dataflow_set *set = dfpm->set; | 4511 dataflow_set *set = dfpm->set; |
4003 variable pvar = (variable)*pslot, var; | 4512 variable *pvar = *pslot, *var; |
4004 location_chain pnode; | 4513 location_chain *pnode; |
4005 decl_or_value dv; | 4514 decl_or_value dv; |
4006 attrs att; | 4515 attrs *att; |
4007 | 4516 |
4008 gcc_assert (dv_is_value_p (pvar->dv) | 4517 gcc_assert (dv_is_value_p (pvar->dv) |
4009 && pvar->n_var_parts == 1); | 4518 && pvar->n_var_parts == 1); |
4010 pnode = pvar->var_part[0].loc_chain; | 4519 pnode = pvar->var_part[0].loc_chain; |
4011 gcc_assert (pnode | 4520 gcc_assert (pnode |
4062 struct dfset_post_merge dfpm; | 4571 struct dfset_post_merge dfpm; |
4063 | 4572 |
4064 dfpm.set = set; | 4573 dfpm.set = set; |
4065 dfpm.permp = permp; | 4574 dfpm.permp = permp; |
4066 | 4575 |
4067 htab_traverse (shared_hash_htab (set->vars), variable_post_merge_new_vals, | 4576 shared_hash_htab (set->vars) |
4068 &dfpm); | 4577 ->traverse <dfset_post_merge*, variable_post_merge_new_vals> (&dfpm); |
4069 if (*permp) | 4578 if (*permp) |
4070 htab_traverse (shared_hash_htab ((*permp)->vars), | 4579 shared_hash_htab ((*permp)->vars) |
4071 variable_post_merge_perm_vals, &dfpm); | 4580 ->traverse <dfset_post_merge*, variable_post_merge_perm_vals> (&dfpm); |
4072 htab_traverse (shared_hash_htab (set->vars), canonicalize_values_star, set); | 4581 shared_hash_htab (set->vars) |
4073 htab_traverse (shared_hash_htab (set->vars), canonicalize_vars_star, set); | 4582 ->traverse <dataflow_set *, canonicalize_values_star> (set); |
4583 shared_hash_htab (set->vars) | |
4584 ->traverse <dataflow_set *, canonicalize_vars_star> (set); | |
4074 } | 4585 } |
4075 | 4586 |
4076 /* Return a node whose loc is a MEM that refers to EXPR in the | 4587 /* Return a node whose loc is a MEM that refers to EXPR in the |
4077 location list of a one-part variable or value VAR, or in that of | 4588 location list of a one-part variable or value VAR, or in that of |
4078 any values recursively mentioned in the location lists. */ | 4589 any values recursively mentioned in the location lists. */ |
4079 | 4590 |
4080 static location_chain | 4591 static location_chain * |
4081 find_mem_expr_in_1pdv (tree expr, rtx val, htab_t vars) | 4592 find_mem_expr_in_1pdv (tree expr, rtx val, variable_table_type *vars) |
4082 { | 4593 { |
4083 location_chain node; | 4594 location_chain *node; |
4084 decl_or_value dv; | 4595 decl_or_value dv; |
4085 variable var; | 4596 variable *var; |
4086 location_chain where = NULL; | 4597 location_chain *where = NULL; |
4087 | 4598 |
4088 if (!val) | 4599 if (!val) |
4089 return NULL; | 4600 return NULL; |
4090 | 4601 |
4091 gcc_assert (GET_CODE (val) == VALUE | 4602 gcc_assert (GET_CODE (val) == VALUE |
4092 && !VALUE_RECURSED_INTO (val)); | 4603 && !VALUE_RECURSED_INTO (val)); |
4093 | 4604 |
4094 dv = dv_from_value (val); | 4605 dv = dv_from_value (val); |
4095 var = (variable) htab_find_with_hash (vars, dv, dv_htab_hash (dv)); | 4606 var = vars->find_with_hash (dv, dv_htab_hash (dv)); |
4096 | 4607 |
4097 if (!var) | 4608 if (!var) |
4098 return NULL; | 4609 return NULL; |
4099 | 4610 |
4100 gcc_assert (dv_onepart_p (var->dv)); | 4611 gcc_assert (var->onepart); |
4101 | 4612 |
4102 if (!var->n_var_parts) | 4613 if (!var->n_var_parts) |
4103 return NULL; | 4614 return NULL; |
4104 | 4615 |
4105 gcc_assert (var->var_part[0].offset == 0); | |
4106 | |
4107 VALUE_RECURSED_INTO (val) = true; | 4616 VALUE_RECURSED_INTO (val) = true; |
4108 | 4617 |
4109 for (node = var->var_part[0].loc_chain; node; node = node->next) | 4618 for (node = var->var_part[0].loc_chain; node; node = node->next) |
4110 if (MEM_P (node->loc) && MEM_EXPR (node->loc) == expr | 4619 if (MEM_P (node->loc) |
4111 && MEM_OFFSET (node->loc) == 0) | 4620 && MEM_EXPR (node->loc) == expr |
4621 && INT_MEM_OFFSET (node->loc) == 0) | |
4112 { | 4622 { |
4113 where = node; | 4623 where = node; |
4114 break; | 4624 break; |
4115 } | 4625 } |
4116 else if (GET_CODE (node->loc) == VALUE | 4626 else if (GET_CODE (node->loc) == VALUE |
4148 | 4658 |
4149 /* Remove all MEMs from the location list of a hash table entry for a | 4659 /* Remove all MEMs from the location list of a hash table entry for a |
4150 one-part variable, except those whose MEM attributes map back to | 4660 one-part variable, except those whose MEM attributes map back to |
4151 the variable itself, directly or within a VALUE. */ | 4661 the variable itself, directly or within a VALUE. */ |
4152 | 4662 |
4153 static int | 4663 int |
4154 dataflow_set_preserve_mem_locs (void **slot, void *data) | 4664 dataflow_set_preserve_mem_locs (variable **slot, dataflow_set *set) |
4155 { | 4665 { |
4156 dataflow_set *set = (dataflow_set *) data; | 4666 variable *var = *slot; |
4157 variable var = (variable) *slot; | 4667 |
4158 | 4668 if (var->onepart == ONEPART_VDECL || var->onepart == ONEPART_DEXPR) |
4159 if (dv_is_decl_p (var->dv) && dv_onepart_p (var->dv)) | |
4160 { | 4669 { |
4161 tree decl = dv_as_decl (var->dv); | 4670 tree decl = dv_as_decl (var->dv); |
4162 location_chain loc, *locp; | 4671 location_chain *loc, **locp; |
4163 bool changed = false; | 4672 bool changed = false; |
4164 | 4673 |
4165 if (!var->n_var_parts) | 4674 if (!var->n_var_parts) |
4166 return 1; | 4675 return 1; |
4167 | 4676 |
4169 | 4678 |
4170 if (shared_var_p (var, set->vars)) | 4679 if (shared_var_p (var, set->vars)) |
4171 { | 4680 { |
4172 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next) | 4681 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next) |
4173 { | 4682 { |
4174 /* We want to remove dying MEMs that doesn't refer to | 4683 /* We want to remove dying MEMs that don't refer to DECL. */ |
4175 DECL. */ | |
4176 if (GET_CODE (loc->loc) == MEM | 4684 if (GET_CODE (loc->loc) == MEM |
4177 && (MEM_EXPR (loc->loc) != decl | 4685 && (MEM_EXPR (loc->loc) != decl |
4178 || MEM_OFFSET (loc->loc)) | 4686 || INT_MEM_OFFSET (loc->loc) != 0) |
4179 && !mem_dies_at_call (loc->loc)) | 4687 && mem_dies_at_call (loc->loc)) |
4180 break; | 4688 break; |
4181 /* We want to move here MEMs that do refer to DECL. */ | 4689 /* We want to move here MEMs that do refer to DECL. */ |
4182 else if (GET_CODE (loc->loc) == VALUE | 4690 else if (GET_CODE (loc->loc) == VALUE |
4183 && find_mem_expr_in_1pdv (decl, loc->loc, | 4691 && find_mem_expr_in_1pdv (decl, loc->loc, |
4184 shared_hash_htab (set->vars))) | 4692 shared_hash_htab (set->vars))) |
4187 | 4695 |
4188 if (!loc) | 4696 if (!loc) |
4189 return 1; | 4697 return 1; |
4190 | 4698 |
4191 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN); | 4699 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN); |
4192 var = (variable)*slot; | 4700 var = *slot; |
4193 gcc_assert (var->n_var_parts == 1); | 4701 gcc_assert (var->n_var_parts == 1); |
4194 } | 4702 } |
4195 | 4703 |
4196 for (locp = &var->var_part[0].loc_chain, loc = *locp; | 4704 for (locp = &var->var_part[0].loc_chain, loc = *locp; |
4197 loc; loc = *locp) | 4705 loc; loc = *locp) |
4198 { | 4706 { |
4199 rtx old_loc = loc->loc; | 4707 rtx old_loc = loc->loc; |
4200 if (GET_CODE (old_loc) == VALUE) | 4708 if (GET_CODE (old_loc) == VALUE) |
4201 { | 4709 { |
4202 location_chain mem_node | 4710 location_chain *mem_node |
4203 = find_mem_expr_in_1pdv (decl, loc->loc, | 4711 = find_mem_expr_in_1pdv (decl, loc->loc, |
4204 shared_hash_htab (set->vars)); | 4712 shared_hash_htab (set->vars)); |
4205 | 4713 |
4206 /* ??? This picks up only one out of multiple MEMs that | 4714 /* ??? This picks up only one out of multiple MEMs that |
4207 refer to the same variable. Do we ever need to be | 4715 refer to the same variable. Do we ever need to be |
4217 } | 4725 } |
4218 } | 4726 } |
4219 | 4727 |
4220 if (GET_CODE (loc->loc) != MEM | 4728 if (GET_CODE (loc->loc) != MEM |
4221 || (MEM_EXPR (loc->loc) == decl | 4729 || (MEM_EXPR (loc->loc) == decl |
4222 && MEM_OFFSET (loc->loc) == 0) | 4730 && INT_MEM_OFFSET (loc->loc) == 0) |
4223 || !mem_dies_at_call (loc->loc)) | 4731 || !mem_dies_at_call (loc->loc)) |
4224 { | 4732 { |
4225 if (old_loc != loc->loc && emit_notes) | 4733 if (old_loc != loc->loc && emit_notes) |
4226 { | 4734 { |
4227 if (old_loc == var->var_part[0].cur_loc) | 4735 if (old_loc == var->var_part[0].cur_loc) |
4228 { | 4736 { |
4229 changed = true; | 4737 changed = true; |
4230 var->var_part[0].cur_loc = NULL; | 4738 var->var_part[0].cur_loc = NULL; |
4231 var->cur_loc_changed = true; | |
4232 } | 4739 } |
4233 add_value_chains (var->dv, loc->loc); | |
4234 remove_value_chains (var->dv, old_loc); | |
4235 } | 4740 } |
4236 locp = &loc->next; | 4741 locp = &loc->next; |
4237 continue; | 4742 continue; |
4238 } | 4743 } |
4239 | 4744 |
4240 if (emit_notes) | 4745 if (emit_notes) |
4241 { | 4746 { |
4242 remove_value_chains (var->dv, old_loc); | |
4243 if (old_loc == var->var_part[0].cur_loc) | 4747 if (old_loc == var->var_part[0].cur_loc) |
4244 { | 4748 { |
4245 changed = true; | 4749 changed = true; |
4246 var->var_part[0].cur_loc = NULL; | 4750 var->var_part[0].cur_loc = NULL; |
4247 var->cur_loc_changed = true; | |
4248 } | 4751 } |
4249 } | 4752 } |
4250 *locp = loc->next; | 4753 *locp = loc->next; |
4251 pool_free (loc_chain_pool, loc); | 4754 delete loc; |
4252 } | 4755 } |
4253 | 4756 |
4254 if (!var->var_part[0].loc_chain) | 4757 if (!var->var_part[0].loc_chain) |
4255 { | 4758 { |
4256 var->n_var_parts--; | 4759 var->n_var_parts--; |
4262 | 4765 |
4263 return 1; | 4766 return 1; |
4264 } | 4767 } |
4265 | 4768 |
4266 /* Remove all MEMs from the location list of a hash table entry for a | 4769 /* Remove all MEMs from the location list of a hash table entry for a |
4267 value. */ | 4770 onepart variable. */ |
4268 | 4771 |
4269 static int | 4772 int |
4270 dataflow_set_remove_mem_locs (void **slot, void *data) | 4773 dataflow_set_remove_mem_locs (variable **slot, dataflow_set *set) |
4271 { | 4774 { |
4272 dataflow_set *set = (dataflow_set *) data; | 4775 variable *var = *slot; |
4273 variable var = (variable) *slot; | 4776 |
4274 | 4777 if (var->onepart != NOT_ONEPART) |
4275 if (dv_is_value_p (var->dv)) | 4778 { |
4276 { | 4779 location_chain *loc, **locp; |
4277 location_chain loc, *locp; | |
4278 bool changed = false; | 4780 bool changed = false; |
4781 rtx cur_loc; | |
4279 | 4782 |
4280 gcc_assert (var->n_var_parts == 1); | 4783 gcc_assert (var->n_var_parts == 1); |
4281 | 4784 |
4282 if (shared_var_p (var, set->vars)) | 4785 if (shared_var_p (var, set->vars)) |
4283 { | 4786 { |
4288 | 4791 |
4289 if (!loc) | 4792 if (!loc) |
4290 return 1; | 4793 return 1; |
4291 | 4794 |
4292 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN); | 4795 slot = unshare_variable (set, slot, var, VAR_INIT_STATUS_UNKNOWN); |
4293 var = (variable)*slot; | 4796 var = *slot; |
4294 gcc_assert (var->n_var_parts == 1); | 4797 gcc_assert (var->n_var_parts == 1); |
4295 } | 4798 } |
4799 | |
4800 if (VAR_LOC_1PAUX (var)) | |
4801 cur_loc = VAR_LOC_FROM (var); | |
4802 else | |
4803 cur_loc = var->var_part[0].cur_loc; | |
4296 | 4804 |
4297 for (locp = &var->var_part[0].loc_chain, loc = *locp; | 4805 for (locp = &var->var_part[0].loc_chain, loc = *locp; |
4298 loc; loc = *locp) | 4806 loc; loc = *locp) |
4299 { | 4807 { |
4300 if (GET_CODE (loc->loc) != MEM | 4808 if (GET_CODE (loc->loc) != MEM |
4302 { | 4810 { |
4303 locp = &loc->next; | 4811 locp = &loc->next; |
4304 continue; | 4812 continue; |
4305 } | 4813 } |
4306 | 4814 |
4307 if (emit_notes) | |
4308 remove_value_chains (var->dv, loc->loc); | |
4309 *locp = loc->next; | 4815 *locp = loc->next; |
4310 /* If we have deleted the location which was last emitted | 4816 /* If we have deleted the location which was last emitted |
4311 we have to emit new location so add the variable to set | 4817 we have to emit new location so add the variable to set |
4312 of changed variables. */ | 4818 of changed variables. */ |
4313 if (var->var_part[0].cur_loc == loc->loc) | 4819 if (cur_loc == loc->loc) |
4314 { | 4820 { |
4315 changed = true; | 4821 changed = true; |
4316 var->var_part[0].cur_loc = NULL; | 4822 var->var_part[0].cur_loc = NULL; |
4317 var->cur_loc_changed = true; | 4823 if (VAR_LOC_1PAUX (var)) |
4824 VAR_LOC_FROM (var) = NULL; | |
4318 } | 4825 } |
4319 pool_free (loc_chain_pool, loc); | 4826 delete loc; |
4320 } | 4827 } |
4321 | 4828 |
4322 if (!var->var_part[0].loc_chain) | 4829 if (!var->var_part[0].loc_chain) |
4323 { | 4830 { |
4324 var->n_var_parts--; | 4831 var->n_var_parts--; |
4333 | 4840 |
4334 /* Remove all variable-location information about call-clobbered | 4841 /* Remove all variable-location information about call-clobbered |
4335 registers, as well as associations between MEMs and VALUEs. */ | 4842 registers, as well as associations between MEMs and VALUEs. */ |
4336 | 4843 |
4337 static void | 4844 static void |
4338 dataflow_set_clear_at_call (dataflow_set *set) | 4845 dataflow_set_clear_at_call (dataflow_set *set, rtx_insn *call_insn) |
4339 { | 4846 { |
4340 int r; | 4847 unsigned int r; |
4341 | 4848 hard_reg_set_iterator hrsi; |
4342 for (r = 0; r < FIRST_PSEUDO_REGISTER; r++) | 4849 HARD_REG_SET invalidated_regs; |
4343 if (TEST_HARD_REG_BIT (regs_invalidated_by_call, r)) | 4850 |
4344 var_regno_delete (set, r); | 4851 get_call_reg_set_usage (call_insn, &invalidated_regs, |
4852 regs_invalidated_by_call); | |
4853 | |
4854 EXECUTE_IF_SET_IN_HARD_REG_SET (invalidated_regs, 0, r, hrsi) | |
4855 var_regno_delete (set, r); | |
4345 | 4856 |
4346 if (MAY_HAVE_DEBUG_INSNS) | 4857 if (MAY_HAVE_DEBUG_INSNS) |
4347 { | 4858 { |
4348 set->traversed_vars = set->vars; | 4859 set->traversed_vars = set->vars; |
4349 htab_traverse (shared_hash_htab (set->vars), | 4860 shared_hash_htab (set->vars) |
4350 dataflow_set_preserve_mem_locs, set); | 4861 ->traverse <dataflow_set *, dataflow_set_preserve_mem_locs> (set); |
4351 set->traversed_vars = set->vars; | 4862 set->traversed_vars = set->vars; |
4352 htab_traverse (shared_hash_htab (set->vars), dataflow_set_remove_mem_locs, | 4863 shared_hash_htab (set->vars) |
4353 set); | 4864 ->traverse <dataflow_set *, dataflow_set_remove_mem_locs> (set); |
4354 set->traversed_vars = NULL; | 4865 set->traversed_vars = NULL; |
4355 } | 4866 } |
4356 } | 4867 } |
4357 | 4868 |
4358 static bool | 4869 static bool |
4359 variable_part_different_p (variable_part *vp1, variable_part *vp2) | 4870 variable_part_different_p (variable_part *vp1, variable_part *vp2) |
4360 { | 4871 { |
4361 location_chain lc1, lc2; | 4872 location_chain *lc1, *lc2; |
4362 | 4873 |
4363 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next) | 4874 for (lc1 = vp1->loc_chain; lc1; lc1 = lc1->next) |
4364 { | 4875 { |
4365 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next) | 4876 for (lc2 = vp2->loc_chain; lc2; lc2 = lc2->next) |
4366 { | 4877 { |
4380 | 4891 |
4381 /* Return true if one-part variables VAR1 and VAR2 are different. | 4892 /* Return true if one-part variables VAR1 and VAR2 are different. |
4382 They must be in canonical order. */ | 4893 They must be in canonical order. */ |
4383 | 4894 |
4384 static bool | 4895 static bool |
4385 onepart_variable_different_p (variable var1, variable var2) | 4896 onepart_variable_different_p (variable *var1, variable *var2) |
4386 { | 4897 { |
4387 location_chain lc1, lc2; | 4898 location_chain *lc1, *lc2; |
4388 | 4899 |
4389 if (var1 == var2) | 4900 if (var1 == var2) |
4390 return false; | 4901 return false; |
4391 | 4902 |
4392 gcc_assert (var1->n_var_parts == 1 | 4903 gcc_assert (var1->n_var_parts == 1 |
4406 } | 4917 } |
4407 | 4918 |
4408 return lc1 != lc2; | 4919 return lc1 != lc2; |
4409 } | 4920 } |
4410 | 4921 |
4922 /* Return true if one-part variables VAR1 and VAR2 are different. | |
4923 They must be in canonical order. */ | |
4924 | |
4925 static void | |
4926 dump_onepart_variable_differences (variable *var1, variable *var2) | |
4927 { | |
4928 location_chain *lc1, *lc2; | |
4929 | |
4930 gcc_assert (var1 != var2); | |
4931 gcc_assert (dump_file); | |
4932 gcc_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)); | |
4933 gcc_assert (var1->n_var_parts == 1 | |
4934 && var2->n_var_parts == 1); | |
4935 | |
4936 lc1 = var1->var_part[0].loc_chain; | |
4937 lc2 = var2->var_part[0].loc_chain; | |
4938 | |
4939 gcc_assert (lc1 && lc2); | |
4940 | |
4941 while (lc1 && lc2) | |
4942 { | |
4943 switch (loc_cmp (lc1->loc, lc2->loc)) | |
4944 { | |
4945 case -1: | |
4946 fprintf (dump_file, "removed: "); | |
4947 print_rtl_single (dump_file, lc1->loc); | |
4948 lc1 = lc1->next; | |
4949 continue; | |
4950 case 0: | |
4951 break; | |
4952 case 1: | |
4953 fprintf (dump_file, "added: "); | |
4954 print_rtl_single (dump_file, lc2->loc); | |
4955 lc2 = lc2->next; | |
4956 continue; | |
4957 default: | |
4958 gcc_unreachable (); | |
4959 } | |
4960 lc1 = lc1->next; | |
4961 lc2 = lc2->next; | |
4962 } | |
4963 | |
4964 while (lc1) | |
4965 { | |
4966 fprintf (dump_file, "removed: "); | |
4967 print_rtl_single (dump_file, lc1->loc); | |
4968 lc1 = lc1->next; | |
4969 } | |
4970 | |
4971 while (lc2) | |
4972 { | |
4973 fprintf (dump_file, "added: "); | |
4974 print_rtl_single (dump_file, lc2->loc); | |
4975 lc2 = lc2->next; | |
4976 } | |
4977 } | |
4978 | |
4411 /* Return true if variables VAR1 and VAR2 are different. */ | 4979 /* Return true if variables VAR1 and VAR2 are different. */ |
4412 | 4980 |
4413 static bool | 4981 static bool |
4414 variable_different_p (variable var1, variable var2) | 4982 variable_different_p (variable *var1, variable *var2) |
4415 { | 4983 { |
4416 int i; | 4984 int i; |
4417 | 4985 |
4418 if (var1 == var2) | 4986 if (var1 == var2) |
4419 return false; | 4987 return false; |
4420 | 4988 |
4989 if (var1->onepart != var2->onepart) | |
4990 return true; | |
4991 | |
4421 if (var1->n_var_parts != var2->n_var_parts) | 4992 if (var1->n_var_parts != var2->n_var_parts) |
4422 return true; | 4993 return true; |
4423 | 4994 |
4995 if (var1->onepart && var1->n_var_parts) | |
4996 { | |
4997 gcc_checking_assert (dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv) | |
4998 && var1->n_var_parts == 1); | |
4999 /* One-part values have locations in a canonical order. */ | |
5000 return onepart_variable_different_p (var1, var2); | |
5001 } | |
5002 | |
4424 for (i = 0; i < var1->n_var_parts; i++) | 5003 for (i = 0; i < var1->n_var_parts; i++) |
4425 { | 5004 { |
4426 if (var1->var_part[i].offset != var2->var_part[i].offset) | 5005 if (VAR_PART_OFFSET (var1, i) != VAR_PART_OFFSET (var2, i)) |
4427 return true; | 5006 return true; |
4428 /* One-part values have locations in a canonical order. */ | |
4429 if (i == 0 && var1->var_part[i].offset == 0 && dv_onepart_p (var1->dv)) | |
4430 { | |
4431 gcc_assert (var1->n_var_parts == 1 | |
4432 && dv_as_opaque (var1->dv) == dv_as_opaque (var2->dv)); | |
4433 return onepart_variable_different_p (var1, var2); | |
4434 } | |
4435 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i])) | 5007 if (variable_part_different_p (&var1->var_part[i], &var2->var_part[i])) |
4436 return true; | 5008 return true; |
4437 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i])) | 5009 if (variable_part_different_p (&var2->var_part[i], &var1->var_part[i])) |
4438 return true; | 5010 return true; |
4439 } | 5011 } |
4443 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */ | 5015 /* Return true if dataflow sets OLD_SET and NEW_SET differ. */ |
4444 | 5016 |
4445 static bool | 5017 static bool |
4446 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set) | 5018 dataflow_set_different (dataflow_set *old_set, dataflow_set *new_set) |
4447 { | 5019 { |
4448 htab_iterator hi; | 5020 variable_iterator_type hi; |
4449 variable var1; | 5021 variable *var1; |
5022 bool diffound = false; | |
5023 bool details = (dump_file && (dump_flags & TDF_DETAILS)); | |
5024 | |
5025 #define RETRUE \ | |
5026 do \ | |
5027 { \ | |
5028 if (!details) \ | |
5029 return true; \ | |
5030 else \ | |
5031 diffound = true; \ | |
5032 } \ | |
5033 while (0) | |
4450 | 5034 |
4451 if (old_set->vars == new_set->vars) | 5035 if (old_set->vars == new_set->vars) |
4452 return false; | 5036 return false; |
4453 | 5037 |
4454 if (htab_elements (shared_hash_htab (old_set->vars)) | 5038 if (shared_hash_htab (old_set->vars)->elements () |
4455 != htab_elements (shared_hash_htab (new_set->vars))) | 5039 != shared_hash_htab (new_set->vars)->elements ()) |
4456 return true; | 5040 RETRUE; |
4457 | 5041 |
4458 FOR_EACH_HTAB_ELEMENT (shared_hash_htab (old_set->vars), var1, variable, hi) | 5042 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (old_set->vars), |
4459 { | 5043 var1, variable, hi) |
4460 htab_t htab = shared_hash_htab (new_set->vars); | 5044 { |
4461 variable var2 = (variable) htab_find_with_hash (htab, var1->dv, | 5045 variable_table_type *htab = shared_hash_htab (new_set->vars); |
4462 dv_htab_hash (var1->dv)); | 5046 variable *var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv)); |
5047 | |
4463 if (!var2) | 5048 if (!var2) |
4464 { | 5049 { |
4465 if (dump_file && (dump_flags & TDF_DETAILS)) | 5050 if (dump_file && (dump_flags & TDF_DETAILS)) |
4466 { | 5051 { |
4467 fprintf (dump_file, "dataflow difference found: removal of:\n"); | 5052 fprintf (dump_file, "dataflow difference found: removal of:\n"); |
4468 dump_var (var1); | 5053 dump_var (var1); |
4469 } | 5054 } |
4470 return true; | 5055 RETRUE; |
4471 } | 5056 } |
4472 | 5057 else if (variable_different_p (var1, var2)) |
4473 if (variable_different_p (var1, var2)) | 5058 { |
4474 { | 5059 if (details) |
4475 if (dump_file && (dump_flags & TDF_DETAILS)) | |
4476 { | 5060 { |
4477 fprintf (dump_file, "dataflow difference found: " | 5061 fprintf (dump_file, "dataflow difference found: " |
4478 "old and new follow:\n"); | 5062 "old and new follow:\n"); |
4479 dump_var (var1); | 5063 dump_var (var1); |
5064 if (dv_onepart_p (var1->dv)) | |
5065 dump_onepart_variable_differences (var1, var2); | |
4480 dump_var (var2); | 5066 dump_var (var2); |
4481 } | 5067 } |
4482 return true; | 5068 RETRUE; |
4483 } | 5069 } |
4484 } | 5070 } |
4485 | 5071 |
4486 /* No need to traverse the second hashtab, if both have the same number | 5072 /* There's no need to traverse the second hashtab unless we want to |
4487 of elements and the second one had all entries found in the first one, | 5073 print the details. If both have the same number of elements and |
4488 then it can't have any extra entries. */ | 5074 the second one had all entries found in the first one, then the |
4489 return false; | 5075 second can't have any extra entries. */ |
5076 if (!details) | |
5077 return diffound; | |
5078 | |
5079 FOR_EACH_HASH_TABLE_ELEMENT (*shared_hash_htab (new_set->vars), | |
5080 var1, variable, hi) | |
5081 { | |
5082 variable_table_type *htab = shared_hash_htab (old_set->vars); | |
5083 variable *var2 = htab->find_with_hash (var1->dv, dv_htab_hash (var1->dv)); | |
5084 if (!var2) | |
5085 { | |
5086 if (details) | |
5087 { | |
5088 fprintf (dump_file, "dataflow difference found: addition of:\n"); | |
5089 dump_var (var1); | |
5090 } | |
5091 RETRUE; | |
5092 } | |
5093 } | |
5094 | |
5095 #undef RETRUE | |
5096 | |
5097 return diffound; | |
4490 } | 5098 } |
4491 | 5099 |
4492 /* Free the contents of dataflow set SET. */ | 5100 /* Free the contents of dataflow set SET. */ |
4493 | 5101 |
4494 static void | 5102 static void |
4501 | 5109 |
4502 shared_hash_destroy (set->vars); | 5110 shared_hash_destroy (set->vars); |
4503 set->vars = NULL; | 5111 set->vars = NULL; |
4504 } | 5112 } |
4505 | 5113 |
4506 /* Return true if RTL X contains a SYMBOL_REF. */ | 5114 /* Return true if T is a tracked parameter with non-degenerate record type. */ |
4507 | 5115 |
4508 static bool | 5116 static bool |
4509 contains_symbol_ref (rtx x) | 5117 tracked_record_parameter_p (tree t) |
4510 { | 5118 { |
4511 const char *fmt; | 5119 if (TREE_CODE (t) != PARM_DECL) |
4512 RTX_CODE code; | |
4513 int i; | |
4514 | |
4515 if (!x) | |
4516 return false; | 5120 return false; |
4517 | 5121 |
4518 code = GET_CODE (x); | 5122 if (DECL_MODE (t) == BLKmode) |
4519 if (code == SYMBOL_REF) | 5123 return false; |
4520 return true; | 5124 |
4521 | 5125 tree type = TREE_TYPE (t); |
4522 fmt = GET_RTX_FORMAT (code); | 5126 if (TREE_CODE (type) != RECORD_TYPE) |
4523 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | 5127 return false; |
4524 { | 5128 |
4525 if (fmt[i] == 'e') | 5129 if (TYPE_FIELDS (type) == NULL_TREE |
4526 { | 5130 || DECL_CHAIN (TYPE_FIELDS (type)) == NULL_TREE) |
4527 if (contains_symbol_ref (XEXP (x, i))) | 5131 return false; |
4528 return true; | 5132 |
4529 } | 5133 return true; |
4530 else if (fmt[i] == 'E') | |
4531 { | |
4532 int j; | |
4533 for (j = 0; j < XVECLEN (x, i); j++) | |
4534 if (contains_symbol_ref (XVECEXP (x, i, j))) | |
4535 return true; | |
4536 } | |
4537 } | |
4538 | |
4539 return false; | |
4540 } | 5134 } |
4541 | 5135 |
4542 /* Shall EXPR be tracked? */ | 5136 /* Shall EXPR be tracked? */ |
4543 | 5137 |
4544 static bool | 5138 static bool |
4549 | 5143 |
4550 if (TREE_CODE (expr) == DEBUG_EXPR_DECL) | 5144 if (TREE_CODE (expr) == DEBUG_EXPR_DECL) |
4551 return DECL_RTL_SET_P (expr); | 5145 return DECL_RTL_SET_P (expr); |
4552 | 5146 |
4553 /* If EXPR is not a parameter or a variable do not track it. */ | 5147 /* If EXPR is not a parameter or a variable do not track it. */ |
4554 if (TREE_CODE (expr) != VAR_DECL && TREE_CODE (expr) != PARM_DECL) | 5148 if (!VAR_P (expr) && TREE_CODE (expr) != PARM_DECL) |
4555 return 0; | 5149 return 0; |
4556 | 5150 |
4557 /* It also must have a name... */ | 5151 /* It also must have a name... */ |
4558 if (!DECL_NAME (expr) && need_rtl) | 5152 if (!DECL_NAME (expr) && need_rtl) |
4559 return 0; | 5153 return 0; |
4565 | 5159 |
4566 /* If this expression is really a debug alias of some other declaration, we | 5160 /* If this expression is really a debug alias of some other declaration, we |
4567 don't need to track this expression if the ultimate declaration is | 5161 don't need to track this expression if the ultimate declaration is |
4568 ignored. */ | 5162 ignored. */ |
4569 realdecl = expr; | 5163 realdecl = expr; |
4570 if (DECL_DEBUG_EXPR_IS_FROM (realdecl)) | 5164 if (VAR_P (realdecl) && DECL_HAS_DEBUG_EXPR_P (realdecl)) |
4571 { | 5165 { |
4572 realdecl = DECL_DEBUG_EXPR (realdecl); | 5166 realdecl = DECL_DEBUG_EXPR (realdecl); |
4573 if (realdecl == NULL_TREE) | 5167 if (!DECL_P (realdecl)) |
4574 realdecl = expr; | 5168 { |
4575 else if (!DECL_P (realdecl)) | 5169 if (handled_component_p (realdecl) |
4576 { | 5170 || (TREE_CODE (realdecl) == MEM_REF |
4577 if (handled_component_p (realdecl)) | 5171 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR)) |
4578 { | 5172 { |
4579 HOST_WIDE_INT bitsize, bitpos, maxsize; | 5173 HOST_WIDE_INT bitsize, bitpos, maxsize; |
5174 bool reverse; | |
4580 tree innerdecl | 5175 tree innerdecl |
4581 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize, | 5176 = get_ref_base_and_extent (realdecl, &bitpos, &bitsize, |
4582 &maxsize); | 5177 &maxsize, &reverse); |
4583 if (!DECL_P (innerdecl) | 5178 if (!DECL_P (innerdecl) |
4584 || DECL_IGNORED_P (innerdecl) | 5179 || DECL_IGNORED_P (innerdecl) |
5180 /* Do not track declarations for parts of tracked record | |
5181 parameters since we want to track them as a whole. */ | |
5182 || tracked_record_parameter_p (innerdecl) | |
4585 || TREE_STATIC (innerdecl) | 5183 || TREE_STATIC (innerdecl) |
4586 || bitsize <= 0 | 5184 || bitsize <= 0 |
4587 || bitpos + bitsize > 256 | 5185 || bitpos + bitsize > 256 |
4588 || bitsize != maxsize) | 5186 || bitsize != maxsize) |
4589 return 0; | 5187 return 0; |
4612 Example: | 5210 Example: |
4613 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv"))); | 5211 extern char **_dl_argv_internal __attribute__ ((alias ("_dl_argv"))); |
4614 char **_dl_argv; | 5212 char **_dl_argv; |
4615 */ | 5213 */ |
4616 if (decl_rtl && MEM_P (decl_rtl) | 5214 if (decl_rtl && MEM_P (decl_rtl) |
4617 && contains_symbol_ref (XEXP (decl_rtl, 0))) | 5215 && contains_symbol_ref_p (XEXP (decl_rtl, 0))) |
4618 return 0; | 5216 return 0; |
4619 | 5217 |
4620 /* If RTX is a memory it should not be very large (because it would be | 5218 /* If RTX is a memory it should not be very large (because it would be |
4621 an array or struct). */ | 5219 an array or struct). */ |
4622 if (decl_rtl && MEM_P (decl_rtl)) | 5220 if (decl_rtl && MEM_P (decl_rtl)) |
4623 { | 5221 { |
4624 /* Do not track structures and arrays. */ | 5222 /* Do not track structures and arrays. */ |
4625 if (GET_MODE (decl_rtl) == BLKmode | 5223 if ((GET_MODE (decl_rtl) == BLKmode |
4626 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl))) | 5224 || AGGREGATE_TYPE_P (TREE_TYPE (realdecl))) |
5225 && !tracked_record_parameter_p (realdecl)) | |
4627 return 0; | 5226 return 0; |
4628 if (MEM_SIZE (decl_rtl) | 5227 if (MEM_SIZE_KNOWN_P (decl_rtl) |
4629 && INTVAL (MEM_SIZE (decl_rtl)) > MAX_VAR_PARTS) | 5228 && MEM_SIZE (decl_rtl) > MAX_VAR_PARTS) |
4630 return 0; | 5229 return 0; |
4631 } | 5230 } |
4632 | 5231 |
4633 DECL_CHANGED (expr) = 0; | 5232 DECL_CHANGED (expr) = 0; |
4634 DECL_CHANGED (realdecl) = 0; | 5233 DECL_CHANGED (realdecl) = 0; |
4679 the lowpart we can track in *MODE_OUT (if nonnull) and its offset | 5278 the lowpart we can track in *MODE_OUT (if nonnull) and its offset |
4680 from EXPR in *OFFSET_OUT (if nonnull). */ | 5279 from EXPR in *OFFSET_OUT (if nonnull). */ |
4681 | 5280 |
4682 static bool | 5281 static bool |
4683 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p, | 5282 track_loc_p (rtx loc, tree expr, HOST_WIDE_INT offset, bool store_reg_p, |
4684 enum machine_mode *mode_out, HOST_WIDE_INT *offset_out) | 5283 machine_mode *mode_out, HOST_WIDE_INT *offset_out) |
4685 { | 5284 { |
4686 enum machine_mode mode; | 5285 machine_mode mode; |
4687 | 5286 |
4688 if (expr == NULL || !track_expr_p (expr, true)) | 5287 if (expr == NULL || !track_expr_p (expr, true)) |
4689 return false; | 5288 return false; |
4690 | 5289 |
4691 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the | 5290 /* If REG was a paradoxical subreg, its REG_ATTRS will describe the |
4692 whole subreg, but only the old inner part is really relevant. */ | 5291 whole subreg, but only the old inner part is really relevant. */ |
4693 mode = GET_MODE (loc); | 5292 mode = GET_MODE (loc); |
4694 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc))) | 5293 if (REG_P (loc) && !HARD_REGISTER_NUM_P (ORIGINAL_REGNO (loc))) |
4695 { | 5294 { |
4696 enum machine_mode pseudo_mode; | 5295 machine_mode pseudo_mode; |
4697 | 5296 |
4698 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc)); | 5297 pseudo_mode = PSEUDO_REGNO_MODE (ORIGINAL_REGNO (loc)); |
4699 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (pseudo_mode)) | 5298 if (paradoxical_subreg_p (mode, pseudo_mode)) |
4700 { | 5299 { |
4701 offset += byte_lowpart_offset (pseudo_mode, mode); | 5300 offset += byte_lowpart_offset (pseudo_mode, mode); |
4702 mode = pseudo_mode; | 5301 mode = pseudo_mode; |
4703 } | 5302 } |
4704 } | 5303 } |
4708 the whole of register LOC; in that case, the whole of EXPR is | 5307 the whole of register LOC; in that case, the whole of EXPR is |
4709 being changed. We exclude complex modes from the second case | 5308 being changed. We exclude complex modes from the second case |
4710 because the real and imaginary parts are represented as separate | 5309 because the real and imaginary parts are represented as separate |
4711 pseudo registers, even if the whole complex value fits into one | 5310 pseudo registers, even if the whole complex value fits into one |
4712 hard register. */ | 5311 hard register. */ |
4713 if ((GET_MODE_SIZE (mode) > GET_MODE_SIZE (DECL_MODE (expr)) | 5312 if ((paradoxical_subreg_p (mode, DECL_MODE (expr)) |
4714 || (store_reg_p | 5313 || (store_reg_p |
4715 && !COMPLEX_MODE_P (DECL_MODE (expr)) | 5314 && !COMPLEX_MODE_P (DECL_MODE (expr)) |
4716 && hard_regno_nregs[REGNO (loc)][DECL_MODE (expr)] == 1)) | 5315 && hard_regno_nregs (REGNO (loc), DECL_MODE (expr)) == 1)) |
4717 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0) | 5316 && offset + byte_lowpart_offset (DECL_MODE (expr), mode) == 0) |
4718 { | 5317 { |
4719 mode = DECL_MODE (expr); | 5318 mode = DECL_MODE (expr); |
4720 offset = 0; | 5319 offset = 0; |
4721 } | 5320 } |
4733 /* Return the MODE lowpart of LOC, or null if LOC is not something we | 5332 /* Return the MODE lowpart of LOC, or null if LOC is not something we |
4734 want to track. When returning nonnull, make sure that the attributes | 5333 want to track. When returning nonnull, make sure that the attributes |
4735 on the returned value are updated. */ | 5334 on the returned value are updated. */ |
4736 | 5335 |
4737 static rtx | 5336 static rtx |
4738 var_lowpart (enum machine_mode mode, rtx loc) | 5337 var_lowpart (machine_mode mode, rtx loc) |
4739 { | 5338 { |
4740 unsigned int offset, reg_offset, regno; | 5339 unsigned int offset, reg_offset, regno; |
5340 | |
5341 if (GET_MODE (loc) == mode) | |
5342 return loc; | |
4741 | 5343 |
4742 if (!REG_P (loc) && !MEM_P (loc)) | 5344 if (!REG_P (loc) && !MEM_P (loc)) |
4743 return NULL; | 5345 return NULL; |
4744 | |
4745 if (GET_MODE (loc) == mode) | |
4746 return loc; | |
4747 | 5346 |
4748 offset = byte_lowpart_offset (mode, GET_MODE (loc)); | 5347 offset = byte_lowpart_offset (mode, GET_MODE (loc)); |
4749 | 5348 |
4750 if (MEM_P (loc)) | 5349 if (MEM_P (loc)) |
4751 return adjust_address_nv (loc, mode, offset); | 5350 return adjust_address_nv (loc, mode, offset); |
4759 /* Carry information about uses and stores while walking rtx. */ | 5358 /* Carry information about uses and stores while walking rtx. */ |
4760 | 5359 |
4761 struct count_use_info | 5360 struct count_use_info |
4762 { | 5361 { |
4763 /* The insn where the RTX is. */ | 5362 /* The insn where the RTX is. */ |
4764 rtx insn; | 5363 rtx_insn *insn; |
4765 | 5364 |
4766 /* The basic block where insn is. */ | 5365 /* The basic block where insn is. */ |
4767 basic_block bb; | 5366 basic_block bb; |
4768 | 5367 |
4769 /* The array of n_sets sets in the insn, as determined by cselib. */ | 5368 /* The array of n_sets sets in the insn, as determined by cselib. */ |
4775 }; | 5374 }; |
4776 | 5375 |
4777 /* Find a VALUE corresponding to X. */ | 5376 /* Find a VALUE corresponding to X. */ |
4778 | 5377 |
4779 static inline cselib_val * | 5378 static inline cselib_val * |
4780 find_use_val (rtx x, enum machine_mode mode, struct count_use_info *cui) | 5379 find_use_val (rtx x, machine_mode mode, struct count_use_info *cui) |
4781 { | 5380 { |
4782 int i; | 5381 int i; |
4783 | 5382 |
4784 if (cui->sets) | 5383 if (cui->sets) |
4785 { | 5384 { |
4806 } | 5405 } |
4807 | 5406 |
4808 return NULL; | 5407 return NULL; |
4809 } | 5408 } |
4810 | 5409 |
4811 /* Helper function to get mode of MEM's address. */ | |
4812 | |
4813 static inline enum machine_mode | |
4814 get_address_mode (rtx mem) | |
4815 { | |
4816 enum machine_mode mode = GET_MODE (XEXP (mem, 0)); | |
4817 if (mode != VOIDmode) | |
4818 return mode; | |
4819 return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem)); | |
4820 } | |
4821 | |
4822 /* Replace all registers and addresses in an expression with VALUE | 5410 /* Replace all registers and addresses in an expression with VALUE |
4823 expressions that map back to them, unless the expression is a | 5411 expressions that map back to them, unless the expression is a |
4824 register. If no mapping is or can be performed, returns NULL. */ | 5412 register. If no mapping is or can be performed, returns NULL. */ |
4825 | 5413 |
4826 static rtx | 5414 static rtx |
4827 replace_expr_with_values (rtx loc) | 5415 replace_expr_with_values (rtx loc) |
4828 { | 5416 { |
4829 if (REG_P (loc)) | 5417 if (REG_P (loc) || GET_CODE (loc) == ENTRY_VALUE) |
4830 return NULL; | 5418 return NULL; |
4831 else if (MEM_P (loc)) | 5419 else if (MEM_P (loc)) |
4832 { | 5420 { |
4833 cselib_val *addr = cselib_lookup (XEXP (loc, 0), | 5421 cselib_val *addr = cselib_lookup (XEXP (loc, 0), |
4834 get_address_mode (loc), 0, | 5422 get_address_mode (loc), 0, |
4840 } | 5428 } |
4841 else | 5429 else |
4842 return cselib_subst_to_values (loc, VOIDmode); | 5430 return cselib_subst_to_values (loc, VOIDmode); |
4843 } | 5431 } |
4844 | 5432 |
5433 /* Return true if X contains a DEBUG_EXPR. */ | |
5434 | |
5435 static bool | |
5436 rtx_debug_expr_p (const_rtx x) | |
5437 { | |
5438 subrtx_iterator::array_type array; | |
5439 FOR_EACH_SUBRTX (iter, array, x, ALL) | |
5440 if (GET_CODE (*iter) == DEBUG_EXPR) | |
5441 return true; | |
5442 return false; | |
5443 } | |
5444 | |
4845 /* Determine what kind of micro operation to choose for a USE. Return | 5445 /* Determine what kind of micro operation to choose for a USE. Return |
4846 MO_CLOBBER if no micro operation is to be generated. */ | 5446 MO_CLOBBER if no micro operation is to be generated. */ |
4847 | 5447 |
4848 static enum micro_operation_type | 5448 static enum micro_operation_type |
4849 use_type (rtx loc, struct count_use_info *cui, enum machine_mode *modep) | 5449 use_type (rtx loc, struct count_use_info *cui, machine_mode *modep) |
4850 { | 5450 { |
4851 tree expr; | 5451 tree expr; |
4852 | 5452 |
4853 if (cui && cui->sets) | 5453 if (cui && cui->sets) |
4854 { | 5454 { |
4921 if (!expr) | 5521 if (!expr) |
4922 return MO_CLOBBER; | 5522 return MO_CLOBBER; |
4923 else if (target_for_debug_bind (var_debug_decl (expr))) | 5523 else if (target_for_debug_bind (var_debug_decl (expr))) |
4924 return MO_CLOBBER; | 5524 return MO_CLOBBER; |
4925 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc), | 5525 else if (track_loc_p (loc, expr, INT_MEM_OFFSET (loc), |
4926 false, modep, NULL)) | 5526 false, modep, NULL) |
5527 /* Multi-part variables shouldn't refer to one-part | |
5528 variable names such as VALUEs (never happens) or | |
5529 DEBUG_EXPRs (only happens in the presence of debug | |
5530 insns). */ | |
5531 && (!MAY_HAVE_DEBUG_INSNS | |
5532 || !rtx_debug_expr_p (XEXP (loc, 0)))) | |
4927 return MO_USE; | 5533 return MO_USE; |
4928 else | 5534 else |
4929 return MO_CLOBBER; | 5535 return MO_CLOBBER; |
4930 } | 5536 } |
4931 | 5537 |
4934 | 5540 |
4935 /* Log to OUT information about micro-operation MOPT involving X in | 5541 /* Log to OUT information about micro-operation MOPT involving X in |
4936 INSN of BB. */ | 5542 INSN of BB. */ |
4937 | 5543 |
4938 static inline void | 5544 static inline void |
4939 log_op_type (rtx x, basic_block bb, rtx insn, | 5545 log_op_type (rtx x, basic_block bb, rtx_insn *insn, |
4940 enum micro_operation_type mopt, FILE *out) | 5546 enum micro_operation_type mopt, FILE *out) |
4941 { | 5547 { |
4942 fprintf (out, "bb %i op %i insn %i %s ", | 5548 fprintf (out, "bb %i op %i insn %i %s ", |
4943 bb->index, VEC_length (micro_operation, VTI (bb)->mos), | 5549 bb->index, VTI (bb)->mos.length (), |
4944 INSN_UID (insn), micro_operation_type_name[mopt]); | 5550 INSN_UID (insn), micro_operation_type_name[mopt]); |
4945 print_inline_rtx (out, x, 2); | 5551 print_inline_rtx (out, x, 2); |
4946 fputc ('\n', out); | 5552 fputc ('\n', out); |
4947 } | 5553 } |
4948 | 5554 |
4961 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump) | 5567 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_COPIED", (x), CONCAT)->jump) |
4962 /* Whether the location in the CONCAT should be handled like a | 5568 /* Whether the location in the CONCAT should be handled like a |
4963 MO_CLOBBER as well. */ | 5569 MO_CLOBBER as well. */ |
4964 #define VAL_EXPR_IS_CLOBBERED(x) \ | 5570 #define VAL_EXPR_IS_CLOBBERED(x) \ |
4965 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging) | 5571 (RTL_FLAG_CHECK1 ("VAL_EXPR_IS_CLOBBERED", (x), CONCAT)->unchanging) |
4966 /* Whether the location is a CONCAT of the MO_VAL_SET expression and | |
4967 a reverse operation that should be handled afterwards. */ | |
4968 #define VAL_EXPR_HAS_REVERSE(x) \ | |
4969 (RTL_FLAG_CHECK1 ("VAL_EXPR_HAS_REVERSE", (x), CONCAT)->return_val) | |
4970 | 5572 |
4971 /* All preserved VALUEs. */ | 5573 /* All preserved VALUEs. */ |
4972 static VEC (rtx, heap) *preserved_values; | 5574 static vec<rtx> preserved_values; |
4973 | 5575 |
4974 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */ | 5576 /* Ensure VAL is preserved and remember it in a vector for vt_emit_notes. */ |
4975 | 5577 |
4976 static void | 5578 static void |
4977 preserve_value (cselib_val *val) | 5579 preserve_value (cselib_val *val) |
4978 { | 5580 { |
4979 cselib_preserve_value (val); | 5581 cselib_preserve_value (val); |
4980 VEC_safe_push (rtx, heap, preserved_values, val->val_rtx); | 5582 preserved_values.safe_push (val->val_rtx); |
4981 } | 5583 } |
4982 | 5584 |
4983 /* Helper function for MO_VAL_LOC handling. Return non-zero if | 5585 /* Helper function for MO_VAL_LOC handling. Return non-zero if |
4984 any rtxes not suitable for CONST use not replaced by VALUEs | 5586 any rtxes not suitable for CONST use not replaced by VALUEs |
4985 are discovered. */ | 5587 are discovered. */ |
4986 | 5588 |
4987 static int | 5589 static bool |
4988 non_suitable_const (rtx *x, void *data ATTRIBUTE_UNUSED) | 5590 non_suitable_const (const_rtx x) |
4989 { | 5591 { |
4990 if (*x == NULL_RTX) | 5592 subrtx_iterator::array_type array; |
4991 return 0; | 5593 FOR_EACH_SUBRTX (iter, array, x, ALL) |
4992 | 5594 { |
4993 switch (GET_CODE (*x)) | 5595 const_rtx x = *iter; |
4994 { | 5596 switch (GET_CODE (x)) |
4995 case REG: | 5597 { |
4996 case DEBUG_EXPR: | 5598 case REG: |
4997 case PC: | 5599 case DEBUG_EXPR: |
4998 case SCRATCH: | 5600 case PC: |
4999 case CC0: | 5601 case SCRATCH: |
5000 case ASM_INPUT: | 5602 case CC0: |
5001 case ASM_OPERANDS: | 5603 case ASM_INPUT: |
5002 return 1; | 5604 case ASM_OPERANDS: |
5003 case MEM: | 5605 return true; |
5004 return !MEM_READONLY_P (*x); | 5606 case MEM: |
5005 default: | 5607 if (!MEM_READONLY_P (x)) |
5006 return 0; | 5608 return true; |
5007 } | 5609 break; |
5610 default: | |
5611 break; | |
5612 } | |
5613 } | |
5614 return false; | |
5008 } | 5615 } |
5009 | 5616 |
5010 /* Add uses (register and memory references) LOC which will be tracked | 5617 /* Add uses (register and memory references) LOC which will be tracked |
5011 to VTI (bb)->mos. INSN is instruction which the LOC is part of. */ | 5618 to VTI (bb)->mos. */ |
5012 | 5619 |
5013 static int | 5620 static void |
5014 add_uses (rtx *ploc, void *data) | 5621 add_uses (rtx loc, struct count_use_info *cui) |
5015 { | 5622 { |
5016 rtx loc = *ploc; | 5623 machine_mode mode = VOIDmode; |
5017 enum machine_mode mode = VOIDmode; | |
5018 struct count_use_info *cui = (struct count_use_info *)data; | |
5019 enum micro_operation_type type = use_type (loc, cui, &mode); | 5624 enum micro_operation_type type = use_type (loc, cui, &mode); |
5020 | 5625 |
5021 if (type != MO_CLOBBER) | 5626 if (type != MO_CLOBBER) |
5022 { | 5627 { |
5023 basic_block bb = cui->bb; | 5628 basic_block bb = cui->bb; |
5035 | 5640 |
5036 gcc_assert (cui->sets); | 5641 gcc_assert (cui->sets); |
5037 | 5642 |
5038 if (MEM_P (vloc) | 5643 if (MEM_P (vloc) |
5039 && !REG_P (XEXP (vloc, 0)) | 5644 && !REG_P (XEXP (vloc, 0)) |
5040 && !MEM_P (XEXP (vloc, 0)) | 5645 && !MEM_P (XEXP (vloc, 0))) |
5041 && (GET_CODE (XEXP (vloc, 0)) != PLUS | |
5042 || XEXP (XEXP (vloc, 0), 0) != cfa_base_rtx | |
5043 || !CONST_INT_P (XEXP (XEXP (vloc, 0), 1)))) | |
5044 { | 5646 { |
5045 rtx mloc = vloc; | 5647 rtx mloc = vloc; |
5046 enum machine_mode address_mode = get_address_mode (mloc); | 5648 machine_mode address_mode = get_address_mode (mloc); |
5047 cselib_val *val | 5649 cselib_val *val |
5048 = cselib_lookup (XEXP (mloc, 0), address_mode, 0, | 5650 = cselib_lookup (XEXP (mloc, 0), address_mode, 0, |
5049 GET_MODE (mloc)); | 5651 GET_MODE (mloc)); |
5050 | 5652 |
5051 if (val && !cselib_preserved_value_p (val)) | 5653 if (val && !cselib_preserved_value_p (val)) |
5052 { | 5654 preserve_value (val); |
5053 micro_operation moa; | |
5054 preserve_value (val); | |
5055 mloc = cselib_subst_to_values (XEXP (mloc, 0), | |
5056 GET_MODE (mloc)); | |
5057 moa.type = MO_VAL_USE; | |
5058 moa.insn = cui->insn; | |
5059 moa.u.loc = gen_rtx_CONCAT (address_mode, | |
5060 val->val_rtx, mloc); | |
5061 if (dump_file && (dump_flags & TDF_DETAILS)) | |
5062 log_op_type (moa.u.loc, cui->bb, cui->insn, | |
5063 moa.type, dump_file); | |
5064 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa); | |
5065 } | |
5066 } | 5655 } |
5067 | 5656 |
5068 if (CONSTANT_P (vloc) | 5657 if (CONSTANT_P (vloc) |
5069 && (GET_CODE (vloc) != CONST | 5658 && (GET_CODE (vloc) != CONST || non_suitable_const (vloc))) |
5070 || for_each_rtx (&vloc, non_suitable_const, NULL))) | |
5071 /* For constants don't look up any value. */; | 5659 /* For constants don't look up any value. */; |
5072 else if (!VAR_LOC_UNKNOWN_P (vloc) | 5660 else if (!VAR_LOC_UNKNOWN_P (vloc) && !unsuitable_loc (vloc) |
5073 && (val = find_use_val (vloc, GET_MODE (oloc), cui))) | 5661 && (val = find_use_val (vloc, GET_MODE (oloc), cui))) |
5074 { | 5662 { |
5075 enum machine_mode mode2; | 5663 machine_mode mode2; |
5076 enum micro_operation_type type2; | 5664 enum micro_operation_type type2; |
5077 rtx nloc = replace_expr_with_values (vloc); | 5665 rtx nloc = NULL; |
5666 bool resolvable = REG_P (vloc) || MEM_P (vloc); | |
5667 | |
5668 if (resolvable) | |
5669 nloc = replace_expr_with_values (vloc); | |
5078 | 5670 |
5079 if (nloc) | 5671 if (nloc) |
5080 { | 5672 { |
5081 oloc = shallow_copy_rtx (oloc); | 5673 oloc = shallow_copy_rtx (oloc); |
5082 PAT_VAR_LOCATION_LOC (oloc) = nloc; | 5674 PAT_VAR_LOCATION_LOC (oloc) = nloc; |
5090 || type2 == MO_CLOBBER); | 5682 || type2 == MO_CLOBBER); |
5091 | 5683 |
5092 if (type2 == MO_CLOBBER | 5684 if (type2 == MO_CLOBBER |
5093 && !cselib_preserved_value_p (val)) | 5685 && !cselib_preserved_value_p (val)) |
5094 { | 5686 { |
5095 VAL_NEEDS_RESOLUTION (oloc) = 1; | 5687 VAL_NEEDS_RESOLUTION (oloc) = resolvable; |
5096 preserve_value (val); | 5688 preserve_value (val); |
5097 } | 5689 } |
5098 } | 5690 } |
5099 else if (!VAR_LOC_UNKNOWN_P (vloc)) | 5691 else if (!VAR_LOC_UNKNOWN_P (vloc)) |
5100 { | 5692 { |
5104 | 5696 |
5105 mo.u.loc = oloc; | 5697 mo.u.loc = oloc; |
5106 } | 5698 } |
5107 else if (type == MO_VAL_USE) | 5699 else if (type == MO_VAL_USE) |
5108 { | 5700 { |
5109 enum machine_mode mode2 = VOIDmode; | 5701 machine_mode mode2 = VOIDmode; |
5110 enum micro_operation_type type2; | 5702 enum micro_operation_type type2; |
5111 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui); | 5703 cselib_val *val = find_use_val (loc, GET_MODE (loc), cui); |
5112 rtx vloc, oloc = loc, nloc; | 5704 rtx vloc, oloc = loc, nloc; |
5113 | 5705 |
5114 gcc_assert (cui->sets); | 5706 gcc_assert (cui->sets); |
5115 | 5707 |
5116 if (MEM_P (oloc) | 5708 if (MEM_P (oloc) |
5117 && !REG_P (XEXP (oloc, 0)) | 5709 && !REG_P (XEXP (oloc, 0)) |
5118 && !MEM_P (XEXP (oloc, 0)) | 5710 && !MEM_P (XEXP (oloc, 0))) |
5119 && (GET_CODE (XEXP (oloc, 0)) != PLUS | |
5120 || XEXP (XEXP (oloc, 0), 0) != cfa_base_rtx | |
5121 || !CONST_INT_P (XEXP (XEXP (oloc, 0), 1)))) | |
5122 { | 5711 { |
5123 rtx mloc = oloc; | 5712 rtx mloc = oloc; |
5124 enum machine_mode address_mode = get_address_mode (mloc); | 5713 machine_mode address_mode = get_address_mode (mloc); |
5125 cselib_val *val | 5714 cselib_val *val |
5126 = cselib_lookup (XEXP (mloc, 0), address_mode, 0, | 5715 = cselib_lookup (XEXP (mloc, 0), address_mode, 0, |
5127 GET_MODE (mloc)); | 5716 GET_MODE (mloc)); |
5128 | 5717 |
5129 if (val && !cselib_preserved_value_p (val)) | 5718 if (val && !cselib_preserved_value_p (val)) |
5130 { | 5719 preserve_value (val); |
5131 micro_operation moa; | |
5132 preserve_value (val); | |
5133 mloc = cselib_subst_to_values (XEXP (mloc, 0), | |
5134 GET_MODE (mloc)); | |
5135 moa.type = MO_VAL_USE; | |
5136 moa.insn = cui->insn; | |
5137 moa.u.loc = gen_rtx_CONCAT (address_mode, | |
5138 val->val_rtx, mloc); | |
5139 if (dump_file && (dump_flags & TDF_DETAILS)) | |
5140 log_op_type (moa.u.loc, cui->bb, cui->insn, | |
5141 moa.type, dump_file); | |
5142 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa); | |
5143 } | |
5144 } | 5720 } |
5145 | 5721 |
5146 type2 = use_type (loc, 0, &mode2); | 5722 type2 = use_type (loc, 0, &mode2); |
5147 | 5723 |
5148 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR | 5724 gcc_assert (type2 == MO_USE || type2 == MO_USE_NO_VAR |
5161 (concat (concat val use) src): same as above, with use as | 5737 (concat (concat val use) src): same as above, with use as |
5162 the MO_USE tracked value, if it differs from src. | 5738 the MO_USE tracked value, if it differs from src. |
5163 | 5739 |
5164 */ | 5740 */ |
5165 | 5741 |
5742 gcc_checking_assert (REG_P (loc) || MEM_P (loc)); | |
5166 nloc = replace_expr_with_values (loc); | 5743 nloc = replace_expr_with_values (loc); |
5167 if (!nloc) | 5744 if (!nloc) |
5168 nloc = oloc; | 5745 nloc = oloc; |
5169 | 5746 |
5170 if (vloc != nloc) | 5747 if (vloc != nloc) |
5185 else | 5762 else |
5186 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR); | 5763 gcc_assert (type == MO_USE || type == MO_USE_NO_VAR); |
5187 | 5764 |
5188 if (dump_file && (dump_flags & TDF_DETAILS)) | 5765 if (dump_file && (dump_flags & TDF_DETAILS)) |
5189 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file); | 5766 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file); |
5190 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo); | 5767 VTI (bb)->mos.safe_push (mo); |
5191 } | 5768 } |
5192 | |
5193 return 0; | |
5194 } | 5769 } |
5195 | 5770 |
5196 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */ | 5771 /* Helper function for finding all uses of REG/MEM in X in insn INSN. */ |
5197 | 5772 |
5198 static void | 5773 static void |
5199 add_uses_1 (rtx *x, void *cui) | 5774 add_uses_1 (rtx *x, void *cui) |
5200 { | 5775 { |
5201 for_each_rtx (x, add_uses, cui); | 5776 subrtx_var_iterator::array_type array; |
5202 } | 5777 FOR_EACH_SUBRTX_VAR (iter, array, *x, NONCONST) |
5203 | 5778 add_uses (*iter, (struct count_use_info *) cui); |
5204 /* Attempt to reverse the EXPR operation in the debug info. Say for | 5779 } |
5205 reg1 = reg2 + 6 even when reg2 is no longer live we | 5780 |
5206 can express its value as VAL - 6. */ | 5781 /* This is the value used during expansion of locations. We want it |
5207 | 5782 to be unbounded, so that variables expanded deep in a recursion |
5208 static rtx | 5783 nest are fully evaluated, so that their values are cached |
5209 reverse_op (rtx val, const_rtx expr) | 5784 correctly. We avoid recursion cycles through other means, and we |
5785 don't unshare RTL, so excess complexity is not a problem. */ | |
5786 #define EXPR_DEPTH (INT_MAX) | |
5787 /* We use this to keep too-complex expressions from being emitted as | |
5788 location notes, and then to debug information. Users can trade | |
5789 compile time for ridiculously complex expressions, although they're | |
5790 seldom useful, and they may often have to be discarded as not | |
5791 representable anyway. */ | |
5792 #define EXPR_USE_DEPTH (PARAM_VALUE (PARAM_MAX_VARTRACK_EXPR_DEPTH)) | |
5793 | |
5794 /* Attempt to reverse the EXPR operation in the debug info and record | |
5795 it in the cselib table. Say for reg1 = reg2 + 6 even when reg2 is | |
5796 no longer live we can express its value as VAL - 6. */ | |
5797 | |
5798 static void | |
5799 reverse_op (rtx val, const_rtx expr, rtx_insn *insn) | |
5210 { | 5800 { |
5211 rtx src, arg, ret; | 5801 rtx src, arg, ret; |
5212 cselib_val *v; | 5802 cselib_val *v; |
5803 struct elt_loc_list *l; | |
5213 enum rtx_code code; | 5804 enum rtx_code code; |
5805 int count; | |
5214 | 5806 |
5215 if (GET_CODE (expr) != SET) | 5807 if (GET_CODE (expr) != SET) |
5216 return NULL_RTX; | 5808 return; |
5217 | 5809 |
5218 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr))) | 5810 if (!REG_P (SET_DEST (expr)) || GET_MODE (val) != GET_MODE (SET_DEST (expr))) |
5219 return NULL_RTX; | 5811 return; |
5220 | 5812 |
5221 src = SET_SRC (expr); | 5813 src = SET_SRC (expr); |
5222 switch (GET_CODE (src)) | 5814 switch (GET_CODE (src)) |
5223 { | 5815 { |
5224 case PLUS: | 5816 case PLUS: |
5225 case MINUS: | 5817 case MINUS: |
5226 case XOR: | 5818 case XOR: |
5227 case NOT: | 5819 case NOT: |
5228 case NEG: | 5820 case NEG: |
5229 if (!REG_P (XEXP (src, 0))) | 5821 if (!REG_P (XEXP (src, 0))) |
5230 return NULL_RTX; | 5822 return; |
5231 break; | 5823 break; |
5232 case SIGN_EXTEND: | 5824 case SIGN_EXTEND: |
5233 case ZERO_EXTEND: | 5825 case ZERO_EXTEND: |
5234 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0))) | 5826 if (!REG_P (XEXP (src, 0)) && !MEM_P (XEXP (src, 0))) |
5235 return NULL_RTX; | 5827 return; |
5236 break; | 5828 break; |
5237 default: | 5829 default: |
5238 return NULL_RTX; | 5830 return; |
5239 } | 5831 } |
5240 | 5832 |
5241 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx) | 5833 if (!SCALAR_INT_MODE_P (GET_MODE (src)) || XEXP (src, 0) == cfa_base_rtx) |
5242 return NULL_RTX; | 5834 return; |
5243 | 5835 |
5244 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode); | 5836 v = cselib_lookup (XEXP (src, 0), GET_MODE (XEXP (src, 0)), 0, VOIDmode); |
5245 if (!v || !cselib_preserved_value_p (v)) | 5837 if (!v || !cselib_preserved_value_p (v)) |
5246 return NULL_RTX; | 5838 return; |
5839 | |
5840 /* Use canonical V to avoid creating multiple redundant expressions | |
5841 for different VALUES equivalent to V. */ | |
5842 v = canonical_cselib_val (v); | |
5843 | |
5844 /* Adding a reverse op isn't useful if V already has an always valid | |
5845 location. Ignore ENTRY_VALUE, while it is always constant, we should | |
5846 prefer non-ENTRY_VALUE locations whenever possible. */ | |
5847 for (l = v->locs, count = 0; l; l = l->next, count++) | |
5848 if (CONSTANT_P (l->loc) | |
5849 && (GET_CODE (l->loc) != CONST || !references_value_p (l->loc, 0))) | |
5850 return; | |
5851 /* Avoid creating too large locs lists. */ | |
5852 else if (count == PARAM_VALUE (PARAM_MAX_VARTRACK_REVERSE_OP_SIZE)) | |
5853 return; | |
5247 | 5854 |
5248 switch (GET_CODE (src)) | 5855 switch (GET_CODE (src)) |
5249 { | 5856 { |
5250 case NOT: | 5857 case NOT: |
5251 case NEG: | 5858 case NEG: |
5252 if (GET_MODE (v->val_rtx) != GET_MODE (val)) | 5859 if (GET_MODE (v->val_rtx) != GET_MODE (val)) |
5253 return NULL_RTX; | 5860 return; |
5254 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val); | 5861 ret = gen_rtx_fmt_e (GET_CODE (src), GET_MODE (val), val); |
5255 break; | 5862 break; |
5256 case SIGN_EXTEND: | 5863 case SIGN_EXTEND: |
5257 case ZERO_EXTEND: | 5864 case ZERO_EXTEND: |
5258 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val); | 5865 ret = gen_lowpart_SUBREG (GET_MODE (v->val_rtx), val); |
5266 case MINUS: | 5873 case MINUS: |
5267 code = PLUS; | 5874 code = PLUS; |
5268 goto binary; | 5875 goto binary; |
5269 binary: | 5876 binary: |
5270 if (GET_MODE (v->val_rtx) != GET_MODE (val)) | 5877 if (GET_MODE (v->val_rtx) != GET_MODE (val)) |
5271 return NULL_RTX; | 5878 return; |
5272 arg = XEXP (src, 1); | 5879 arg = XEXP (src, 1); |
5273 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF) | 5880 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF) |
5274 { | 5881 { |
5275 arg = cselib_expand_value_rtx (arg, scratch_regs, 5); | 5882 arg = cselib_expand_value_rtx (arg, scratch_regs, 5); |
5276 if (arg == NULL_RTX) | 5883 if (arg == NULL_RTX) |
5277 return NULL_RTX; | 5884 return; |
5278 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF) | 5885 if (!CONST_INT_P (arg) && GET_CODE (arg) != SYMBOL_REF) |
5279 return NULL_RTX; | 5886 return; |
5280 } | 5887 } |
5281 ret = simplify_gen_binary (code, GET_MODE (val), val, arg); | 5888 ret = simplify_gen_binary (code, GET_MODE (val), val, arg); |
5282 if (ret == val) | |
5283 /* Ensure ret isn't VALUE itself (which can happen e.g. for | |
5284 (plus (reg1) (reg2)) when reg2 is known to be 0), as that | |
5285 breaks a lot of routines during var-tracking. */ | |
5286 ret = gen_rtx_fmt_ee (PLUS, GET_MODE (val), val, const0_rtx); | |
5287 break; | 5889 break; |
5288 default: | 5890 default: |
5289 gcc_unreachable (); | 5891 gcc_unreachable (); |
5290 } | 5892 } |
5291 | 5893 |
5292 return gen_rtx_CONCAT (GET_MODE (v->val_rtx), v->val_rtx, ret); | 5894 cselib_add_permanent_equiv (v, ret, insn); |
5293 } | 5895 } |
5294 | 5896 |
5295 /* Add stores (register and memory references) LOC which will be tracked | 5897 /* Add stores (register and memory references) LOC which will be tracked |
5296 to VTI (bb)->mos. EXPR is the RTL expression containing the store. | 5898 to VTI (bb)->mos. EXPR is the RTL expression containing the store. |
5297 CUIP->insn is instruction which the LOC is part of. */ | 5899 CUIP->insn is instruction which the LOC is part of. */ |
5298 | 5900 |
5299 static void | 5901 static void |
5300 add_stores (rtx loc, const_rtx expr, void *cuip) | 5902 add_stores (rtx loc, const_rtx expr, void *cuip) |
5301 { | 5903 { |
5302 enum machine_mode mode = VOIDmode, mode2; | 5904 machine_mode mode = VOIDmode, mode2; |
5303 struct count_use_info *cui = (struct count_use_info *)cuip; | 5905 struct count_use_info *cui = (struct count_use_info *)cuip; |
5304 basic_block bb = cui->bb; | 5906 basic_block bb = cui->bb; |
5305 micro_operation mo; | 5907 micro_operation mo; |
5306 rtx oloc = loc, nloc, src = NULL; | 5908 rtx oloc = loc, nloc, src = NULL; |
5307 enum micro_operation_type type = use_type (loc, cui, &mode); | 5909 enum micro_operation_type type = use_type (loc, cui, &mode); |
5308 bool track_p = false; | 5910 bool track_p = false; |
5309 cselib_val *v; | 5911 cselib_val *v; |
5310 bool resolve, preserve; | 5912 bool resolve, preserve; |
5311 rtx reverse; | |
5312 | 5913 |
5313 if (type == MO_CLOBBER) | 5914 if (type == MO_CLOBBER) |
5314 return; | 5915 return; |
5315 | 5916 |
5316 mode2 = mode; | 5917 mode2 = mode; |
5322 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE) | 5923 || !(track_p = use_type (loc, NULL, &mode2) == MO_USE) |
5323 || GET_CODE (expr) == CLOBBER) | 5924 || GET_CODE (expr) == CLOBBER) |
5324 { | 5925 { |
5325 mo.type = MO_CLOBBER; | 5926 mo.type = MO_CLOBBER; |
5326 mo.u.loc = loc; | 5927 mo.u.loc = loc; |
5928 if (GET_CODE (expr) == SET | |
5929 && SET_DEST (expr) == loc | |
5930 && !unsuitable_loc (SET_SRC (expr)) | |
5931 && find_use_val (loc, mode, cui)) | |
5932 { | |
5933 gcc_checking_assert (type == MO_VAL_SET); | |
5934 mo.u.loc = gen_rtx_SET (loc, SET_SRC (expr)); | |
5935 } | |
5327 } | 5936 } |
5328 else | 5937 else |
5329 { | 5938 { |
5330 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc) | 5939 if (GET_CODE (expr) == SET |
5940 && SET_DEST (expr) == loc | |
5941 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS) | |
5331 src = var_lowpart (mode2, SET_SRC (expr)); | 5942 src = var_lowpart (mode2, SET_SRC (expr)); |
5332 loc = var_lowpart (mode2, loc); | 5943 loc = var_lowpart (mode2, loc); |
5333 | 5944 |
5334 if (src == NULL) | 5945 if (src == NULL) |
5335 { | 5946 { |
5336 mo.type = MO_SET; | 5947 mo.type = MO_SET; |
5337 mo.u.loc = loc; | 5948 mo.u.loc = loc; |
5338 } | 5949 } |
5339 else | 5950 else |
5340 { | 5951 { |
5341 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src); | 5952 rtx xexpr = gen_rtx_SET (loc, src); |
5342 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc))) | 5953 if (same_variable_part_p (src, REG_EXPR (loc), REG_OFFSET (loc))) |
5343 mo.type = MO_COPY; | 5954 { |
5955 /* If this is an instruction copying (part of) a parameter | |
5956 passed by invisible reference to its register location, | |
5957 pretend it's a SET so that the initial memory location | |
5958 is discarded, as the parameter register can be reused | |
5959 for other purposes and we do not track locations based | |
5960 on generic registers. */ | |
5961 if (MEM_P (src) | |
5962 && REG_EXPR (loc) | |
5963 && TREE_CODE (REG_EXPR (loc)) == PARM_DECL | |
5964 && DECL_MODE (REG_EXPR (loc)) != BLKmode | |
5965 && MEM_P (DECL_INCOMING_RTL (REG_EXPR (loc))) | |
5966 && XEXP (DECL_INCOMING_RTL (REG_EXPR (loc)), 0) | |
5967 != arg_pointer_rtx) | |
5968 mo.type = MO_SET; | |
5969 else | |
5970 mo.type = MO_COPY; | |
5971 } | |
5344 else | 5972 else |
5345 mo.type = MO_SET; | 5973 mo.type = MO_SET; |
5346 mo.u.loc = xexpr; | 5974 mo.u.loc = xexpr; |
5347 } | 5975 } |
5348 } | 5976 } |
5352 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE) | 5980 && ((track_p = use_type (loc, NULL, &mode2) == MO_USE) |
5353 || cui->sets)) | 5981 || cui->sets)) |
5354 { | 5982 { |
5355 if (MEM_P (loc) && type == MO_VAL_SET | 5983 if (MEM_P (loc) && type == MO_VAL_SET |
5356 && !REG_P (XEXP (loc, 0)) | 5984 && !REG_P (XEXP (loc, 0)) |
5357 && !MEM_P (XEXP (loc, 0)) | 5985 && !MEM_P (XEXP (loc, 0))) |
5358 && (GET_CODE (XEXP (loc, 0)) != PLUS | |
5359 || XEXP (XEXP (loc, 0), 0) != cfa_base_rtx | |
5360 || !CONST_INT_P (XEXP (XEXP (loc, 0), 1)))) | |
5361 { | 5986 { |
5362 rtx mloc = loc; | 5987 rtx mloc = loc; |
5363 enum machine_mode address_mode = get_address_mode (mloc); | 5988 machine_mode address_mode = get_address_mode (mloc); |
5364 cselib_val *val = cselib_lookup (XEXP (mloc, 0), | 5989 cselib_val *val = cselib_lookup (XEXP (mloc, 0), |
5365 address_mode, 0, | 5990 address_mode, 0, |
5366 GET_MODE (mloc)); | 5991 GET_MODE (mloc)); |
5367 | 5992 |
5368 if (val && !cselib_preserved_value_p (val)) | 5993 if (val && !cselib_preserved_value_p (val)) |
5369 { | 5994 preserve_value (val); |
5370 preserve_value (val); | |
5371 mo.type = MO_VAL_USE; | |
5372 mloc = cselib_subst_to_values (XEXP (mloc, 0), | |
5373 GET_MODE (mloc)); | |
5374 mo.u.loc = gen_rtx_CONCAT (address_mode, val->val_rtx, mloc); | |
5375 mo.insn = cui->insn; | |
5376 if (dump_file && (dump_flags & TDF_DETAILS)) | |
5377 log_op_type (mo.u.loc, cui->bb, cui->insn, | |
5378 mo.type, dump_file); | |
5379 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo); | |
5380 } | |
5381 } | 5995 } |
5382 | 5996 |
5383 if (GET_CODE (expr) == CLOBBER || !track_p) | 5997 if (GET_CODE (expr) == CLOBBER || !track_p) |
5384 { | 5998 { |
5385 mo.type = MO_CLOBBER; | 5999 mo.type = MO_CLOBBER; |
5386 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc; | 6000 mo.u.loc = track_p ? var_lowpart (mode2, loc) : loc; |
5387 } | 6001 } |
5388 else | 6002 else |
5389 { | 6003 { |
5390 if (GET_CODE (expr) == SET && SET_DEST (expr) == loc) | 6004 if (GET_CODE (expr) == SET |
6005 && SET_DEST (expr) == loc | |
6006 && GET_CODE (SET_SRC (expr)) != ASM_OPERANDS) | |
5391 src = var_lowpart (mode2, SET_SRC (expr)); | 6007 src = var_lowpart (mode2, SET_SRC (expr)); |
5392 loc = var_lowpart (mode2, loc); | 6008 loc = var_lowpart (mode2, loc); |
5393 | 6009 |
5394 if (src == NULL) | 6010 if (src == NULL) |
5395 { | 6011 { |
5396 mo.type = MO_SET; | 6012 mo.type = MO_SET; |
5397 mo.u.loc = loc; | 6013 mo.u.loc = loc; |
5398 } | 6014 } |
5399 else | 6015 else |
5400 { | 6016 { |
5401 rtx xexpr = gen_rtx_SET (VOIDmode, loc, src); | 6017 rtx xexpr = gen_rtx_SET (loc, src); |
5402 if (same_variable_part_p (SET_SRC (xexpr), | 6018 if (same_variable_part_p (SET_SRC (xexpr), |
5403 MEM_EXPR (loc), | 6019 MEM_EXPR (loc), |
5404 INT_MEM_OFFSET (loc))) | 6020 INT_MEM_OFFSET (loc))) |
5405 mo.type = MO_COPY; | 6021 mo.type = MO_COPY; |
5406 else | 6022 else |
5421 if (!v) | 6037 if (!v) |
5422 goto log_and_return; | 6038 goto log_and_return; |
5423 | 6039 |
5424 resolve = preserve = !cselib_preserved_value_p (v); | 6040 resolve = preserve = !cselib_preserved_value_p (v); |
5425 | 6041 |
6042 /* We cannot track values for multiple-part variables, so we track only | |
6043 locations for tracked record parameters. */ | |
6044 if (track_p | |
6045 && REG_P (loc) | |
6046 && REG_EXPR (loc) | |
6047 && tracked_record_parameter_p (REG_EXPR (loc))) | |
6048 { | |
6049 /* Although we don't use the value here, it could be used later by the | |
6050 mere virtue of its existence as the operand of the reverse operation | |
6051 that gave rise to it (typically extension/truncation). Make sure it | |
6052 is preserved as required by vt_expand_var_loc_chain. */ | |
6053 if (preserve) | |
6054 preserve_value (v); | |
6055 goto log_and_return; | |
6056 } | |
6057 | |
6058 if (loc == stack_pointer_rtx | |
6059 && hard_frame_pointer_adjustment != -1 | |
6060 && preserve) | |
6061 cselib_set_value_sp_based (v); | |
6062 | |
5426 nloc = replace_expr_with_values (oloc); | 6063 nloc = replace_expr_with_values (oloc); |
5427 if (nloc) | 6064 if (nloc) |
5428 oloc = nloc; | 6065 oloc = nloc; |
5429 | 6066 |
5430 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC) | 6067 if (GET_CODE (PATTERN (cui->insn)) == COND_EXEC) |
5431 { | 6068 { |
5432 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode); | 6069 cselib_val *oval = cselib_lookup (oloc, GET_MODE (oloc), 0, VOIDmode); |
5433 | 6070 |
5434 gcc_assert (oval != v); | 6071 if (oval == v) |
6072 return; | |
5435 gcc_assert (REG_P (oloc) || MEM_P (oloc)); | 6073 gcc_assert (REG_P (oloc) || MEM_P (oloc)); |
5436 | 6074 |
5437 if (!cselib_preserved_value_p (oval)) | 6075 if (oval && !cselib_preserved_value_p (oval)) |
5438 { | 6076 { |
5439 micro_operation moa; | 6077 micro_operation moa; |
5440 | 6078 |
5441 preserve_value (oval); | 6079 preserve_value (oval); |
5442 | 6080 |
5446 moa.insn = cui->insn; | 6084 moa.insn = cui->insn; |
5447 | 6085 |
5448 if (dump_file && (dump_flags & TDF_DETAILS)) | 6086 if (dump_file && (dump_flags & TDF_DETAILS)) |
5449 log_op_type (moa.u.loc, cui->bb, cui->insn, | 6087 log_op_type (moa.u.loc, cui->bb, cui->insn, |
5450 moa.type, dump_file); | 6088 moa.type, dump_file); |
5451 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &moa); | 6089 VTI (bb)->mos.safe_push (moa); |
5452 } | 6090 } |
5453 | 6091 |
5454 resolve = false; | 6092 resolve = false; |
5455 } | 6093 } |
5456 else if (resolve && GET_CODE (mo.u.loc) == SET) | 6094 else if (resolve && GET_CODE (mo.u.loc) == SET) |
5457 { | 6095 { |
5458 nloc = replace_expr_with_values (SET_SRC (expr)); | 6096 if (REG_P (SET_SRC (expr)) || MEM_P (SET_SRC (expr))) |
6097 nloc = replace_expr_with_values (SET_SRC (expr)); | |
6098 else | |
6099 nloc = NULL_RTX; | |
5459 | 6100 |
5460 /* Avoid the mode mismatch between oexpr and expr. */ | 6101 /* Avoid the mode mismatch between oexpr and expr. */ |
5461 if (!nloc && mode != mode2) | 6102 if (!nloc && mode != mode2) |
5462 { | 6103 { |
5463 nloc = SET_SRC (expr); | 6104 nloc = SET_SRC (expr); |
5464 gcc_assert (oloc == SET_DEST (expr)); | 6105 gcc_assert (oloc == SET_DEST (expr)); |
5465 } | 6106 } |
5466 | 6107 |
5467 if (nloc) | 6108 if (nloc && nloc != SET_SRC (mo.u.loc)) |
5468 oloc = gen_rtx_SET (GET_MODE (mo.u.loc), oloc, nloc); | 6109 oloc = gen_rtx_SET (oloc, nloc); |
5469 else | 6110 else |
5470 { | 6111 { |
5471 if (oloc == SET_DEST (mo.u.loc)) | 6112 if (oloc == SET_DEST (mo.u.loc)) |
5472 /* No point in duplicating. */ | 6113 /* No point in duplicating. */ |
5473 oloc = mo.u.loc; | 6114 oloc = mo.u.loc; |
5509 representations of dst and src, respectively. | 6150 representations of dst and src, respectively. |
5510 | 6151 |
5511 */ | 6152 */ |
5512 | 6153 |
5513 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC) | 6154 if (GET_CODE (PATTERN (cui->insn)) != COND_EXEC) |
5514 { | 6155 reverse_op (v->val_rtx, expr, cui->insn); |
5515 reverse = reverse_op (v->val_rtx, expr); | |
5516 if (reverse) | |
5517 { | |
5518 loc = gen_rtx_CONCAT (GET_MODE (mo.u.loc), loc, reverse); | |
5519 VAL_EXPR_HAS_REVERSE (loc) = 1; | |
5520 } | |
5521 } | |
5522 | 6156 |
5523 mo.u.loc = loc; | 6157 mo.u.loc = loc; |
5524 | 6158 |
5525 if (track_p) | 6159 if (track_p) |
5526 VAL_HOLDS_TRACK_EXPR (loc) = 1; | 6160 VAL_HOLDS_TRACK_EXPR (loc) = 1; |
5537 mo.type = MO_VAL_SET; | 6171 mo.type = MO_VAL_SET; |
5538 | 6172 |
5539 log_and_return: | 6173 log_and_return: |
5540 if (dump_file && (dump_flags & TDF_DETAILS)) | 6174 if (dump_file && (dump_flags & TDF_DETAILS)) |
5541 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file); | 6175 log_op_type (mo.u.loc, cui->bb, cui->insn, mo.type, dump_file); |
5542 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo); | 6176 VTI (bb)->mos.safe_push (mo); |
6177 } | |
6178 | |
6179 /* Arguments to the call. */ | |
6180 static rtx call_arguments; | |
6181 | |
6182 /* Compute call_arguments. */ | |
6183 | |
6184 static void | |
6185 prepare_call_arguments (basic_block bb, rtx_insn *insn) | |
6186 { | |
6187 rtx link, x, call; | |
6188 rtx prev, cur, next; | |
6189 rtx this_arg = NULL_RTX; | |
6190 tree type = NULL_TREE, t, fndecl = NULL_TREE; | |
6191 tree obj_type_ref = NULL_TREE; | |
6192 CUMULATIVE_ARGS args_so_far_v; | |
6193 cumulative_args_t args_so_far; | |
6194 | |
6195 memset (&args_so_far_v, 0, sizeof (args_so_far_v)); | |
6196 args_so_far = pack_cumulative_args (&args_so_far_v); | |
6197 call = get_call_rtx_from (insn); | |
6198 if (call) | |
6199 { | |
6200 if (GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF) | |
6201 { | |
6202 rtx symbol = XEXP (XEXP (call, 0), 0); | |
6203 if (SYMBOL_REF_DECL (symbol)) | |
6204 fndecl = SYMBOL_REF_DECL (symbol); | |
6205 } | |
6206 if (fndecl == NULL_TREE) | |
6207 fndecl = MEM_EXPR (XEXP (call, 0)); | |
6208 if (fndecl | |
6209 && TREE_CODE (TREE_TYPE (fndecl)) != FUNCTION_TYPE | |
6210 && TREE_CODE (TREE_TYPE (fndecl)) != METHOD_TYPE) | |
6211 fndecl = NULL_TREE; | |
6212 if (fndecl && TYPE_ARG_TYPES (TREE_TYPE (fndecl))) | |
6213 type = TREE_TYPE (fndecl); | |
6214 if (fndecl && TREE_CODE (fndecl) != FUNCTION_DECL) | |
6215 { | |
6216 if (TREE_CODE (fndecl) == INDIRECT_REF | |
6217 && TREE_CODE (TREE_OPERAND (fndecl, 0)) == OBJ_TYPE_REF) | |
6218 obj_type_ref = TREE_OPERAND (fndecl, 0); | |
6219 fndecl = NULL_TREE; | |
6220 } | |
6221 if (type) | |
6222 { | |
6223 for (t = TYPE_ARG_TYPES (type); t && t != void_list_node; | |
6224 t = TREE_CHAIN (t)) | |
6225 if (TREE_CODE (TREE_VALUE (t)) == REFERENCE_TYPE | |
6226 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_VALUE (t)))) | |
6227 break; | |
6228 if ((t == NULL || t == void_list_node) && obj_type_ref == NULL_TREE) | |
6229 type = NULL; | |
6230 else | |
6231 { | |
6232 int nargs ATTRIBUTE_UNUSED = list_length (TYPE_ARG_TYPES (type)); | |
6233 link = CALL_INSN_FUNCTION_USAGE (insn); | |
6234 #ifndef PCC_STATIC_STRUCT_RETURN | |
6235 if (aggregate_value_p (TREE_TYPE (type), type) | |
6236 && targetm.calls.struct_value_rtx (type, 0) == 0) | |
6237 { | |
6238 tree struct_addr = build_pointer_type (TREE_TYPE (type)); | |
6239 machine_mode mode = TYPE_MODE (struct_addr); | |
6240 rtx reg; | |
6241 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl, | |
6242 nargs + 1); | |
6243 reg = targetm.calls.function_arg (args_so_far, mode, | |
6244 struct_addr, true); | |
6245 targetm.calls.function_arg_advance (args_so_far, mode, | |
6246 struct_addr, true); | |
6247 if (reg == NULL_RTX) | |
6248 { | |
6249 for (; link; link = XEXP (link, 1)) | |
6250 if (GET_CODE (XEXP (link, 0)) == USE | |
6251 && MEM_P (XEXP (XEXP (link, 0), 0))) | |
6252 { | |
6253 link = XEXP (link, 1); | |
6254 break; | |
6255 } | |
6256 } | |
6257 } | |
6258 else | |
6259 #endif | |
6260 INIT_CUMULATIVE_ARGS (args_so_far_v, type, NULL_RTX, fndecl, | |
6261 nargs); | |
6262 if (obj_type_ref && TYPE_ARG_TYPES (type) != void_list_node) | |
6263 { | |
6264 machine_mode mode; | |
6265 t = TYPE_ARG_TYPES (type); | |
6266 mode = TYPE_MODE (TREE_VALUE (t)); | |
6267 this_arg = targetm.calls.function_arg (args_so_far, mode, | |
6268 TREE_VALUE (t), true); | |
6269 if (this_arg && !REG_P (this_arg)) | |
6270 this_arg = NULL_RTX; | |
6271 else if (this_arg == NULL_RTX) | |
6272 { | |
6273 for (; link; link = XEXP (link, 1)) | |
6274 if (GET_CODE (XEXP (link, 0)) == USE | |
6275 && MEM_P (XEXP (XEXP (link, 0), 0))) | |
6276 { | |
6277 this_arg = XEXP (XEXP (link, 0), 0); | |
6278 break; | |
6279 } | |
6280 } | |
6281 } | |
6282 } | |
6283 } | |
6284 } | |
6285 t = type ? TYPE_ARG_TYPES (type) : NULL_TREE; | |
6286 | |
6287 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1)) | |
6288 if (GET_CODE (XEXP (link, 0)) == USE) | |
6289 { | |
6290 rtx item = NULL_RTX; | |
6291 x = XEXP (XEXP (link, 0), 0); | |
6292 if (GET_MODE (link) == VOIDmode | |
6293 || GET_MODE (link) == BLKmode | |
6294 || (GET_MODE (link) != GET_MODE (x) | |
6295 && ((GET_MODE_CLASS (GET_MODE (link)) != MODE_INT | |
6296 && GET_MODE_CLASS (GET_MODE (link)) != MODE_PARTIAL_INT) | |
6297 || (GET_MODE_CLASS (GET_MODE (x)) != MODE_INT | |
6298 && GET_MODE_CLASS (GET_MODE (x)) != MODE_PARTIAL_INT)))) | |
6299 /* Can't do anything for these, if the original type mode | |
6300 isn't known or can't be converted. */; | |
6301 else if (REG_P (x)) | |
6302 { | |
6303 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode); | |
6304 scalar_int_mode mode; | |
6305 if (val && cselib_preserved_value_p (val)) | |
6306 item = val->val_rtx; | |
6307 else if (is_a <scalar_int_mode> (GET_MODE (x), &mode)) | |
6308 { | |
6309 opt_scalar_int_mode mode_iter; | |
6310 FOR_EACH_WIDER_MODE (mode_iter, mode) | |
6311 { | |
6312 mode = mode_iter.require (); | |
6313 if (GET_MODE_BITSIZE (mode) > BITS_PER_WORD) | |
6314 break; | |
6315 | |
6316 rtx reg = simplify_subreg (mode, x, GET_MODE (x), 0); | |
6317 if (reg == NULL_RTX || !REG_P (reg)) | |
6318 continue; | |
6319 val = cselib_lookup (reg, mode, 0, VOIDmode); | |
6320 if (val && cselib_preserved_value_p (val)) | |
6321 { | |
6322 item = val->val_rtx; | |
6323 break; | |
6324 } | |
6325 } | |
6326 } | |
6327 } | |
6328 else if (MEM_P (x)) | |
6329 { | |
6330 rtx mem = x; | |
6331 cselib_val *val; | |
6332 | |
6333 if (!frame_pointer_needed) | |
6334 { | |
6335 struct adjust_mem_data amd; | |
6336 amd.mem_mode = VOIDmode; | |
6337 amd.stack_adjust = -VTI (bb)->out.stack_adjust; | |
6338 amd.store = true; | |
6339 mem = simplify_replace_fn_rtx (mem, NULL_RTX, adjust_mems, | |
6340 &amd); | |
6341 gcc_assert (amd.side_effects.is_empty ()); | |
6342 } | |
6343 val = cselib_lookup (mem, GET_MODE (mem), 0, VOIDmode); | |
6344 if (val && cselib_preserved_value_p (val)) | |
6345 item = val->val_rtx; | |
6346 else if (GET_MODE_CLASS (GET_MODE (mem)) != MODE_INT | |
6347 && GET_MODE_CLASS (GET_MODE (mem)) != MODE_PARTIAL_INT) | |
6348 { | |
6349 /* For non-integer stack argument see also if they weren't | |
6350 initialized by integers. */ | |
6351 scalar_int_mode imode; | |
6352 if (int_mode_for_mode (GET_MODE (mem)).exists (&imode) | |
6353 && imode != GET_MODE (mem)) | |
6354 { | |
6355 val = cselib_lookup (adjust_address_nv (mem, imode, 0), | |
6356 imode, 0, VOIDmode); | |
6357 if (val && cselib_preserved_value_p (val)) | |
6358 item = lowpart_subreg (GET_MODE (x), val->val_rtx, | |
6359 imode); | |
6360 } | |
6361 } | |
6362 } | |
6363 if (item) | |
6364 { | |
6365 rtx x2 = x; | |
6366 if (GET_MODE (item) != GET_MODE (link)) | |
6367 item = lowpart_subreg (GET_MODE (link), item, GET_MODE (item)); | |
6368 if (GET_MODE (x2) != GET_MODE (link)) | |
6369 x2 = lowpart_subreg (GET_MODE (link), x2, GET_MODE (x2)); | |
6370 item = gen_rtx_CONCAT (GET_MODE (link), x2, item); | |
6371 call_arguments | |
6372 = gen_rtx_EXPR_LIST (VOIDmode, item, call_arguments); | |
6373 } | |
6374 if (t && t != void_list_node) | |
6375 { | |
6376 tree argtype = TREE_VALUE (t); | |
6377 machine_mode mode = TYPE_MODE (argtype); | |
6378 rtx reg; | |
6379 if (pass_by_reference (&args_so_far_v, mode, argtype, true)) | |
6380 { | |
6381 argtype = build_pointer_type (argtype); | |
6382 mode = TYPE_MODE (argtype); | |
6383 } | |
6384 reg = targetm.calls.function_arg (args_so_far, mode, | |
6385 argtype, true); | |
6386 if (TREE_CODE (argtype) == REFERENCE_TYPE | |
6387 && INTEGRAL_TYPE_P (TREE_TYPE (argtype)) | |
6388 && reg | |
6389 && REG_P (reg) | |
6390 && GET_MODE (reg) == mode | |
6391 && (GET_MODE_CLASS (mode) == MODE_INT | |
6392 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT) | |
6393 && REG_P (x) | |
6394 && REGNO (x) == REGNO (reg) | |
6395 && GET_MODE (x) == mode | |
6396 && item) | |
6397 { | |
6398 machine_mode indmode | |
6399 = TYPE_MODE (TREE_TYPE (argtype)); | |
6400 rtx mem = gen_rtx_MEM (indmode, x); | |
6401 cselib_val *val = cselib_lookup (mem, indmode, 0, VOIDmode); | |
6402 if (val && cselib_preserved_value_p (val)) | |
6403 { | |
6404 item = gen_rtx_CONCAT (indmode, mem, val->val_rtx); | |
6405 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item, | |
6406 call_arguments); | |
6407 } | |
6408 else | |
6409 { | |
6410 struct elt_loc_list *l; | |
6411 tree initial; | |
6412 | |
6413 /* Try harder, when passing address of a constant | |
6414 pool integer it can be easily read back. */ | |
6415 item = XEXP (item, 1); | |
6416 if (GET_CODE (item) == SUBREG) | |
6417 item = SUBREG_REG (item); | |
6418 gcc_assert (GET_CODE (item) == VALUE); | |
6419 val = CSELIB_VAL_PTR (item); | |
6420 for (l = val->locs; l; l = l->next) | |
6421 if (GET_CODE (l->loc) == SYMBOL_REF | |
6422 && TREE_CONSTANT_POOL_ADDRESS_P (l->loc) | |
6423 && SYMBOL_REF_DECL (l->loc) | |
6424 && DECL_INITIAL (SYMBOL_REF_DECL (l->loc))) | |
6425 { | |
6426 initial = DECL_INITIAL (SYMBOL_REF_DECL (l->loc)); | |
6427 if (tree_fits_shwi_p (initial)) | |
6428 { | |
6429 item = GEN_INT (tree_to_shwi (initial)); | |
6430 item = gen_rtx_CONCAT (indmode, mem, item); | |
6431 call_arguments | |
6432 = gen_rtx_EXPR_LIST (VOIDmode, item, | |
6433 call_arguments); | |
6434 } | |
6435 break; | |
6436 } | |
6437 } | |
6438 } | |
6439 targetm.calls.function_arg_advance (args_so_far, mode, | |
6440 argtype, true); | |
6441 t = TREE_CHAIN (t); | |
6442 } | |
6443 } | |
6444 | |
6445 /* Add debug arguments. */ | |
6446 if (fndecl | |
6447 && TREE_CODE (fndecl) == FUNCTION_DECL | |
6448 && DECL_HAS_DEBUG_ARGS_P (fndecl)) | |
6449 { | |
6450 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (fndecl); | |
6451 if (debug_args) | |
6452 { | |
6453 unsigned int ix; | |
6454 tree param; | |
6455 for (ix = 0; vec_safe_iterate (*debug_args, ix, ¶m); ix += 2) | |
6456 { | |
6457 rtx item; | |
6458 tree dtemp = (**debug_args)[ix + 1]; | |
6459 machine_mode mode = DECL_MODE (dtemp); | |
6460 item = gen_rtx_DEBUG_PARAMETER_REF (mode, param); | |
6461 item = gen_rtx_CONCAT (mode, item, DECL_RTL_KNOWN_SET (dtemp)); | |
6462 call_arguments = gen_rtx_EXPR_LIST (VOIDmode, item, | |
6463 call_arguments); | |
6464 } | |
6465 } | |
6466 } | |
6467 | |
6468 /* Reverse call_arguments chain. */ | |
6469 prev = NULL_RTX; | |
6470 for (cur = call_arguments; cur; cur = next) | |
6471 { | |
6472 next = XEXP (cur, 1); | |
6473 XEXP (cur, 1) = prev; | |
6474 prev = cur; | |
6475 } | |
6476 call_arguments = prev; | |
6477 | |
6478 x = get_call_rtx_from (insn); | |
6479 if (x) | |
6480 { | |
6481 x = XEXP (XEXP (x, 0), 0); | |
6482 if (GET_CODE (x) == SYMBOL_REF) | |
6483 /* Don't record anything. */; | |
6484 else if (CONSTANT_P (x)) | |
6485 { | |
6486 x = gen_rtx_CONCAT (GET_MODE (x) == VOIDmode ? Pmode : GET_MODE (x), | |
6487 pc_rtx, x); | |
6488 call_arguments | |
6489 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments); | |
6490 } | |
6491 else | |
6492 { | |
6493 cselib_val *val = cselib_lookup (x, GET_MODE (x), 0, VOIDmode); | |
6494 if (val && cselib_preserved_value_p (val)) | |
6495 { | |
6496 x = gen_rtx_CONCAT (GET_MODE (x), pc_rtx, val->val_rtx); | |
6497 call_arguments | |
6498 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments); | |
6499 } | |
6500 } | |
6501 } | |
6502 if (this_arg) | |
6503 { | |
6504 machine_mode mode | |
6505 = TYPE_MODE (TREE_TYPE (OBJ_TYPE_REF_EXPR (obj_type_ref))); | |
6506 rtx clobbered = gen_rtx_MEM (mode, this_arg); | |
6507 HOST_WIDE_INT token | |
6508 = tree_to_shwi (OBJ_TYPE_REF_TOKEN (obj_type_ref)); | |
6509 if (token) | |
6510 clobbered = plus_constant (mode, clobbered, | |
6511 token * GET_MODE_SIZE (mode)); | |
6512 clobbered = gen_rtx_MEM (mode, clobbered); | |
6513 x = gen_rtx_CONCAT (mode, gen_rtx_CLOBBER (VOIDmode, pc_rtx), clobbered); | |
6514 call_arguments | |
6515 = gen_rtx_EXPR_LIST (VOIDmode, x, call_arguments); | |
6516 } | |
5543 } | 6517 } |
5544 | 6518 |
5545 /* Callback for cselib_record_sets_hook, that records as micro | 6519 /* Callback for cselib_record_sets_hook, that records as micro |
5546 operations uses and stores in an insn after cselib_record_sets has | 6520 operations uses and stores in an insn after cselib_record_sets has |
5547 analyzed the sets in an insn, but before it modifies the stored | 6521 analyzed the sets in an insn, but before it modifies the stored |
5548 values in the internal tables, unless cselib_record_sets doesn't | 6522 values in the internal tables, unless cselib_record_sets doesn't |
5549 call it directly (perhaps because we're not doing cselib in the | 6523 call it directly (perhaps because we're not doing cselib in the |
5550 first place, in which case sets and n_sets will be 0). */ | 6524 first place, in which case sets and n_sets will be 0). */ |
5551 | 6525 |
5552 static void | 6526 static void |
5553 add_with_sets (rtx insn, struct cselib_set *sets, int n_sets) | 6527 add_with_sets (rtx_insn *insn, struct cselib_set *sets, int n_sets) |
5554 { | 6528 { |
5555 basic_block bb = BLOCK_FOR_INSN (insn); | 6529 basic_block bb = BLOCK_FOR_INSN (insn); |
5556 int n1, n2; | 6530 int n1, n2; |
5557 struct count_use_info cui; | 6531 struct count_use_info cui; |
5558 micro_operation *mos; | 6532 micro_operation *mos; |
5562 cui.insn = insn; | 6536 cui.insn = insn; |
5563 cui.bb = bb; | 6537 cui.bb = bb; |
5564 cui.sets = sets; | 6538 cui.sets = sets; |
5565 cui.n_sets = n_sets; | 6539 cui.n_sets = n_sets; |
5566 | 6540 |
5567 n1 = VEC_length (micro_operation, VTI (bb)->mos); | 6541 n1 = VTI (bb)->mos.length (); |
5568 cui.store_p = false; | 6542 cui.store_p = false; |
5569 note_uses (&PATTERN (insn), add_uses_1, &cui); | 6543 note_uses (&PATTERN (insn), add_uses_1, &cui); |
5570 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1; | 6544 n2 = VTI (bb)->mos.length () - 1; |
5571 mos = VEC_address (micro_operation, VTI (bb)->mos); | 6545 mos = VTI (bb)->mos.address (); |
5572 | 6546 |
5573 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and | 6547 /* Order the MO_USEs to be before MO_USE_NO_VARs and MO_VAL_USE, and |
5574 MO_VAL_LOC last. */ | 6548 MO_VAL_LOC last. */ |
5575 while (n1 < n2) | 6549 while (n1 < n2) |
5576 { | 6550 { |
5577 while (n1 < n2 && mos[n1].type == MO_USE) | 6551 while (n1 < n2 && mos[n1].type == MO_USE) |
5578 n1++; | 6552 n1++; |
5579 while (n1 < n2 && mos[n2].type != MO_USE) | 6553 while (n1 < n2 && mos[n2].type != MO_USE) |
5580 n2--; | 6554 n2--; |
5581 if (n1 < n2) | 6555 if (n1 < n2) |
5582 { | 6556 std::swap (mos[n1], mos[n2]); |
5583 micro_operation sw; | 6557 } |
5584 | 6558 |
5585 sw = mos[n1]; | 6559 n2 = VTI (bb)->mos.length () - 1; |
5586 mos[n1] = mos[n2]; | |
5587 mos[n2] = sw; | |
5588 } | |
5589 } | |
5590 | |
5591 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1; | |
5592 while (n1 < n2) | 6560 while (n1 < n2) |
5593 { | 6561 { |
5594 while (n1 < n2 && mos[n1].type != MO_VAL_LOC) | 6562 while (n1 < n2 && mos[n1].type != MO_VAL_LOC) |
5595 n1++; | 6563 n1++; |
5596 while (n1 < n2 && mos[n2].type == MO_VAL_LOC) | 6564 while (n1 < n2 && mos[n2].type == MO_VAL_LOC) |
5597 n2--; | 6565 n2--; |
5598 if (n1 < n2) | 6566 if (n1 < n2) |
5599 { | 6567 std::swap (mos[n1], mos[n2]); |
5600 micro_operation sw; | |
5601 | |
5602 sw = mos[n1]; | |
5603 mos[n1] = mos[n2]; | |
5604 mos[n2] = sw; | |
5605 } | |
5606 } | 6568 } |
5607 | 6569 |
5608 if (CALL_P (insn)) | 6570 if (CALL_P (insn)) |
5609 { | 6571 { |
5610 micro_operation mo; | 6572 micro_operation mo; |
5611 | 6573 |
5612 mo.type = MO_CALL; | 6574 mo.type = MO_CALL; |
5613 mo.insn = insn; | 6575 mo.insn = insn; |
5614 mo.u.loc = NULL_RTX; | 6576 mo.u.loc = call_arguments; |
6577 call_arguments = NULL_RTX; | |
5615 | 6578 |
5616 if (dump_file && (dump_flags & TDF_DETAILS)) | 6579 if (dump_file && (dump_flags & TDF_DETAILS)) |
5617 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file); | 6580 log_op_type (PATTERN (insn), bb, insn, mo.type, dump_file); |
5618 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, &mo); | 6581 VTI (bb)->mos.safe_push (mo); |
5619 } | 6582 } |
5620 | 6583 |
5621 n1 = VEC_length (micro_operation, VTI (bb)->mos); | 6584 n1 = VTI (bb)->mos.length (); |
5622 /* This will record NEXT_INSN (insn), such that we can | 6585 /* This will record NEXT_INSN (insn), such that we can |
5623 insert notes before it without worrying about any | 6586 insert notes before it without worrying about any |
5624 notes that MO_USEs might emit after the insn. */ | 6587 notes that MO_USEs might emit after the insn. */ |
5625 cui.store_p = true; | 6588 cui.store_p = true; |
5626 note_stores (PATTERN (insn), add_stores, &cui); | 6589 note_stores (PATTERN (insn), add_stores, &cui); |
5627 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1; | 6590 n2 = VTI (bb)->mos.length () - 1; |
5628 mos = VEC_address (micro_operation, VTI (bb)->mos); | 6591 mos = VTI (bb)->mos.address (); |
5629 | 6592 |
5630 /* Order the MO_VAL_USEs first (note_stores does nothing | 6593 /* Order the MO_VAL_USEs first (note_stores does nothing |
5631 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this | 6594 on DEBUG_INSNs, so there are no MO_VAL_LOCs from this |
5632 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */ | 6595 insn), then MO_CLOBBERs, then MO_SET/MO_COPY/MO_VAL_SET. */ |
5633 while (n1 < n2) | 6596 while (n1 < n2) |
5635 while (n1 < n2 && mos[n1].type == MO_VAL_USE) | 6598 while (n1 < n2 && mos[n1].type == MO_VAL_USE) |
5636 n1++; | 6599 n1++; |
5637 while (n1 < n2 && mos[n2].type != MO_VAL_USE) | 6600 while (n1 < n2 && mos[n2].type != MO_VAL_USE) |
5638 n2--; | 6601 n2--; |
5639 if (n1 < n2) | 6602 if (n1 < n2) |
5640 { | 6603 std::swap (mos[n1], mos[n2]); |
5641 micro_operation sw; | 6604 } |
5642 | 6605 |
5643 sw = mos[n1]; | 6606 n2 = VTI (bb)->mos.length () - 1; |
5644 mos[n1] = mos[n2]; | |
5645 mos[n2] = sw; | |
5646 } | |
5647 } | |
5648 | |
5649 n2 = VEC_length (micro_operation, VTI (bb)->mos) - 1; | |
5650 while (n1 < n2) | 6607 while (n1 < n2) |
5651 { | 6608 { |
5652 while (n1 < n2 && mos[n1].type == MO_CLOBBER) | 6609 while (n1 < n2 && mos[n1].type == MO_CLOBBER) |
5653 n1++; | 6610 n1++; |
5654 while (n1 < n2 && mos[n2].type != MO_CLOBBER) | 6611 while (n1 < n2 && mos[n2].type != MO_CLOBBER) |
5655 n2--; | 6612 n2--; |
5656 if (n1 < n2) | 6613 if (n1 < n2) |
5657 { | 6614 std::swap (mos[n1], mos[n2]); |
5658 micro_operation sw; | |
5659 | |
5660 sw = mos[n1]; | |
5661 mos[n1] = mos[n2]; | |
5662 mos[n2] = sw; | |
5663 } | |
5664 } | 6615 } |
5665 } | 6616 } |
5666 | 6617 |
5667 static enum var_init_status | 6618 static enum var_init_status |
5668 find_src_status (dataflow_set *in, rtx src) | 6619 find_src_status (dataflow_set *in, rtx src) |
5691 static rtx | 6642 static rtx |
5692 find_src_set_src (dataflow_set *set, rtx src) | 6643 find_src_set_src (dataflow_set *set, rtx src) |
5693 { | 6644 { |
5694 tree decl = NULL_TREE; /* The variable being copied around. */ | 6645 tree decl = NULL_TREE; /* The variable being copied around. */ |
5695 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */ | 6646 rtx set_src = NULL_RTX; /* The value for "decl" stored in "src". */ |
5696 variable var; | 6647 variable *var; |
5697 location_chain nextp; | 6648 location_chain *nextp; |
5698 int i; | 6649 int i; |
5699 bool found; | 6650 bool found; |
5700 | 6651 |
5701 if (src && REG_P (src)) | 6652 if (src && REG_P (src)) |
5702 decl = var_debug_decl (REG_EXPR (src)); | 6653 decl = var_debug_decl (REG_EXPR (src)); |
5740 | 6691 |
5741 dataflow_set_init (&old_out); | 6692 dataflow_set_init (&old_out); |
5742 dataflow_set_copy (&old_out, out); | 6693 dataflow_set_copy (&old_out, out); |
5743 dataflow_set_copy (out, in); | 6694 dataflow_set_copy (out, in); |
5744 | 6695 |
5745 FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo) | 6696 if (MAY_HAVE_DEBUG_INSNS) |
5746 { | 6697 local_get_addr_cache = new hash_map<rtx, rtx>; |
5747 rtx insn = mo->insn; | 6698 |
6699 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo) | |
6700 { | |
6701 rtx_insn *insn = mo->insn; | |
5748 | 6702 |
5749 switch (mo->type) | 6703 switch (mo->type) |
5750 { | 6704 { |
5751 case MO_CALL: | 6705 case MO_CALL: |
5752 dataflow_set_clear_at_call (out); | 6706 dataflow_set_clear_at_call (out, insn); |
5753 break; | 6707 break; |
5754 | 6708 |
5755 case MO_USE: | 6709 case MO_USE: |
5756 { | 6710 { |
5757 rtx loc = mo->u.loc; | 6711 rtx loc = mo->u.loc; |
5832 break; | 6786 break; |
5833 | 6787 |
5834 case MO_VAL_SET: | 6788 case MO_VAL_SET: |
5835 { | 6789 { |
5836 rtx loc = mo->u.loc; | 6790 rtx loc = mo->u.loc; |
5837 rtx val, vloc, uloc, reverse = NULL_RTX; | 6791 rtx val, vloc, uloc; |
6792 rtx dstv, srcv; | |
5838 | 6793 |
5839 vloc = loc; | 6794 vloc = loc; |
5840 if (VAL_EXPR_HAS_REVERSE (loc)) | |
5841 { | |
5842 reverse = XEXP (loc, 1); | |
5843 vloc = XEXP (loc, 0); | |
5844 } | |
5845 uloc = XEXP (vloc, 1); | 6795 uloc = XEXP (vloc, 1); |
5846 val = XEXP (vloc, 0); | 6796 val = XEXP (vloc, 0); |
5847 vloc = uloc; | 6797 vloc = uloc; |
5848 | 6798 |
6799 if (GET_CODE (uloc) == SET) | |
6800 { | |
6801 dstv = SET_DEST (uloc); | |
6802 srcv = SET_SRC (uloc); | |
6803 } | |
6804 else | |
6805 { | |
6806 dstv = uloc; | |
6807 srcv = NULL; | |
6808 } | |
6809 | |
5849 if (GET_CODE (val) == CONCAT) | 6810 if (GET_CODE (val) == CONCAT) |
5850 { | 6811 { |
5851 vloc = XEXP (val, 1); | 6812 dstv = vloc = XEXP (val, 1); |
5852 val = XEXP (val, 0); | 6813 val = XEXP (val, 0); |
5853 } | 6814 } |
5854 | 6815 |
5855 if (GET_CODE (vloc) == SET) | 6816 if (GET_CODE (vloc) == SET) |
5856 { | 6817 { |
5857 rtx vsrc = SET_SRC (vloc); | 6818 srcv = SET_SRC (vloc); |
5858 | 6819 |
5859 gcc_assert (val != vsrc); | 6820 gcc_assert (val != srcv); |
5860 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc)); | 6821 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc)); |
5861 | 6822 |
5862 vloc = SET_DEST (vloc); | 6823 dstv = vloc = SET_DEST (vloc); |
5863 | 6824 |
5864 if (VAL_NEEDS_RESOLUTION (loc)) | 6825 if (VAL_NEEDS_RESOLUTION (loc)) |
5865 val_resolve (out, val, vsrc, insn); | 6826 val_resolve (out, val, srcv, insn); |
5866 } | 6827 } |
5867 else if (VAL_NEEDS_RESOLUTION (loc)) | 6828 else if (VAL_NEEDS_RESOLUTION (loc)) |
5868 { | 6829 { |
5869 gcc_assert (GET_CODE (uloc) == SET | 6830 gcc_assert (GET_CODE (uloc) == SET |
5870 && GET_CODE (SET_SRC (uloc)) == REG); | 6831 && GET_CODE (SET_SRC (uloc)) == REG); |
5876 if (VAL_EXPR_IS_CLOBBERED (loc)) | 6837 if (VAL_EXPR_IS_CLOBBERED (loc)) |
5877 { | 6838 { |
5878 if (REG_P (uloc)) | 6839 if (REG_P (uloc)) |
5879 var_reg_delete (out, uloc, true); | 6840 var_reg_delete (out, uloc, true); |
5880 else if (MEM_P (uloc)) | 6841 else if (MEM_P (uloc)) |
5881 var_mem_delete (out, uloc, true); | 6842 { |
6843 gcc_assert (MEM_P (dstv)); | |
6844 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc)); | |
6845 var_mem_delete (out, dstv, true); | |
6846 } | |
5882 } | 6847 } |
5883 else | 6848 else |
5884 { | 6849 { |
5885 bool copied_p = VAL_EXPR_IS_COPIED (loc); | 6850 bool copied_p = VAL_EXPR_IS_COPIED (loc); |
5886 rtx set_src = NULL; | 6851 rtx src = NULL, dst = uloc; |
5887 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED; | 6852 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED; |
5888 | 6853 |
5889 if (GET_CODE (uloc) == SET) | 6854 if (GET_CODE (uloc) == SET) |
5890 { | 6855 { |
5891 set_src = SET_SRC (uloc); | 6856 src = SET_SRC (uloc); |
5892 uloc = SET_DEST (uloc); | 6857 dst = SET_DEST (uloc); |
5893 } | 6858 } |
5894 | 6859 |
5895 if (copied_p) | 6860 if (copied_p) |
5896 { | 6861 { |
5897 if (flag_var_tracking_uninit) | 6862 if (flag_var_tracking_uninit) |
5898 { | 6863 { |
5899 status = find_src_status (in, set_src); | 6864 status = find_src_status (in, src); |
5900 | 6865 |
5901 if (status == VAR_INIT_STATUS_UNKNOWN) | 6866 if (status == VAR_INIT_STATUS_UNKNOWN) |
5902 status = find_src_status (out, set_src); | 6867 status = find_src_status (out, src); |
5903 } | 6868 } |
5904 | 6869 |
5905 set_src = find_src_set_src (in, set_src); | 6870 src = find_src_set_src (in, src); |
5906 } | 6871 } |
5907 | 6872 |
5908 if (REG_P (uloc)) | 6873 if (REG_P (dst)) |
5909 var_reg_delete_and_set (out, uloc, !copied_p, | 6874 var_reg_delete_and_set (out, dst, !copied_p, |
5910 status, set_src); | 6875 status, srcv); |
5911 else if (MEM_P (uloc)) | 6876 else if (MEM_P (dst)) |
5912 var_mem_delete_and_set (out, uloc, !copied_p, | 6877 { |
5913 status, set_src); | 6878 gcc_assert (MEM_P (dstv)); |
6879 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst)); | |
6880 var_mem_delete_and_set (out, dstv, !copied_p, | |
6881 status, srcv); | |
6882 } | |
5914 } | 6883 } |
5915 } | 6884 } |
5916 else if (REG_P (uloc)) | 6885 else if (REG_P (uloc)) |
5917 var_regno_delete (out, REGNO (uloc)); | 6886 var_regno_delete (out, REGNO (uloc)); |
5918 | 6887 else if (MEM_P (uloc)) |
5919 val_store (out, val, vloc, insn, true); | 6888 { |
5920 | 6889 gcc_checking_assert (GET_CODE (vloc) == MEM); |
5921 if (reverse) | 6890 gcc_checking_assert (dstv == vloc); |
5922 val_store (out, XEXP (reverse, 0), XEXP (reverse, 1), | 6891 if (dstv != vloc) |
5923 insn, false); | 6892 clobber_overlapping_mems (out, vloc); |
6893 } | |
6894 | |
6895 val_store (out, val, dstv, insn, true); | |
5924 } | 6896 } |
5925 break; | 6897 break; |
5926 | 6898 |
5927 case MO_SET: | 6899 case MO_SET: |
5928 { | 6900 { |
6003 } | 6975 } |
6004 } | 6976 } |
6005 | 6977 |
6006 if (MAY_HAVE_DEBUG_INSNS) | 6978 if (MAY_HAVE_DEBUG_INSNS) |
6007 { | 6979 { |
6980 delete local_get_addr_cache; | |
6981 local_get_addr_cache = NULL; | |
6982 | |
6008 dataflow_set_equiv_regs (out); | 6983 dataflow_set_equiv_regs (out); |
6009 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_mark, | 6984 shared_hash_htab (out->vars) |
6010 out); | 6985 ->traverse <dataflow_set *, canonicalize_values_mark> (out); |
6011 htab_traverse (shared_hash_htab (out->vars), canonicalize_values_star, | 6986 shared_hash_htab (out->vars) |
6012 out); | 6987 ->traverse <dataflow_set *, canonicalize_values_star> (out); |
6013 #if ENABLE_CHECKING | 6988 if (flag_checking) |
6014 htab_traverse (shared_hash_htab (out->vars), | 6989 shared_hash_htab (out->vars) |
6015 canonicalize_loc_order_check, out); | 6990 ->traverse <dataflow_set *, canonicalize_loc_order_check> (out); |
6016 #endif | |
6017 } | 6991 } |
6018 changed = dataflow_set_different (&old_out, out); | 6992 changed = dataflow_set_different (&old_out, out); |
6019 dataflow_set_destroy (&old_out); | 6993 dataflow_set_destroy (&old_out); |
6020 return changed; | 6994 return changed; |
6021 } | 6995 } |
6023 /* Find the locations of variables in the whole function. */ | 6997 /* Find the locations of variables in the whole function. */ |
6024 | 6998 |
6025 static bool | 6999 static bool |
6026 vt_find_locations (void) | 7000 vt_find_locations (void) |
6027 { | 7001 { |
6028 fibheap_t worklist, pending, fibheap_swap; | 7002 bb_heap_t *worklist = new bb_heap_t (LONG_MIN); |
6029 sbitmap visited, in_worklist, in_pending, sbitmap_swap; | 7003 bb_heap_t *pending = new bb_heap_t (LONG_MIN); |
7004 sbitmap in_worklist, in_pending; | |
6030 basic_block bb; | 7005 basic_block bb; |
6031 edge e; | 7006 edge e; |
6032 int *bb_order; | 7007 int *bb_order; |
6033 int *rc_order; | 7008 int *rc_order; |
6034 int i; | 7009 int i; |
6037 bool success = true; | 7012 bool success = true; |
6038 | 7013 |
6039 timevar_push (TV_VAR_TRACKING_DATAFLOW); | 7014 timevar_push (TV_VAR_TRACKING_DATAFLOW); |
6040 /* Compute reverse completion order of depth first search of the CFG | 7015 /* Compute reverse completion order of depth first search of the CFG |
6041 so that the data-flow runs faster. */ | 7016 so that the data-flow runs faster. */ |
6042 rc_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS); | 7017 rc_order = XNEWVEC (int, n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS); |
6043 bb_order = XNEWVEC (int, last_basic_block); | 7018 bb_order = XNEWVEC (int, last_basic_block_for_fn (cfun)); |
6044 pre_and_rev_post_order_compute (NULL, rc_order, false); | 7019 pre_and_rev_post_order_compute (NULL, rc_order, false); |
6045 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++) | 7020 for (i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; i++) |
6046 bb_order[rc_order[i]] = i; | 7021 bb_order[rc_order[i]] = i; |
6047 free (rc_order); | 7022 free (rc_order); |
6048 | 7023 |
6049 worklist = fibheap_new (); | 7024 auto_sbitmap visited (last_basic_block_for_fn (cfun)); |
6050 pending = fibheap_new (); | 7025 in_worklist = sbitmap_alloc (last_basic_block_for_fn (cfun)); |
6051 visited = sbitmap_alloc (last_basic_block); | 7026 in_pending = sbitmap_alloc (last_basic_block_for_fn (cfun)); |
6052 in_worklist = sbitmap_alloc (last_basic_block); | 7027 bitmap_clear (in_worklist); |
6053 in_pending = sbitmap_alloc (last_basic_block); | 7028 |
6054 sbitmap_zero (in_worklist); | 7029 FOR_EACH_BB_FN (bb, cfun) |
6055 | 7030 pending->insert (bb_order[bb->index], bb); |
6056 FOR_EACH_BB (bb) | 7031 bitmap_ones (in_pending); |
6057 fibheap_insert (pending, bb_order[bb->index], bb); | 7032 |
6058 sbitmap_ones (in_pending); | 7033 while (success && !pending->empty ()) |
6059 | 7034 { |
6060 while (success && !fibheap_empty (pending)) | 7035 std::swap (worklist, pending); |
6061 { | 7036 std::swap (in_worklist, in_pending); |
6062 fibheap_swap = pending; | 7037 |
6063 pending = worklist; | 7038 bitmap_clear (visited); |
6064 worklist = fibheap_swap; | 7039 |
6065 sbitmap_swap = in_pending; | 7040 while (!worklist->empty ()) |
6066 in_pending = in_worklist; | 7041 { |
6067 in_worklist = sbitmap_swap; | 7042 bb = worklist->extract_min (); |
6068 | 7043 bitmap_clear_bit (in_worklist, bb->index); |
6069 sbitmap_zero (visited); | 7044 gcc_assert (!bitmap_bit_p (visited, bb->index)); |
6070 | 7045 if (!bitmap_bit_p (visited, bb->index)) |
6071 while (!fibheap_empty (worklist)) | |
6072 { | |
6073 bb = (basic_block) fibheap_extract_min (worklist); | |
6074 RESET_BIT (in_worklist, bb->index); | |
6075 gcc_assert (!TEST_BIT (visited, bb->index)); | |
6076 if (!TEST_BIT (visited, bb->index)) | |
6077 { | 7046 { |
6078 bool changed; | 7047 bool changed; |
6079 edge_iterator ei; | 7048 edge_iterator ei; |
6080 int oldinsz, oldoutsz; | 7049 int oldinsz, oldoutsz; |
6081 | 7050 |
6082 SET_BIT (visited, bb->index); | 7051 bitmap_set_bit (visited, bb->index); |
6083 | 7052 |
6084 if (VTI (bb)->in.vars) | 7053 if (VTI (bb)->in.vars) |
6085 { | 7054 { |
6086 htabsz | 7055 htabsz |
6087 -= (htab_size (shared_hash_htab (VTI (bb)->in.vars)) | 7056 -= shared_hash_htab (VTI (bb)->in.vars)->size () |
6088 + htab_size (shared_hash_htab (VTI (bb)->out.vars))); | 7057 + shared_hash_htab (VTI (bb)->out.vars)->size (); |
6089 oldinsz | 7058 oldinsz = shared_hash_htab (VTI (bb)->in.vars)->elements (); |
6090 = htab_elements (shared_hash_htab (VTI (bb)->in.vars)); | |
6091 oldoutsz | 7059 oldoutsz |
6092 = htab_elements (shared_hash_htab (VTI (bb)->out.vars)); | 7060 = shared_hash_htab (VTI (bb)->out.vars)->elements (); |
6093 } | 7061 } |
6094 else | 7062 else |
6095 oldinsz = oldoutsz = 0; | 7063 oldinsz = oldoutsz = 0; |
6096 | 7064 |
6097 if (MAY_HAVE_DEBUG_INSNS) | 7065 if (MAY_HAVE_DEBUG_INSNS) |
6122 } | 7090 } |
6123 | 7091 |
6124 if (adjust) | 7092 if (adjust) |
6125 { | 7093 { |
6126 dataflow_post_merge_adjust (in, &VTI (bb)->permp); | 7094 dataflow_post_merge_adjust (in, &VTI (bb)->permp); |
6127 #if ENABLE_CHECKING | 7095 |
6128 /* Merge and merge_adjust should keep entries in | 7096 if (flag_checking) |
6129 canonical order. */ | 7097 /* Merge and merge_adjust should keep entries in |
6130 htab_traverse (shared_hash_htab (in->vars), | 7098 canonical order. */ |
6131 canonicalize_loc_order_check, | 7099 shared_hash_htab (in->vars) |
6132 in); | 7100 ->traverse <dataflow_set *, |
6133 #endif | 7101 canonicalize_loc_order_check> (in); |
7102 | |
6134 if (dst_can_be_shared) | 7103 if (dst_can_be_shared) |
6135 { | 7104 { |
6136 shared_hash_destroy (in->vars); | 7105 shared_hash_destroy (in->vars); |
6137 in->vars = shared_hash_copy (first_out->vars); | 7106 in->vars = shared_hash_copy (first_out->vars); |
6138 } | 7107 } |
6147 FOR_EACH_EDGE (e, ei, bb->preds) | 7116 FOR_EACH_EDGE (e, ei, bb->preds) |
6148 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out); | 7117 dataflow_set_union (&VTI (bb)->in, &VTI (e->src)->out); |
6149 } | 7118 } |
6150 | 7119 |
6151 changed = compute_bb_dataflow (bb); | 7120 changed = compute_bb_dataflow (bb); |
6152 htabsz += (htab_size (shared_hash_htab (VTI (bb)->in.vars)) | 7121 htabsz += shared_hash_htab (VTI (bb)->in.vars)->size () |
6153 + htab_size (shared_hash_htab (VTI (bb)->out.vars))); | 7122 + shared_hash_htab (VTI (bb)->out.vars)->size (); |
6154 | 7123 |
6155 if (htabmax && htabsz > htabmax) | 7124 if (htabmax && htabsz > htabmax) |
6156 { | 7125 { |
6157 if (MAY_HAVE_DEBUG_INSNS) | 7126 if (MAY_HAVE_DEBUG_INSNS) |
6158 inform (DECL_SOURCE_LOCATION (cfun->decl), | 7127 inform (DECL_SOURCE_LOCATION (cfun->decl), |
6167 | 7136 |
6168 if (changed) | 7137 if (changed) |
6169 { | 7138 { |
6170 FOR_EACH_EDGE (e, ei, bb->succs) | 7139 FOR_EACH_EDGE (e, ei, bb->succs) |
6171 { | 7140 { |
6172 if (e->dest == EXIT_BLOCK_PTR) | 7141 if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun)) |
6173 continue; | 7142 continue; |
6174 | 7143 |
6175 if (TEST_BIT (visited, e->dest->index)) | 7144 if (bitmap_bit_p (visited, e->dest->index)) |
6176 { | 7145 { |
6177 if (!TEST_BIT (in_pending, e->dest->index)) | 7146 if (!bitmap_bit_p (in_pending, e->dest->index)) |
6178 { | 7147 { |
6179 /* Send E->DEST to next round. */ | 7148 /* Send E->DEST to next round. */ |
6180 SET_BIT (in_pending, e->dest->index); | 7149 bitmap_set_bit (in_pending, e->dest->index); |
6181 fibheap_insert (pending, | 7150 pending->insert (bb_order[e->dest->index], |
6182 bb_order[e->dest->index], | 7151 e->dest); |
6183 e->dest); | |
6184 } | 7152 } |
6185 } | 7153 } |
6186 else if (!TEST_BIT (in_worklist, e->dest->index)) | 7154 else if (!bitmap_bit_p (in_worklist, e->dest->index)) |
6187 { | 7155 { |
6188 /* Add E->DEST to current round. */ | 7156 /* Add E->DEST to current round. */ |
6189 SET_BIT (in_worklist, e->dest->index); | 7157 bitmap_set_bit (in_worklist, e->dest->index); |
6190 fibheap_insert (worklist, bb_order[e->dest->index], | 7158 worklist->insert (bb_order[e->dest->index], |
6191 e->dest); | 7159 e->dest); |
6192 } | 7160 } |
6193 } | 7161 } |
6194 } | 7162 } |
6195 | 7163 |
6196 if (dump_file) | 7164 if (dump_file) |
6197 fprintf (dump_file, | 7165 fprintf (dump_file, |
6198 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n", | 7166 "BB %i: in %i (was %i), out %i (was %i), rem %i + %i, tsz %i\n", |
6199 bb->index, | 7167 bb->index, |
6200 (int)htab_elements (shared_hash_htab (VTI (bb)->in.vars)), | 7168 (int)shared_hash_htab (VTI (bb)->in.vars)->size (), |
6201 oldinsz, | 7169 oldinsz, |
6202 (int)htab_elements (shared_hash_htab (VTI (bb)->out.vars)), | 7170 (int)shared_hash_htab (VTI (bb)->out.vars)->size (), |
6203 oldoutsz, | 7171 oldoutsz, |
6204 (int)worklist->nodes, (int)pending->nodes, htabsz); | 7172 (int)worklist->nodes (), (int)pending->nodes (), |
7173 htabsz); | |
6205 | 7174 |
6206 if (dump_file && (dump_flags & TDF_DETAILS)) | 7175 if (dump_file && (dump_flags & TDF_DETAILS)) |
6207 { | 7176 { |
6208 fprintf (dump_file, "BB %i IN:\n", bb->index); | 7177 fprintf (dump_file, "BB %i IN:\n", bb->index); |
6209 dump_dataflow_set (&VTI (bb)->in); | 7178 dump_dataflow_set (&VTI (bb)->in); |
6213 } | 7182 } |
6214 } | 7183 } |
6215 } | 7184 } |
6216 | 7185 |
6217 if (success && MAY_HAVE_DEBUG_INSNS) | 7186 if (success && MAY_HAVE_DEBUG_INSNS) |
6218 FOR_EACH_BB (bb) | 7187 FOR_EACH_BB_FN (bb, cfun) |
6219 gcc_assert (VTI (bb)->flooded); | 7188 gcc_assert (VTI (bb)->flooded); |
6220 | 7189 |
6221 free (bb_order); | 7190 free (bb_order); |
6222 fibheap_delete (worklist); | 7191 delete worklist; |
6223 fibheap_delete (pending); | 7192 delete pending; |
6224 sbitmap_free (visited); | |
6225 sbitmap_free (in_worklist); | 7193 sbitmap_free (in_worklist); |
6226 sbitmap_free (in_pending); | 7194 sbitmap_free (in_pending); |
6227 | 7195 |
6228 timevar_pop (TV_VAR_TRACKING_DATAFLOW); | 7196 timevar_pop (TV_VAR_TRACKING_DATAFLOW); |
6229 return success; | 7197 return success; |
6230 } | 7198 } |
6231 | 7199 |
6232 /* Print the content of the LIST to dump file. */ | 7200 /* Print the content of the LIST to dump file. */ |
6233 | 7201 |
6234 static void | 7202 static void |
6235 dump_attrs_list (attrs list) | 7203 dump_attrs_list (attrs *list) |
6236 { | 7204 { |
6237 for (; list; list = list->next) | 7205 for (; list; list = list->next) |
6238 { | 7206 { |
6239 if (dv_is_decl_p (list->dv)) | 7207 if (dv_is_decl_p (list->dv)) |
6240 print_mem_expr (dump_file, dv_as_decl (list->dv)); | 7208 print_mem_expr (dump_file, dv_as_decl (list->dv)); |
6245 fprintf (dump_file, "\n"); | 7213 fprintf (dump_file, "\n"); |
6246 } | 7214 } |
6247 | 7215 |
6248 /* Print the information about variable *SLOT to dump file. */ | 7216 /* Print the information about variable *SLOT to dump file. */ |
6249 | 7217 |
6250 static int | 7218 int |
6251 dump_var_slot (void **slot, void *data ATTRIBUTE_UNUSED) | 7219 dump_var_tracking_slot (variable **slot, void *data ATTRIBUTE_UNUSED) |
6252 { | 7220 { |
6253 variable var = (variable) *slot; | 7221 variable *var = *slot; |
6254 | 7222 |
6255 dump_var (var); | 7223 dump_var (var); |
6256 | 7224 |
6257 /* Continue traversing the hash table. */ | 7225 /* Continue traversing the hash table. */ |
6258 return 1; | 7226 return 1; |
6259 } | 7227 } |
6260 | 7228 |
6261 /* Print the information about variable VAR to dump file. */ | 7229 /* Print the information about variable VAR to dump file. */ |
6262 | 7230 |
6263 static void | 7231 static void |
6264 dump_var (variable var) | 7232 dump_var (variable *var) |
6265 { | 7233 { |
6266 int i; | 7234 int i; |
6267 location_chain node; | 7235 location_chain *node; |
6268 | 7236 |
6269 if (dv_is_decl_p (var->dv)) | 7237 if (dv_is_decl_p (var->dv)) |
6270 { | 7238 { |
6271 const_tree decl = dv_as_decl (var->dv); | 7239 const_tree decl = dv_as_decl (var->dv); |
6272 | 7240 |
6290 } | 7258 } |
6291 | 7259 |
6292 for (i = 0; i < var->n_var_parts; i++) | 7260 for (i = 0; i < var->n_var_parts; i++) |
6293 { | 7261 { |
6294 fprintf (dump_file, " offset %ld\n", | 7262 fprintf (dump_file, " offset %ld\n", |
6295 (long) var->var_part[i].offset); | 7263 (long)(var->onepart ? 0 : VAR_PART_OFFSET (var, i))); |
6296 for (node = var->var_part[i].loc_chain; node; node = node->next) | 7264 for (node = var->var_part[i].loc_chain; node; node = node->next) |
6297 { | 7265 { |
6298 fprintf (dump_file, " "); | 7266 fprintf (dump_file, " "); |
6299 if (node->init == VAR_INIT_STATUS_UNINITIALIZED) | 7267 if (node->init == VAR_INIT_STATUS_UNINITIALIZED) |
6300 fprintf (dump_file, "[uninit]"); | 7268 fprintf (dump_file, "[uninit]"); |
6304 } | 7272 } |
6305 | 7273 |
6306 /* Print the information about variables from hash table VARS to dump file. */ | 7274 /* Print the information about variables from hash table VARS to dump file. */ |
6307 | 7275 |
6308 static void | 7276 static void |
6309 dump_vars (htab_t vars) | 7277 dump_vars (variable_table_type *vars) |
6310 { | 7278 { |
6311 if (htab_elements (vars) > 0) | 7279 if (vars->elements () > 0) |
6312 { | 7280 { |
6313 fprintf (dump_file, "Variables:\n"); | 7281 fprintf (dump_file, "Variables:\n"); |
6314 htab_traverse (vars, dump_var_slot, NULL); | 7282 vars->traverse <void *, dump_var_tracking_slot> (NULL); |
6315 } | 7283 } |
6316 } | 7284 } |
6317 | 7285 |
6318 /* Print the dataflow set SET to dump file. */ | 7286 /* Print the dataflow set SET to dump file. */ |
6319 | 7287 |
6341 static void | 7309 static void |
6342 dump_dataflow_sets (void) | 7310 dump_dataflow_sets (void) |
6343 { | 7311 { |
6344 basic_block bb; | 7312 basic_block bb; |
6345 | 7313 |
6346 FOR_EACH_BB (bb) | 7314 FOR_EACH_BB_FN (bb, cfun) |
6347 { | 7315 { |
6348 fprintf (dump_file, "\nBasic block %d:\n", bb->index); | 7316 fprintf (dump_file, "\nBasic block %d:\n", bb->index); |
6349 fprintf (dump_file, "IN:\n"); | 7317 fprintf (dump_file, "IN:\n"); |
6350 dump_dataflow_set (&VTI (bb)->in); | 7318 dump_dataflow_set (&VTI (bb)->in); |
6351 fprintf (dump_file, "OUT:\n"); | 7319 fprintf (dump_file, "OUT:\n"); |
6352 dump_dataflow_set (&VTI (bb)->out); | 7320 dump_dataflow_set (&VTI (bb)->out); |
6353 } | 7321 } |
6354 } | 7322 } |
6355 | 7323 |
7324 /* Return the variable for DV in dropped_values, inserting one if | |
7325 requested with INSERT. */ | |
7326 | |
7327 static inline variable * | |
7328 variable_from_dropped (decl_or_value dv, enum insert_option insert) | |
7329 { | |
7330 variable **slot; | |
7331 variable *empty_var; | |
7332 onepart_enum onepart; | |
7333 | |
7334 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), insert); | |
7335 | |
7336 if (!slot) | |
7337 return NULL; | |
7338 | |
7339 if (*slot) | |
7340 return *slot; | |
7341 | |
7342 gcc_checking_assert (insert == INSERT); | |
7343 | |
7344 onepart = dv_onepart_p (dv); | |
7345 | |
7346 gcc_checking_assert (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR); | |
7347 | |
7348 empty_var = onepart_pool_allocate (onepart); | |
7349 empty_var->dv = dv; | |
7350 empty_var->refcount = 1; | |
7351 empty_var->n_var_parts = 0; | |
7352 empty_var->onepart = onepart; | |
7353 empty_var->in_changed_variables = false; | |
7354 empty_var->var_part[0].loc_chain = NULL; | |
7355 empty_var->var_part[0].cur_loc = NULL; | |
7356 VAR_LOC_1PAUX (empty_var) = NULL; | |
7357 set_dv_changed (dv, true); | |
7358 | |
7359 *slot = empty_var; | |
7360 | |
7361 return empty_var; | |
7362 } | |
7363 | |
7364 /* Recover the one-part aux from dropped_values. */ | |
7365 | |
7366 static struct onepart_aux * | |
7367 recover_dropped_1paux (variable *var) | |
7368 { | |
7369 variable *dvar; | |
7370 | |
7371 gcc_checking_assert (var->onepart); | |
7372 | |
7373 if (VAR_LOC_1PAUX (var)) | |
7374 return VAR_LOC_1PAUX (var); | |
7375 | |
7376 if (var->onepart == ONEPART_VDECL) | |
7377 return NULL; | |
7378 | |
7379 dvar = variable_from_dropped (var->dv, NO_INSERT); | |
7380 | |
7381 if (!dvar) | |
7382 return NULL; | |
7383 | |
7384 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (dvar); | |
7385 VAR_LOC_1PAUX (dvar) = NULL; | |
7386 | |
7387 return VAR_LOC_1PAUX (var); | |
7388 } | |
7389 | |
6356 /* Add variable VAR to the hash table of changed variables and | 7390 /* Add variable VAR to the hash table of changed variables and |
6357 if it has no locations delete it from SET's hash table. */ | 7391 if it has no locations delete it from SET's hash table. */ |
6358 | 7392 |
6359 static void | 7393 static void |
6360 variable_was_changed (variable var, dataflow_set *set) | 7394 variable_was_changed (variable *var, dataflow_set *set) |
6361 { | 7395 { |
6362 hashval_t hash = dv_htab_hash (var->dv); | 7396 hashval_t hash = dv_htab_hash (var->dv); |
6363 | 7397 |
6364 if (emit_notes) | 7398 if (emit_notes) |
6365 { | 7399 { |
6366 void **slot; | 7400 variable **slot; |
6367 bool old_cur_loc_changed = false; | |
6368 | 7401 |
6369 /* Remember this decl or VALUE has been added to changed_variables. */ | 7402 /* Remember this decl or VALUE has been added to changed_variables. */ |
6370 set_dv_changed (var->dv, true); | 7403 set_dv_changed (var->dv, true); |
6371 | 7404 |
6372 slot = htab_find_slot_with_hash (changed_variables, | 7405 slot = changed_variables->find_slot_with_hash (var->dv, hash, INSERT); |
6373 var->dv, | |
6374 hash, INSERT); | |
6375 | 7406 |
6376 if (*slot) | 7407 if (*slot) |
6377 { | 7408 { |
6378 variable old_var = (variable) *slot; | 7409 variable *old_var = *slot; |
6379 gcc_assert (old_var->in_changed_variables); | 7410 gcc_assert (old_var->in_changed_variables); |
6380 old_var->in_changed_variables = false; | 7411 old_var->in_changed_variables = false; |
6381 old_cur_loc_changed = old_var->cur_loc_changed; | 7412 if (var != old_var && var->onepart) |
7413 { | |
7414 /* Restore the auxiliary info from an empty variable | |
7415 previously created for changed_variables, so it is | |
7416 not lost. */ | |
7417 gcc_checking_assert (!VAR_LOC_1PAUX (var)); | |
7418 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (old_var); | |
7419 VAR_LOC_1PAUX (old_var) = NULL; | |
7420 } | |
6382 variable_htab_free (*slot); | 7421 variable_htab_free (*slot); |
6383 } | 7422 } |
7423 | |
6384 if (set && var->n_var_parts == 0) | 7424 if (set && var->n_var_parts == 0) |
6385 { | 7425 { |
6386 variable empty_var; | 7426 onepart_enum onepart = var->onepart; |
6387 | 7427 variable *empty_var = NULL; |
6388 empty_var = (variable) pool_alloc (dv_pool (var->dv)); | 7428 variable **dslot = NULL; |
6389 empty_var->dv = var->dv; | 7429 |
6390 empty_var->refcount = 1; | 7430 if (onepart == ONEPART_VALUE || onepart == ONEPART_DEXPR) |
6391 empty_var->n_var_parts = 0; | 7431 { |
6392 empty_var->cur_loc_changed = true; | 7432 dslot = dropped_values->find_slot_with_hash (var->dv, |
7433 dv_htab_hash (var->dv), | |
7434 INSERT); | |
7435 empty_var = *dslot; | |
7436 | |
7437 if (empty_var) | |
7438 { | |
7439 gcc_checking_assert (!empty_var->in_changed_variables); | |
7440 if (!VAR_LOC_1PAUX (var)) | |
7441 { | |
7442 VAR_LOC_1PAUX (var) = VAR_LOC_1PAUX (empty_var); | |
7443 VAR_LOC_1PAUX (empty_var) = NULL; | |
7444 } | |
7445 else | |
7446 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var)); | |
7447 } | |
7448 } | |
7449 | |
7450 if (!empty_var) | |
7451 { | |
7452 empty_var = onepart_pool_allocate (onepart); | |
7453 empty_var->dv = var->dv; | |
7454 empty_var->refcount = 1; | |
7455 empty_var->n_var_parts = 0; | |
7456 empty_var->onepart = onepart; | |
7457 if (dslot) | |
7458 { | |
7459 empty_var->refcount++; | |
7460 *dslot = empty_var; | |
7461 } | |
7462 } | |
7463 else | |
7464 empty_var->refcount++; | |
6393 empty_var->in_changed_variables = true; | 7465 empty_var->in_changed_variables = true; |
6394 *slot = empty_var; | 7466 *slot = empty_var; |
7467 if (onepart) | |
7468 { | |
7469 empty_var->var_part[0].loc_chain = NULL; | |
7470 empty_var->var_part[0].cur_loc = NULL; | |
7471 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (var); | |
7472 VAR_LOC_1PAUX (var) = NULL; | |
7473 } | |
6395 goto drop_var; | 7474 goto drop_var; |
6396 } | 7475 } |
6397 else | 7476 else |
6398 { | 7477 { |
7478 if (var->onepart && !VAR_LOC_1PAUX (var)) | |
7479 recover_dropped_1paux (var); | |
6399 var->refcount++; | 7480 var->refcount++; |
6400 var->in_changed_variables = true; | 7481 var->in_changed_variables = true; |
6401 /* If within processing one uop a variable is deleted | |
6402 and then readded, we need to assume it has changed. */ | |
6403 if (old_cur_loc_changed) | |
6404 var->cur_loc_changed = true; | |
6405 *slot = var; | 7482 *slot = var; |
6406 } | 7483 } |
6407 } | 7484 } |
6408 else | 7485 else |
6409 { | 7486 { |
6410 gcc_assert (set); | 7487 gcc_assert (set); |
6411 if (var->n_var_parts == 0) | 7488 if (var->n_var_parts == 0) |
6412 { | 7489 { |
6413 void **slot; | 7490 variable **slot; |
6414 | 7491 |
6415 drop_var: | 7492 drop_var: |
6416 slot = shared_hash_find_slot_noinsert (set->vars, var->dv); | 7493 slot = shared_hash_find_slot_noinsert (set->vars, var->dv); |
6417 if (slot) | 7494 if (slot) |
6418 { | 7495 { |
6419 if (shared_hash_shared (set->vars)) | 7496 if (shared_hash_shared (set->vars)) |
6420 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, | 7497 slot = shared_hash_find_slot_unshare (&set->vars, var->dv, |
6421 NO_INSERT); | 7498 NO_INSERT); |
6422 htab_clear_slot (shared_hash_htab (set->vars), slot); | 7499 shared_hash_htab (set->vars)->clear_slot (slot); |
6423 } | 7500 } |
6424 } | 7501 } |
6425 } | 7502 } |
6426 } | 7503 } |
6427 | 7504 |
6429 Return -1 if not found. If INSERTION_POINT is non-NULL, the | 7506 Return -1 if not found. If INSERTION_POINT is non-NULL, the |
6430 referenced int will be set to the index that the part has or should | 7507 referenced int will be set to the index that the part has or should |
6431 have, if it should be inserted. */ | 7508 have, if it should be inserted. */ |
6432 | 7509 |
6433 static inline int | 7510 static inline int |
6434 find_variable_location_part (variable var, HOST_WIDE_INT offset, | 7511 find_variable_location_part (variable *var, HOST_WIDE_INT offset, |
6435 int *insertion_point) | 7512 int *insertion_point) |
6436 { | 7513 { |
6437 int pos, low, high; | 7514 int pos, low, high; |
7515 | |
7516 if (var->onepart) | |
7517 { | |
7518 if (offset != 0) | |
7519 return -1; | |
7520 | |
7521 if (insertion_point) | |
7522 *insertion_point = 0; | |
7523 | |
7524 return var->n_var_parts - 1; | |
7525 } | |
6438 | 7526 |
6439 /* Find the location part. */ | 7527 /* Find the location part. */ |
6440 low = 0; | 7528 low = 0; |
6441 high = var->n_var_parts; | 7529 high = var->n_var_parts; |
6442 while (low != high) | 7530 while (low != high) |
6443 { | 7531 { |
6444 pos = (low + high) / 2; | 7532 pos = (low + high) / 2; |
6445 if (var->var_part[pos].offset < offset) | 7533 if (VAR_PART_OFFSET (var, pos) < offset) |
6446 low = pos + 1; | 7534 low = pos + 1; |
6447 else | 7535 else |
6448 high = pos; | 7536 high = pos; |
6449 } | 7537 } |
6450 pos = low; | 7538 pos = low; |
6451 | 7539 |
6452 if (insertion_point) | 7540 if (insertion_point) |
6453 *insertion_point = pos; | 7541 *insertion_point = pos; |
6454 | 7542 |
6455 if (pos < var->n_var_parts && var->var_part[pos].offset == offset) | 7543 if (pos < var->n_var_parts && VAR_PART_OFFSET (var, pos) == offset) |
6456 return pos; | 7544 return pos; |
6457 | 7545 |
6458 return -1; | 7546 return -1; |
6459 } | 7547 } |
6460 | 7548 |
6461 static void ** | 7549 static variable ** |
6462 set_slot_part (dataflow_set *set, rtx loc, void **slot, | 7550 set_slot_part (dataflow_set *set, rtx loc, variable **slot, |
6463 decl_or_value dv, HOST_WIDE_INT offset, | 7551 decl_or_value dv, HOST_WIDE_INT offset, |
6464 enum var_init_status initialized, rtx set_src) | 7552 enum var_init_status initialized, rtx set_src) |
6465 { | 7553 { |
6466 int pos; | 7554 int pos; |
6467 location_chain node, next; | 7555 location_chain *node, *next; |
6468 location_chain *nextp; | 7556 location_chain **nextp; |
6469 variable var; | 7557 variable *var; |
6470 bool onepart = dv_onepart_p (dv); | 7558 onepart_enum onepart; |
6471 | 7559 |
6472 gcc_assert (offset == 0 || !onepart); | 7560 var = *slot; |
6473 gcc_assert (loc != dv_as_opaque (dv)); | 7561 |
6474 | 7562 if (var) |
6475 var = (variable) *slot; | 7563 onepart = var->onepart; |
7564 else | |
7565 onepart = dv_onepart_p (dv); | |
7566 | |
7567 gcc_checking_assert (offset == 0 || !onepart); | |
7568 gcc_checking_assert (loc != dv_as_opaque (dv)); | |
6476 | 7569 |
6477 if (! flag_var_tracking_uninit) | 7570 if (! flag_var_tracking_uninit) |
6478 initialized = VAR_INIT_STATUS_INITIALIZED; | 7571 initialized = VAR_INIT_STATUS_INITIALIZED; |
6479 | 7572 |
6480 if (!var) | 7573 if (!var) |
6481 { | 7574 { |
6482 /* Create new variable information. */ | 7575 /* Create new variable information. */ |
6483 var = (variable) pool_alloc (dv_pool (dv)); | 7576 var = onepart_pool_allocate (onepart); |
6484 var->dv = dv; | 7577 var->dv = dv; |
6485 var->refcount = 1; | 7578 var->refcount = 1; |
6486 var->n_var_parts = 1; | 7579 var->n_var_parts = 1; |
6487 var->cur_loc_changed = false; | 7580 var->onepart = onepart; |
6488 var->in_changed_variables = false; | 7581 var->in_changed_variables = false; |
6489 var->var_part[0].offset = offset; | 7582 if (var->onepart) |
7583 VAR_LOC_1PAUX (var) = NULL; | |
7584 else | |
7585 VAR_PART_OFFSET (var, 0) = offset; | |
6490 var->var_part[0].loc_chain = NULL; | 7586 var->var_part[0].loc_chain = NULL; |
6491 var->var_part[0].cur_loc = NULL; | 7587 var->var_part[0].cur_loc = NULL; |
6492 *slot = var; | 7588 *slot = var; |
6493 pos = 0; | 7589 pos = 0; |
6494 nextp = &var->var_part[0].loc_chain; | 7590 nextp = &var->var_part[0].loc_chain; |
6582 return slot; | 7678 return slot; |
6583 | 7679 |
6584 if (shared_var_p (var, set->vars)) | 7680 if (shared_var_p (var, set->vars)) |
6585 { | 7681 { |
6586 slot = unshare_variable (set, slot, var, initialized); | 7682 slot = unshare_variable (set, slot, var, initialized); |
6587 var = (variable)*slot; | 7683 var = *slot; |
6588 for (nextp = &var->var_part[0].loc_chain; c; | 7684 for (nextp = &var->var_part[0].loc_chain; c; |
6589 nextp = &(*nextp)->next) | 7685 nextp = &(*nextp)->next) |
6590 c--; | 7686 c--; |
6591 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc); | 7687 gcc_assert ((!node && !*nextp) || node->loc == (*nextp)->loc); |
6592 } | 7688 } |
6621 { | 7717 { |
6622 /* We have to make a copy of a shared variable. */ | 7718 /* We have to make a copy of a shared variable. */ |
6623 if (shared_var_p (var, set->vars)) | 7719 if (shared_var_p (var, set->vars)) |
6624 { | 7720 { |
6625 slot = unshare_variable (set, slot, var, initialized); | 7721 slot = unshare_variable (set, slot, var, initialized); |
6626 var = (variable)*slot; | 7722 var = *slot; |
6627 } | 7723 } |
6628 } | 7724 } |
6629 } | 7725 } |
6630 else | 7726 else |
6631 { | 7727 { |
6633 | 7729 |
6634 /* We have to make a copy of the shared variable. */ | 7730 /* We have to make a copy of the shared variable. */ |
6635 if (shared_var_p (var, set->vars)) | 7731 if (shared_var_p (var, set->vars)) |
6636 { | 7732 { |
6637 slot = unshare_variable (set, slot, var, initialized); | 7733 slot = unshare_variable (set, slot, var, initialized); |
6638 var = (variable)*slot; | 7734 var = *slot; |
6639 } | 7735 } |
6640 | 7736 |
6641 /* We track only variables whose size is <= MAX_VAR_PARTS bytes | 7737 /* We track only variables whose size is <= MAX_VAR_PARTS bytes |
6642 thus there are at most MAX_VAR_PARTS different offsets. */ | 7738 thus there are at most MAX_VAR_PARTS different offsets. */ |
6643 gcc_assert (var->n_var_parts < MAX_VAR_PARTS | 7739 gcc_assert (var->n_var_parts < MAX_VAR_PARTS |
6644 && (!var->n_var_parts || !dv_onepart_p (var->dv))); | 7740 && (!var->n_var_parts || !onepart)); |
6645 | 7741 |
6646 /* We have to move the elements of array starting at index | 7742 /* We have to move the elements of array starting at index |
6647 inspos to the next position. */ | 7743 inspos to the next position. */ |
6648 for (pos = var->n_var_parts; pos > inspos; pos--) | 7744 for (pos = var->n_var_parts; pos > inspos; pos--) |
6649 var->var_part[pos] = var->var_part[pos - 1]; | 7745 var->var_part[pos] = var->var_part[pos - 1]; |
6650 | 7746 |
6651 var->n_var_parts++; | 7747 var->n_var_parts++; |
6652 var->var_part[pos].offset = offset; | 7748 gcc_checking_assert (!onepart); |
7749 VAR_PART_OFFSET (var, pos) = offset; | |
6653 var->var_part[pos].loc_chain = NULL; | 7750 var->var_part[pos].loc_chain = NULL; |
6654 var->var_part[pos].cur_loc = NULL; | 7751 var->var_part[pos].cur_loc = NULL; |
6655 } | 7752 } |
6656 | 7753 |
6657 /* Delete the location from the list. */ | 7754 /* Delete the location from the list. */ |
6668 if (node->init > initialized) | 7765 if (node->init > initialized) |
6669 initialized = node->init; | 7766 initialized = node->init; |
6670 if (node->set_src != NULL && set_src == NULL) | 7767 if (node->set_src != NULL && set_src == NULL) |
6671 set_src = node->set_src; | 7768 set_src = node->set_src; |
6672 if (var->var_part[pos].cur_loc == node->loc) | 7769 if (var->var_part[pos].cur_loc == node->loc) |
6673 { | 7770 var->var_part[pos].cur_loc = NULL; |
6674 var->var_part[pos].cur_loc = NULL; | 7771 delete node; |
6675 var->cur_loc_changed = true; | |
6676 } | |
6677 pool_free (loc_chain_pool, node); | |
6678 *nextp = next; | 7772 *nextp = next; |
6679 break; | 7773 break; |
6680 } | 7774 } |
6681 else | 7775 else |
6682 nextp = &node->next; | 7776 nextp = &node->next; |
6684 | 7778 |
6685 nextp = &var->var_part[pos].loc_chain; | 7779 nextp = &var->var_part[pos].loc_chain; |
6686 } | 7780 } |
6687 | 7781 |
6688 /* Add the location to the beginning. */ | 7782 /* Add the location to the beginning. */ |
6689 node = (location_chain) pool_alloc (loc_chain_pool); | 7783 node = new location_chain; |
6690 node->loc = loc; | 7784 node->loc = loc; |
6691 node->init = initialized; | 7785 node->init = initialized; |
6692 node->set_src = set_src; | 7786 node->set_src = set_src; |
6693 node->next = *nextp; | 7787 node->next = *nextp; |
6694 *nextp = node; | 7788 *nextp = node; |
6695 | 7789 |
6696 if (onepart && emit_notes) | |
6697 add_value_chains (var->dv, loc); | |
6698 | |
6699 /* If no location was emitted do so. */ | 7790 /* If no location was emitted do so. */ |
6700 if (var->var_part[pos].cur_loc == NULL) | 7791 if (var->var_part[pos].cur_loc == NULL) |
6701 variable_was_changed (var, set); | 7792 variable_was_changed (var, set); |
6702 | 7793 |
6703 return slot; | 7794 return slot; |
6713 set_variable_part (dataflow_set *set, rtx loc, | 7804 set_variable_part (dataflow_set *set, rtx loc, |
6714 decl_or_value dv, HOST_WIDE_INT offset, | 7805 decl_or_value dv, HOST_WIDE_INT offset, |
6715 enum var_init_status initialized, rtx set_src, | 7806 enum var_init_status initialized, rtx set_src, |
6716 enum insert_option iopt) | 7807 enum insert_option iopt) |
6717 { | 7808 { |
6718 void **slot; | 7809 variable **slot; |
6719 | 7810 |
6720 if (iopt == NO_INSERT) | 7811 if (iopt == NO_INSERT) |
6721 slot = shared_hash_find_slot_noinsert (set->vars, dv); | 7812 slot = shared_hash_find_slot_noinsert (set->vars, dv); |
6722 else | 7813 else |
6723 { | 7814 { |
6724 slot = shared_hash_find_slot (set->vars, dv); | 7815 slot = shared_hash_find_slot (set->vars, dv); |
6725 if (!slot) | 7816 if (!slot) |
6726 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt); | 7817 slot = shared_hash_find_slot_unshare (&set->vars, dv, iopt); |
6727 } | 7818 } |
6728 slot = set_slot_part (set, loc, slot, dv, offset, initialized, set_src); | 7819 set_slot_part (set, loc, slot, dv, offset, initialized, set_src); |
6729 } | 7820 } |
6730 | 7821 |
6731 /* Remove all recorded register locations for the given variable part | 7822 /* Remove all recorded register locations for the given variable part |
6732 from dataflow set SET, except for those that are identical to loc. | 7823 from dataflow set SET, except for those that are identical to loc. |
6733 The variable part is specified by variable's declaration or value | 7824 The variable part is specified by variable's declaration or value |
6734 DV and offset OFFSET. */ | 7825 DV and offset OFFSET. */ |
6735 | 7826 |
6736 static void ** | 7827 static variable ** |
6737 clobber_slot_part (dataflow_set *set, rtx loc, void **slot, | 7828 clobber_slot_part (dataflow_set *set, rtx loc, variable **slot, |
6738 HOST_WIDE_INT offset, rtx set_src) | 7829 HOST_WIDE_INT offset, rtx set_src) |
6739 { | 7830 { |
6740 variable var = (variable) *slot; | 7831 variable *var = *slot; |
6741 int pos = find_variable_location_part (var, offset, NULL); | 7832 int pos = find_variable_location_part (var, offset, NULL); |
6742 | 7833 |
6743 if (pos >= 0) | 7834 if (pos >= 0) |
6744 { | 7835 { |
6745 location_chain node, next; | 7836 location_chain *node, *next; |
6746 | 7837 |
6747 /* Remove the register locations from the dataflow set. */ | 7838 /* Remove the register locations from the dataflow set. */ |
6748 next = var->var_part[pos].loc_chain; | 7839 next = var->var_part[pos].loc_chain; |
6749 for (node = next; node; node = next) | 7840 for (node = next; node; node = next) |
6750 { | 7841 { |
6755 || MEM_P (set_src) | 7846 || MEM_P (set_src) |
6756 || !rtx_equal_p (set_src, node->set_src))) | 7847 || !rtx_equal_p (set_src, node->set_src))) |
6757 { | 7848 { |
6758 if (REG_P (node->loc)) | 7849 if (REG_P (node->loc)) |
6759 { | 7850 { |
6760 attrs anode, anext; | 7851 attrs *anode, *anext; |
6761 attrs *anextp; | 7852 attrs **anextp; |
6762 | 7853 |
6763 /* Remove the variable part from the register's | 7854 /* Remove the variable part from the register's |
6764 list, but preserve any other variable parts | 7855 list, but preserve any other variable parts |
6765 that might be regarded as live in that same | 7856 that might be regarded as live in that same |
6766 register. */ | 7857 register. */ |
6769 { | 7860 { |
6770 anext = anode->next; | 7861 anext = anode->next; |
6771 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv) | 7862 if (dv_as_opaque (anode->dv) == dv_as_opaque (var->dv) |
6772 && anode->offset == offset) | 7863 && anode->offset == offset) |
6773 { | 7864 { |
6774 pool_free (attrs_pool, anode); | 7865 delete anode; |
6775 *anextp = anext; | 7866 *anextp = anext; |
6776 } | 7867 } |
6777 else | 7868 else |
6778 anextp = &anode->next; | 7869 anextp = &anode->next; |
6779 } | 7870 } |
6794 | 7885 |
6795 static void | 7886 static void |
6796 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv, | 7887 clobber_variable_part (dataflow_set *set, rtx loc, decl_or_value dv, |
6797 HOST_WIDE_INT offset, rtx set_src) | 7888 HOST_WIDE_INT offset, rtx set_src) |
6798 { | 7889 { |
6799 void **slot; | 7890 variable **slot; |
6800 | 7891 |
6801 if (!dv_as_opaque (dv) | 7892 if (!dv_as_opaque (dv) |
6802 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv)))) | 7893 || (!dv_is_value_p (dv) && ! DECL_P (dv_as_decl (dv)))) |
6803 return; | 7894 return; |
6804 | 7895 |
6805 slot = shared_hash_find_slot_noinsert (set->vars, dv); | 7896 slot = shared_hash_find_slot_noinsert (set->vars, dv); |
6806 if (!slot) | 7897 if (!slot) |
6807 return; | 7898 return; |
6808 | 7899 |
6809 slot = clobber_slot_part (set, loc, slot, offset, set_src); | 7900 clobber_slot_part (set, loc, slot, offset, set_src); |
6810 } | 7901 } |
6811 | 7902 |
6812 /* Delete the part of variable's location from dataflow set SET. The | 7903 /* Delete the part of variable's location from dataflow set SET. The |
6813 variable part is specified by its SET->vars slot SLOT and offset | 7904 variable part is specified by its SET->vars slot SLOT and offset |
6814 OFFSET and the part's location by LOC. */ | 7905 OFFSET and the part's location by LOC. */ |
6815 | 7906 |
6816 static void ** | 7907 static variable ** |
6817 delete_slot_part (dataflow_set *set, rtx loc, void **slot, | 7908 delete_slot_part (dataflow_set *set, rtx loc, variable **slot, |
6818 HOST_WIDE_INT offset) | 7909 HOST_WIDE_INT offset) |
6819 { | 7910 { |
6820 variable var = (variable) *slot; | 7911 variable *var = *slot; |
6821 int pos = find_variable_location_part (var, offset, NULL); | 7912 int pos = find_variable_location_part (var, offset, NULL); |
6822 | 7913 |
6823 if (pos >= 0) | 7914 if (pos >= 0) |
6824 { | 7915 { |
6825 location_chain node, next; | 7916 location_chain *node, *next; |
6826 location_chain *nextp; | 7917 location_chain **nextp; |
6827 bool changed; | 7918 bool changed; |
7919 rtx cur_loc; | |
6828 | 7920 |
6829 if (shared_var_p (var, set->vars)) | 7921 if (shared_var_p (var, set->vars)) |
6830 { | 7922 { |
6831 /* If the variable contains the location part we have to | 7923 /* If the variable contains the location part we have to |
6832 make a copy of the variable. */ | 7924 make a copy of the variable. */ |
6837 && REGNO (node->loc) == REGNO (loc)) | 7929 && REGNO (node->loc) == REGNO (loc)) |
6838 || rtx_equal_p (node->loc, loc)) | 7930 || rtx_equal_p (node->loc, loc)) |
6839 { | 7931 { |
6840 slot = unshare_variable (set, slot, var, | 7932 slot = unshare_variable (set, slot, var, |
6841 VAR_INIT_STATUS_UNKNOWN); | 7933 VAR_INIT_STATUS_UNKNOWN); |
6842 var = (variable)*slot; | 7934 var = *slot; |
6843 break; | 7935 break; |
6844 } | 7936 } |
6845 } | 7937 } |
6846 } | 7938 } |
7939 | |
7940 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var)) | |
7941 cur_loc = VAR_LOC_FROM (var); | |
7942 else | |
7943 cur_loc = var->var_part[pos].cur_loc; | |
6847 | 7944 |
6848 /* Delete the location part. */ | 7945 /* Delete the location part. */ |
6849 changed = false; | 7946 changed = false; |
6850 nextp = &var->var_part[pos].loc_chain; | 7947 nextp = &var->var_part[pos].loc_chain; |
6851 for (node = *nextp; node; node = next) | 7948 for (node = *nextp; node; node = next) |
6853 next = node->next; | 7950 next = node->next; |
6854 if ((REG_P (node->loc) && REG_P (loc) | 7951 if ((REG_P (node->loc) && REG_P (loc) |
6855 && REGNO (node->loc) == REGNO (loc)) | 7952 && REGNO (node->loc) == REGNO (loc)) |
6856 || rtx_equal_p (node->loc, loc)) | 7953 || rtx_equal_p (node->loc, loc)) |
6857 { | 7954 { |
6858 if (emit_notes && pos == 0 && dv_onepart_p (var->dv)) | |
6859 remove_value_chains (var->dv, node->loc); | |
6860 /* If we have deleted the location which was last emitted | 7955 /* If we have deleted the location which was last emitted |
6861 we have to emit new location so add the variable to set | 7956 we have to emit new location so add the variable to set |
6862 of changed variables. */ | 7957 of changed variables. */ |
6863 if (var->var_part[pos].cur_loc == node->loc) | 7958 if (cur_loc == node->loc) |
6864 { | 7959 { |
6865 changed = true; | 7960 changed = true; |
6866 var->var_part[pos].cur_loc = NULL; | 7961 var->var_part[pos].cur_loc = NULL; |
6867 var->cur_loc_changed = true; | 7962 if (pos == 0 && var->onepart && VAR_LOC_1PAUX (var)) |
7963 VAR_LOC_FROM (var) = NULL; | |
6868 } | 7964 } |
6869 pool_free (loc_chain_pool, node); | 7965 delete node; |
6870 *nextp = next; | 7966 *nextp = next; |
6871 break; | 7967 break; |
6872 } | 7968 } |
6873 else | 7969 else |
6874 nextp = &node->next; | 7970 nextp = &node->next; |
6876 | 7972 |
6877 if (var->var_part[pos].loc_chain == NULL) | 7973 if (var->var_part[pos].loc_chain == NULL) |
6878 { | 7974 { |
6879 changed = true; | 7975 changed = true; |
6880 var->n_var_parts--; | 7976 var->n_var_parts--; |
6881 if (emit_notes) | |
6882 var->cur_loc_changed = true; | |
6883 while (pos < var->n_var_parts) | 7977 while (pos < var->n_var_parts) |
6884 { | 7978 { |
6885 var->var_part[pos] = var->var_part[pos + 1]; | 7979 var->var_part[pos] = var->var_part[pos + 1]; |
6886 pos++; | 7980 pos++; |
6887 } | 7981 } |
6899 | 7993 |
6900 static void | 7994 static void |
6901 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv, | 7995 delete_variable_part (dataflow_set *set, rtx loc, decl_or_value dv, |
6902 HOST_WIDE_INT offset) | 7996 HOST_WIDE_INT offset) |
6903 { | 7997 { |
6904 void **slot = shared_hash_find_slot_noinsert (set->vars, dv); | 7998 variable **slot = shared_hash_find_slot_noinsert (set->vars, dv); |
6905 if (!slot) | 7999 if (!slot) |
6906 return; | 8000 return; |
6907 | 8001 |
6908 slot = delete_slot_part (set, loc, slot, offset); | 8002 delete_slot_part (set, loc, slot, offset); |
6909 } | 8003 } |
8004 | |
6910 | 8005 |
6911 /* Structure for passing some other parameters to function | 8006 /* Structure for passing some other parameters to function |
6912 vt_expand_loc_callback. */ | 8007 vt_expand_loc_callback. */ |
6913 struct expand_loc_callback_data | 8008 struct expand_loc_callback_data |
6914 { | 8009 { |
6915 /* The variables and values active at this point. */ | 8010 /* The variables and values active at this point. */ |
6916 htab_t vars; | 8011 variable_table_type *vars; |
6917 | 8012 |
6918 /* True in vt_expand_loc_dummy calls, no rtl should be allocated. | 8013 /* Stack of values and debug_exprs under expansion, and their |
6919 Non-NULL should be returned if vt_expand_loc would return | 8014 children. */ |
6920 non-NULL in that case, NULL otherwise. cur_loc_changed should be | 8015 auto_vec<rtx, 4> expanding; |
6921 computed and cur_loc recomputed when possible (but just once | 8016 |
6922 per emit_notes_for_changes call). */ | 8017 /* Stack of values and debug_exprs whose expansion hit recursion |
6923 bool dummy; | 8018 cycles. They will have VALUE_RECURSED_INTO marked when added to |
6924 | 8019 this list. This flag will be cleared if any of its dependencies |
6925 /* True if expansion of subexpressions had to recompute some | 8020 resolves to a valid location. So, if the flag remains set at the |
6926 VALUE/DEBUG_EXPR_DECL's cur_loc or used a VALUE/DEBUG_EXPR_DECL | 8021 end of the search, we know no valid location for this one can |
6927 whose cur_loc has been already recomputed during current | 8022 possibly exist. */ |
6928 emit_notes_for_changes call. */ | 8023 auto_vec<rtx, 4> pending; |
6929 bool cur_loc_changed; | 8024 |
8025 /* The maximum depth among the sub-expressions under expansion. | |
8026 Zero indicates no expansion so far. */ | |
8027 expand_depth depth; | |
6930 }; | 8028 }; |
8029 | |
8030 /* Allocate the one-part auxiliary data structure for VAR, with enough | |
8031 room for COUNT dependencies. */ | |
8032 | |
8033 static void | |
8034 loc_exp_dep_alloc (variable *var, int count) | |
8035 { | |
8036 size_t allocsize; | |
8037 | |
8038 gcc_checking_assert (var->onepart); | |
8039 | |
8040 /* We can be called with COUNT == 0 to allocate the data structure | |
8041 without any dependencies, e.g. for the backlinks only. However, | |
8042 if we are specifying a COUNT, then the dependency list must have | |
8043 been emptied before. It would be possible to adjust pointers or | |
8044 force it empty here, but this is better done at an earlier point | |
8045 in the algorithm, so we instead leave an assertion to catch | |
8046 errors. */ | |
8047 gcc_checking_assert (!count | |
8048 || VAR_LOC_DEP_VEC (var) == NULL | |
8049 || VAR_LOC_DEP_VEC (var)->is_empty ()); | |
8050 | |
8051 if (VAR_LOC_1PAUX (var) && VAR_LOC_DEP_VEC (var)->space (count)) | |
8052 return; | |
8053 | |
8054 allocsize = offsetof (struct onepart_aux, deps) | |
8055 + vec<loc_exp_dep, va_heap, vl_embed>::embedded_size (count); | |
8056 | |
8057 if (VAR_LOC_1PAUX (var)) | |
8058 { | |
8059 VAR_LOC_1PAUX (var) = XRESIZEVAR (struct onepart_aux, | |
8060 VAR_LOC_1PAUX (var), allocsize); | |
8061 /* If the reallocation moves the onepaux structure, the | |
8062 back-pointer to BACKLINKS in the first list member will still | |
8063 point to its old location. Adjust it. */ | |
8064 if (VAR_LOC_DEP_LST (var)) | |
8065 VAR_LOC_DEP_LST (var)->pprev = VAR_LOC_DEP_LSTP (var); | |
8066 } | |
8067 else | |
8068 { | |
8069 VAR_LOC_1PAUX (var) = XNEWVAR (struct onepart_aux, allocsize); | |
8070 *VAR_LOC_DEP_LSTP (var) = NULL; | |
8071 VAR_LOC_FROM (var) = NULL; | |
8072 VAR_LOC_DEPTH (var).complexity = 0; | |
8073 VAR_LOC_DEPTH (var).entryvals = 0; | |
8074 } | |
8075 VAR_LOC_DEP_VEC (var)->embedded_init (count); | |
8076 } | |
8077 | |
8078 /* Remove all entries from the vector of active dependencies of VAR, | |
8079 removing them from the back-links lists too. */ | |
8080 | |
8081 static void | |
8082 loc_exp_dep_clear (variable *var) | |
8083 { | |
8084 while (VAR_LOC_DEP_VEC (var) && !VAR_LOC_DEP_VEC (var)->is_empty ()) | |
8085 { | |
8086 loc_exp_dep *led = &VAR_LOC_DEP_VEC (var)->last (); | |
8087 if (led->next) | |
8088 led->next->pprev = led->pprev; | |
8089 if (led->pprev) | |
8090 *led->pprev = led->next; | |
8091 VAR_LOC_DEP_VEC (var)->pop (); | |
8092 } | |
8093 } | |
8094 | |
8095 /* Insert an active dependency from VAR on X to the vector of | |
8096 dependencies, and add the corresponding back-link to X's list of | |
8097 back-links in VARS. */ | |
8098 | |
8099 static void | |
8100 loc_exp_insert_dep (variable *var, rtx x, variable_table_type *vars) | |
8101 { | |
8102 decl_or_value dv; | |
8103 variable *xvar; | |
8104 loc_exp_dep *led; | |
8105 | |
8106 dv = dv_from_rtx (x); | |
8107 | |
8108 /* ??? Build a vector of variables parallel to EXPANDING, to avoid | |
8109 an additional look up? */ | |
8110 xvar = vars->find_with_hash (dv, dv_htab_hash (dv)); | |
8111 | |
8112 if (!xvar) | |
8113 { | |
8114 xvar = variable_from_dropped (dv, NO_INSERT); | |
8115 gcc_checking_assert (xvar); | |
8116 } | |
8117 | |
8118 /* No point in adding the same backlink more than once. This may | |
8119 arise if say the same value appears in two complex expressions in | |
8120 the same loc_list, or even more than once in a single | |
8121 expression. */ | |
8122 if (VAR_LOC_DEP_LST (xvar) && VAR_LOC_DEP_LST (xvar)->dv == var->dv) | |
8123 return; | |
8124 | |
8125 if (var->onepart == NOT_ONEPART) | |
8126 led = new loc_exp_dep; | |
8127 else | |
8128 { | |
8129 loc_exp_dep empty; | |
8130 memset (&empty, 0, sizeof (empty)); | |
8131 VAR_LOC_DEP_VEC (var)->quick_push (empty); | |
8132 led = &VAR_LOC_DEP_VEC (var)->last (); | |
8133 } | |
8134 led->dv = var->dv; | |
8135 led->value = x; | |
8136 | |
8137 loc_exp_dep_alloc (xvar, 0); | |
8138 led->pprev = VAR_LOC_DEP_LSTP (xvar); | |
8139 led->next = *led->pprev; | |
8140 if (led->next) | |
8141 led->next->pprev = &led->next; | |
8142 *led->pprev = led; | |
8143 } | |
8144 | |
8145 /* Create active dependencies of VAR on COUNT values starting at | |
8146 VALUE, and corresponding back-links to the entries in VARS. Return | |
8147 true if we found any pending-recursion results. */ | |
8148 | |
8149 static bool | |
8150 loc_exp_dep_set (variable *var, rtx result, rtx *value, int count, | |
8151 variable_table_type *vars) | |
8152 { | |
8153 bool pending_recursion = false; | |
8154 | |
8155 gcc_checking_assert (VAR_LOC_DEP_VEC (var) == NULL | |
8156 || VAR_LOC_DEP_VEC (var)->is_empty ()); | |
8157 | |
8158 /* Set up all dependencies from last_child (as set up at the end of | |
8159 the loop above) to the end. */ | |
8160 loc_exp_dep_alloc (var, count); | |
8161 | |
8162 while (count--) | |
8163 { | |
8164 rtx x = *value++; | |
8165 | |
8166 if (!pending_recursion) | |
8167 pending_recursion = !result && VALUE_RECURSED_INTO (x); | |
8168 | |
8169 loc_exp_insert_dep (var, x, vars); | |
8170 } | |
8171 | |
8172 return pending_recursion; | |
8173 } | |
8174 | |
8175 /* Notify the back-links of IVAR that are pending recursion that we | |
8176 have found a non-NIL value for it, so they are cleared for another | |
8177 attempt to compute a current location. */ | |
8178 | |
8179 static void | |
8180 notify_dependents_of_resolved_value (variable *ivar, variable_table_type *vars) | |
8181 { | |
8182 loc_exp_dep *led, *next; | |
8183 | |
8184 for (led = VAR_LOC_DEP_LST (ivar); led; led = next) | |
8185 { | |
8186 decl_or_value dv = led->dv; | |
8187 variable *var; | |
8188 | |
8189 next = led->next; | |
8190 | |
8191 if (dv_is_value_p (dv)) | |
8192 { | |
8193 rtx value = dv_as_value (dv); | |
8194 | |
8195 /* If we have already resolved it, leave it alone. */ | |
8196 if (!VALUE_RECURSED_INTO (value)) | |
8197 continue; | |
8198 | |
8199 /* Check that VALUE_RECURSED_INTO, true from the test above, | |
8200 implies NO_LOC_P. */ | |
8201 gcc_checking_assert (NO_LOC_P (value)); | |
8202 | |
8203 /* We won't notify variables that are being expanded, | |
8204 because their dependency list is cleared before | |
8205 recursing. */ | |
8206 NO_LOC_P (value) = false; | |
8207 VALUE_RECURSED_INTO (value) = false; | |
8208 | |
8209 gcc_checking_assert (dv_changed_p (dv)); | |
8210 } | |
8211 else | |
8212 { | |
8213 gcc_checking_assert (dv_onepart_p (dv) != NOT_ONEPART); | |
8214 if (!dv_changed_p (dv)) | |
8215 continue; | |
8216 } | |
8217 | |
8218 var = vars->find_with_hash (dv, dv_htab_hash (dv)); | |
8219 | |
8220 if (!var) | |
8221 var = variable_from_dropped (dv, NO_INSERT); | |
8222 | |
8223 if (var) | |
8224 notify_dependents_of_resolved_value (var, vars); | |
8225 | |
8226 if (next) | |
8227 next->pprev = led->pprev; | |
8228 if (led->pprev) | |
8229 *led->pprev = next; | |
8230 led->next = NULL; | |
8231 led->pprev = NULL; | |
8232 } | |
8233 } | |
8234 | |
8235 static rtx vt_expand_loc_callback (rtx x, bitmap regs, | |
8236 int max_depth, void *data); | |
8237 | |
8238 /* Return the combined depth, when one sub-expression evaluated to | |
8239 BEST_DEPTH and the previous known depth was SAVED_DEPTH. */ | |
8240 | |
8241 static inline expand_depth | |
8242 update_depth (expand_depth saved_depth, expand_depth best_depth) | |
8243 { | |
8244 /* If we didn't find anything, stick with what we had. */ | |
8245 if (!best_depth.complexity) | |
8246 return saved_depth; | |
8247 | |
8248 /* If we found hadn't found anything, use the depth of the current | |
8249 expression. Do NOT add one extra level, we want to compute the | |
8250 maximum depth among sub-expressions. We'll increment it later, | |
8251 if appropriate. */ | |
8252 if (!saved_depth.complexity) | |
8253 return best_depth; | |
8254 | |
8255 /* Combine the entryval count so that regardless of which one we | |
8256 return, the entryval count is accurate. */ | |
8257 best_depth.entryvals = saved_depth.entryvals | |
8258 = best_depth.entryvals + saved_depth.entryvals; | |
8259 | |
8260 if (saved_depth.complexity < best_depth.complexity) | |
8261 return best_depth; | |
8262 else | |
8263 return saved_depth; | |
8264 } | |
8265 | |
8266 /* Expand VAR to a location RTX, updating its cur_loc. Use REGS and | |
8267 DATA for cselib expand callback. If PENDRECP is given, indicate in | |
8268 it whether any sub-expression couldn't be fully evaluated because | |
8269 it is pending recursion resolution. */ | |
8270 | |
8271 static inline rtx | |
8272 vt_expand_var_loc_chain (variable *var, bitmap regs, void *data, | |
8273 bool *pendrecp) | |
8274 { | |
8275 struct expand_loc_callback_data *elcd | |
8276 = (struct expand_loc_callback_data *) data; | |
8277 location_chain *loc, *next; | |
8278 rtx result = NULL; | |
8279 int first_child, result_first_child, last_child; | |
8280 bool pending_recursion; | |
8281 rtx loc_from = NULL; | |
8282 struct elt_loc_list *cloc = NULL; | |
8283 expand_depth depth = { 0, 0 }, saved_depth = elcd->depth; | |
8284 int wanted_entryvals, found_entryvals = 0; | |
8285 | |
8286 /* Clear all backlinks pointing at this, so that we're not notified | |
8287 while we're active. */ | |
8288 loc_exp_dep_clear (var); | |
8289 | |
8290 retry: | |
8291 if (var->onepart == ONEPART_VALUE) | |
8292 { | |
8293 cselib_val *val = CSELIB_VAL_PTR (dv_as_value (var->dv)); | |
8294 | |
8295 gcc_checking_assert (cselib_preserved_value_p (val)); | |
8296 | |
8297 cloc = val->locs; | |
8298 } | |
8299 | |
8300 first_child = result_first_child = last_child | |
8301 = elcd->expanding.length (); | |
8302 | |
8303 wanted_entryvals = found_entryvals; | |
8304 | |
8305 /* Attempt to expand each available location in turn. */ | |
8306 for (next = loc = var->n_var_parts ? var->var_part[0].loc_chain : NULL; | |
8307 loc || cloc; loc = next) | |
8308 { | |
8309 result_first_child = last_child; | |
8310 | |
8311 if (!loc) | |
8312 { | |
8313 loc_from = cloc->loc; | |
8314 next = loc; | |
8315 cloc = cloc->next; | |
8316 if (unsuitable_loc (loc_from)) | |
8317 continue; | |
8318 } | |
8319 else | |
8320 { | |
8321 loc_from = loc->loc; | |
8322 next = loc->next; | |
8323 } | |
8324 | |
8325 gcc_checking_assert (!unsuitable_loc (loc_from)); | |
8326 | |
8327 elcd->depth.complexity = elcd->depth.entryvals = 0; | |
8328 result = cselib_expand_value_rtx_cb (loc_from, regs, EXPR_DEPTH, | |
8329 vt_expand_loc_callback, data); | |
8330 last_child = elcd->expanding.length (); | |
8331 | |
8332 if (result) | |
8333 { | |
8334 depth = elcd->depth; | |
8335 | |
8336 gcc_checking_assert (depth.complexity | |
8337 || result_first_child == last_child); | |
8338 | |
8339 if (last_child - result_first_child != 1) | |
8340 { | |
8341 if (!depth.complexity && GET_CODE (result) == ENTRY_VALUE) | |
8342 depth.entryvals++; | |
8343 depth.complexity++; | |
8344 } | |
8345 | |
8346 if (depth.complexity <= EXPR_USE_DEPTH) | |
8347 { | |
8348 if (depth.entryvals <= wanted_entryvals) | |
8349 break; | |
8350 else if (!found_entryvals || depth.entryvals < found_entryvals) | |
8351 found_entryvals = depth.entryvals; | |
8352 } | |
8353 | |
8354 result = NULL; | |
8355 } | |
8356 | |
8357 /* Set it up in case we leave the loop. */ | |
8358 depth.complexity = depth.entryvals = 0; | |
8359 loc_from = NULL; | |
8360 result_first_child = first_child; | |
8361 } | |
8362 | |
8363 if (!loc_from && wanted_entryvals < found_entryvals) | |
8364 { | |
8365 /* We found entries with ENTRY_VALUEs and skipped them. Since | |
8366 we could not find any expansions without ENTRY_VALUEs, but we | |
8367 found at least one with them, go back and get an entry with | |
8368 the minimum number ENTRY_VALUE count that we found. We could | |
8369 avoid looping, but since each sub-loc is already resolved, | |
8370 the re-expansion should be trivial. ??? Should we record all | |
8371 attempted locs as dependencies, so that we retry the | |
8372 expansion should any of them change, in the hope it can give | |
8373 us a new entry without an ENTRY_VALUE? */ | |
8374 elcd->expanding.truncate (first_child); | |
8375 goto retry; | |
8376 } | |
8377 | |
8378 /* Register all encountered dependencies as active. */ | |
8379 pending_recursion = loc_exp_dep_set | |
8380 (var, result, elcd->expanding.address () + result_first_child, | |
8381 last_child - result_first_child, elcd->vars); | |
8382 | |
8383 elcd->expanding.truncate (first_child); | |
8384 | |
8385 /* Record where the expansion came from. */ | |
8386 gcc_checking_assert (!result || !pending_recursion); | |
8387 VAR_LOC_FROM (var) = loc_from; | |
8388 VAR_LOC_DEPTH (var) = depth; | |
8389 | |
8390 gcc_checking_assert (!depth.complexity == !result); | |
8391 | |
8392 elcd->depth = update_depth (saved_depth, depth); | |
8393 | |
8394 /* Indicate whether any of the dependencies are pending recursion | |
8395 resolution. */ | |
8396 if (pendrecp) | |
8397 *pendrecp = pending_recursion; | |
8398 | |
8399 if (!pendrecp || !pending_recursion) | |
8400 var->var_part[0].cur_loc = result; | |
8401 | |
8402 return result; | |
8403 } | |
6931 | 8404 |
6932 /* Callback for cselib_expand_value, that looks for expressions | 8405 /* Callback for cselib_expand_value, that looks for expressions |
6933 holding the value in the var-tracking hash tables. Return X for | 8406 holding the value in the var-tracking hash tables. Return X for |
6934 standard processing, anything else is to be used as-is. */ | 8407 standard processing, anything else is to be used as-is. */ |
6935 | 8408 |
6936 static rtx | 8409 static rtx |
6937 vt_expand_loc_callback (rtx x, bitmap regs, int max_depth, void *data) | 8410 vt_expand_loc_callback (rtx x, bitmap regs, |
8411 int max_depth ATTRIBUTE_UNUSED, | |
8412 void *data) | |
6938 { | 8413 { |
6939 struct expand_loc_callback_data *elcd | 8414 struct expand_loc_callback_data *elcd |
6940 = (struct expand_loc_callback_data *) data; | 8415 = (struct expand_loc_callback_data *) data; |
6941 bool dummy = elcd->dummy; | |
6942 bool cur_loc_changed = elcd->cur_loc_changed; | |
6943 decl_or_value dv; | 8416 decl_or_value dv; |
6944 variable var; | 8417 variable *var; |
6945 location_chain loc; | 8418 rtx result, subreg; |
6946 rtx result, subreg, xret; | 8419 bool pending_recursion = false; |
8420 bool from_empty = false; | |
6947 | 8421 |
6948 switch (GET_CODE (x)) | 8422 switch (GET_CODE (x)) |
6949 { | 8423 { |
6950 case SUBREG: | 8424 case SUBREG: |
6951 if (dummy) | |
6952 { | |
6953 if (cselib_dummy_expand_value_rtx_cb (SUBREG_REG (x), regs, | |
6954 max_depth - 1, | |
6955 vt_expand_loc_callback, data)) | |
6956 return pc_rtx; | |
6957 else | |
6958 return NULL; | |
6959 } | |
6960 | |
6961 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs, | 8425 subreg = cselib_expand_value_rtx_cb (SUBREG_REG (x), regs, |
6962 max_depth - 1, | 8426 EXPR_DEPTH, |
6963 vt_expand_loc_callback, data); | 8427 vt_expand_loc_callback, data); |
6964 | 8428 |
6965 if (!subreg) | 8429 if (!subreg) |
6966 return NULL; | 8430 return NULL; |
6967 | 8431 |
6975 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x)); | 8439 result = gen_rtx_raw_SUBREG (GET_MODE (x), subreg, SUBREG_BYTE (x)); |
6976 | 8440 |
6977 return result; | 8441 return result; |
6978 | 8442 |
6979 case DEBUG_EXPR: | 8443 case DEBUG_EXPR: |
6980 dv = dv_from_decl (DEBUG_EXPR_TREE_DECL (x)); | |
6981 xret = NULL; | |
6982 break; | |
6983 | |
6984 case VALUE: | 8444 case VALUE: |
6985 dv = dv_from_value (x); | 8445 dv = dv_from_rtx (x); |
6986 xret = x; | |
6987 break; | 8446 break; |
6988 | 8447 |
6989 default: | 8448 default: |
6990 return x; | 8449 return x; |
6991 } | 8450 } |
6992 | 8451 |
6993 if (VALUE_RECURSED_INTO (x)) | 8452 elcd->expanding.safe_push (x); |
6994 return NULL; | 8453 |
6995 | 8454 /* Check that VALUE_RECURSED_INTO implies NO_LOC_P. */ |
6996 var = (variable) htab_find_with_hash (elcd->vars, dv, dv_htab_hash (dv)); | 8455 gcc_checking_assert (!VALUE_RECURSED_INTO (x) || NO_LOC_P (x)); |
8456 | |
8457 if (NO_LOC_P (x)) | |
8458 { | |
8459 gcc_checking_assert (VALUE_RECURSED_INTO (x) || !dv_changed_p (dv)); | |
8460 return NULL; | |
8461 } | |
8462 | |
8463 var = elcd->vars->find_with_hash (dv, dv_htab_hash (dv)); | |
6997 | 8464 |
6998 if (!var) | 8465 if (!var) |
6999 { | 8466 { |
7000 if (dummy && dv_changed_p (dv)) | 8467 from_empty = true; |
7001 elcd->cur_loc_changed = true; | 8468 var = variable_from_dropped (dv, INSERT); |
7002 return xret; | 8469 } |
7003 } | 8470 |
7004 | 8471 gcc_checking_assert (var); |
7005 if (var->n_var_parts == 0) | 8472 |
7006 { | 8473 if (!dv_changed_p (dv)) |
7007 if (dummy) | 8474 { |
7008 elcd->cur_loc_changed = true; | 8475 gcc_checking_assert (!NO_LOC_P (x)); |
7009 return xret; | 8476 gcc_checking_assert (var->var_part[0].cur_loc); |
7010 } | 8477 gcc_checking_assert (VAR_LOC_1PAUX (var)); |
7011 | 8478 gcc_checking_assert (VAR_LOC_1PAUX (var)->depth.complexity); |
7012 gcc_assert (var->n_var_parts == 1); | 8479 |
8480 elcd->depth = update_depth (elcd->depth, VAR_LOC_1PAUX (var)->depth); | |
8481 | |
8482 return var->var_part[0].cur_loc; | |
8483 } | |
7013 | 8484 |
7014 VALUE_RECURSED_INTO (x) = true; | 8485 VALUE_RECURSED_INTO (x) = true; |
7015 result = NULL; | 8486 /* This is tentative, but it makes some tests simpler. */ |
7016 | 8487 NO_LOC_P (x) = true; |
7017 if (var->var_part[0].cur_loc) | 8488 |
7018 { | 8489 gcc_checking_assert (var->n_var_parts == 1 || from_empty); |
7019 if (dummy) | 8490 |
7020 { | 8491 result = vt_expand_var_loc_chain (var, regs, data, &pending_recursion); |
7021 if (cselib_dummy_expand_value_rtx_cb (var->var_part[0].cur_loc, regs, | 8492 |
7022 max_depth, | 8493 if (pending_recursion) |
7023 vt_expand_loc_callback, data)) | 8494 { |
7024 result = pc_rtx; | 8495 gcc_checking_assert (!result); |
7025 } | 8496 elcd->pending.safe_push (x); |
7026 else | 8497 } |
7027 result = cselib_expand_value_rtx_cb (var->var_part[0].cur_loc, regs, | 8498 else |
7028 max_depth, | 8499 { |
7029 vt_expand_loc_callback, data); | 8500 NO_LOC_P (x) = !result; |
8501 VALUE_RECURSED_INTO (x) = false; | |
8502 set_dv_changed (dv, false); | |
8503 | |
7030 if (result) | 8504 if (result) |
7031 set_dv_changed (dv, false); | 8505 notify_dependents_of_resolved_value (var, elcd->vars); |
7032 } | 8506 } |
7033 if (!result && dv_changed_p (dv)) | 8507 |
7034 { | 8508 return result; |
8509 } | |
8510 | |
8511 /* While expanding variables, we may encounter recursion cycles | |
8512 because of mutual (possibly indirect) dependencies between two | |
8513 particular variables (or values), say A and B. If we're trying to | |
8514 expand A when we get to B, which in turn attempts to expand A, if | |
8515 we can't find any other expansion for B, we'll add B to this | |
8516 pending-recursion stack, and tentatively return NULL for its | |
8517 location. This tentative value will be used for any other | |
8518 occurrences of B, unless A gets some other location, in which case | |
8519 it will notify B that it is worth another try at computing a | |
8520 location for it, and it will use the location computed for A then. | |
8521 At the end of the expansion, the tentative NULL locations become | |
8522 final for all members of PENDING that didn't get a notification. | |
8523 This function performs this finalization of NULL locations. */ | |
8524 | |
8525 static void | |
8526 resolve_expansions_pending_recursion (vec<rtx, va_heap> *pending) | |
8527 { | |
8528 while (!pending->is_empty ()) | |
8529 { | |
8530 rtx x = pending->pop (); | |
8531 decl_or_value dv; | |
8532 | |
8533 if (!VALUE_RECURSED_INTO (x)) | |
8534 continue; | |
8535 | |
8536 gcc_checking_assert (NO_LOC_P (x)); | |
8537 VALUE_RECURSED_INTO (x) = false; | |
8538 dv = dv_from_rtx (x); | |
8539 gcc_checking_assert (dv_changed_p (dv)); | |
7035 set_dv_changed (dv, false); | 8540 set_dv_changed (dv, false); |
7036 for (loc = var->var_part[0].loc_chain; loc; loc = loc->next) | 8541 } |
7037 if (loc->loc == var->var_part[0].cur_loc) | 8542 } |
7038 continue; | 8543 |
7039 else if (dummy) | 8544 /* Initialize expand_loc_callback_data D with variable hash table V. |
7040 { | 8545 It must be a macro because of alloca (vec stack). */ |
7041 elcd->cur_loc_changed = cur_loc_changed; | 8546 #define INIT_ELCD(d, v) \ |
7042 if (cselib_dummy_expand_value_rtx_cb (loc->loc, regs, max_depth, | 8547 do \ |
7043 vt_expand_loc_callback, | 8548 { \ |
7044 data)) | 8549 (d).vars = (v); \ |
7045 { | 8550 (d).depth.complexity = (d).depth.entryvals = 0; \ |
7046 result = pc_rtx; | 8551 } \ |
7047 break; | 8552 while (0) |
7048 } | 8553 /* Finalize expand_loc_callback_data D, resolved to location L. */ |
7049 } | 8554 #define FINI_ELCD(d, l) \ |
7050 else | 8555 do \ |
7051 { | 8556 { \ |
7052 result = cselib_expand_value_rtx_cb (loc->loc, regs, max_depth, | 8557 resolve_expansions_pending_recursion (&(d).pending); \ |
7053 vt_expand_loc_callback, data); | 8558 (d).pending.release (); \ |
7054 if (result) | 8559 (d).expanding.release (); \ |
7055 break; | 8560 \ |
7056 } | 8561 if ((l) && MEM_P (l)) \ |
7057 if (dummy && (result || var->var_part[0].cur_loc)) | 8562 (l) = targetm.delegitimize_address (l); \ |
7058 var->cur_loc_changed = true; | 8563 } \ |
7059 var->var_part[0].cur_loc = loc ? loc->loc : NULL_RTX; | 8564 while (0) |
7060 } | 8565 |
7061 if (dummy) | 8566 /* Expand VALUEs and DEBUG_EXPRs in LOC to a location, using the |
7062 { | 8567 equivalences in VARS, updating their CUR_LOCs in the process. */ |
7063 if (var->cur_loc_changed) | |
7064 elcd->cur_loc_changed = true; | |
7065 else if (!result && var->var_part[0].cur_loc == NULL_RTX) | |
7066 elcd->cur_loc_changed = cur_loc_changed; | |
7067 } | |
7068 | |
7069 VALUE_RECURSED_INTO (x) = false; | |
7070 if (result) | |
7071 return result; | |
7072 else | |
7073 return xret; | |
7074 } | |
7075 | |
7076 /* Expand VALUEs in LOC, using VARS as well as cselib's equivalence | |
7077 tables. */ | |
7078 | 8568 |
7079 static rtx | 8569 static rtx |
7080 vt_expand_loc (rtx loc, htab_t vars) | 8570 vt_expand_loc (rtx loc, variable_table_type *vars) |
7081 { | 8571 { |
7082 struct expand_loc_callback_data data; | 8572 struct expand_loc_callback_data data; |
8573 rtx result; | |
7083 | 8574 |
7084 if (!MAY_HAVE_DEBUG_INSNS) | 8575 if (!MAY_HAVE_DEBUG_INSNS) |
7085 return loc; | 8576 return loc; |
7086 | 8577 |
7087 data.vars = vars; | 8578 INIT_ELCD (data, vars); |
7088 data.dummy = false; | 8579 |
7089 data.cur_loc_changed = false; | 8580 result = cselib_expand_value_rtx_cb (loc, scratch_regs, EXPR_DEPTH, |
7090 loc = cselib_expand_value_rtx_cb (loc, scratch_regs, 8, | 8581 vt_expand_loc_callback, &data); |
7091 vt_expand_loc_callback, &data); | 8582 |
7092 | 8583 FINI_ELCD (data, result); |
7093 if (loc && MEM_P (loc)) | 8584 |
7094 loc = targetm.delegitimize_address (loc); | 8585 return result; |
8586 } | |
8587 | |
8588 /* Expand the one-part VARiable to a location, using the equivalences | |
8589 in VARS, updating their CUR_LOCs in the process. */ | |
8590 | |
8591 static rtx | |
8592 vt_expand_1pvar (variable *var, variable_table_type *vars) | |
8593 { | |
8594 struct expand_loc_callback_data data; | |
8595 rtx loc; | |
8596 | |
8597 gcc_checking_assert (var->onepart && var->n_var_parts == 1); | |
8598 | |
8599 if (!dv_changed_p (var->dv)) | |
8600 return var->var_part[0].cur_loc; | |
8601 | |
8602 INIT_ELCD (data, vars); | |
8603 | |
8604 loc = vt_expand_var_loc_chain (var, scratch_regs, &data, NULL); | |
8605 | |
8606 gcc_checking_assert (data.expanding.is_empty ()); | |
8607 | |
8608 FINI_ELCD (data, loc); | |
8609 | |
7095 return loc; | 8610 return loc; |
7096 } | |
7097 | |
7098 /* Like vt_expand_loc, but only return true/false (whether vt_expand_loc | |
7099 would succeed or not, without actually allocating new rtxes. */ | |
7100 | |
7101 static bool | |
7102 vt_expand_loc_dummy (rtx loc, htab_t vars, bool *pcur_loc_changed) | |
7103 { | |
7104 struct expand_loc_callback_data data; | |
7105 bool ret; | |
7106 | |
7107 gcc_assert (MAY_HAVE_DEBUG_INSNS); | |
7108 data.vars = vars; | |
7109 data.dummy = true; | |
7110 data.cur_loc_changed = false; | |
7111 ret = cselib_dummy_expand_value_rtx_cb (loc, scratch_regs, 8, | |
7112 vt_expand_loc_callback, &data); | |
7113 *pcur_loc_changed = data.cur_loc_changed; | |
7114 return ret; | |
7115 } | 8611 } |
7116 | 8612 |
7117 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains | 8613 /* Emit the NOTE_INSN_VAR_LOCATION for variable *VARP. DATA contains |
7118 additional parameters: WHERE specifies whether the note shall be emitted | 8614 additional parameters: WHERE specifies whether the note shall be emitted |
7119 before or after instruction INSN. */ | 8615 before or after instruction INSN. */ |
7120 | 8616 |
7121 static int | 8617 int |
7122 emit_note_insn_var_location (void **varp, void *data) | 8618 emit_note_insn_var_location (variable **varp, emit_note_data *data) |
7123 { | 8619 { |
7124 variable var = (variable) *varp; | 8620 variable *var = *varp; |
7125 rtx insn = ((emit_note_data *)data)->insn; | 8621 rtx_insn *insn = data->insn; |
7126 enum emit_note_where where = ((emit_note_data *)data)->where; | 8622 enum emit_note_where where = data->where; |
7127 htab_t vars = ((emit_note_data *)data)->vars; | 8623 variable_table_type *vars = data->vars; |
7128 rtx note, note_vl; | 8624 rtx_note *note; |
8625 rtx note_vl; | |
7129 int i, j, n_var_parts; | 8626 int i, j, n_var_parts; |
7130 bool complete; | 8627 bool complete; |
7131 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED; | 8628 enum var_init_status initialized = VAR_INIT_STATUS_UNINITIALIZED; |
7132 HOST_WIDE_INT last_limit; | 8629 HOST_WIDE_INT last_limit; |
7133 tree type_size_unit; | 8630 tree type_size_unit; |
7134 HOST_WIDE_INT offsets[MAX_VAR_PARTS]; | 8631 HOST_WIDE_INT offsets[MAX_VAR_PARTS]; |
7135 rtx loc[MAX_VAR_PARTS]; | 8632 rtx loc[MAX_VAR_PARTS]; |
7136 tree decl; | 8633 tree decl; |
7137 location_chain lc; | 8634 location_chain *lc; |
7138 | 8635 |
7139 if (dv_is_value_p (var->dv)) | 8636 gcc_checking_assert (var->onepart == NOT_ONEPART |
7140 goto value_or_debug_decl; | 8637 || var->onepart == ONEPART_VDECL); |
7141 | 8638 |
7142 decl = dv_as_decl (var->dv); | 8639 decl = dv_as_decl (var->dv); |
7143 | |
7144 if (TREE_CODE (decl) == DEBUG_EXPR_DECL) | |
7145 goto value_or_debug_decl; | |
7146 | 8640 |
7147 complete = true; | 8641 complete = true; |
7148 last_limit = 0; | 8642 last_limit = 0; |
7149 n_var_parts = 0; | 8643 n_var_parts = 0; |
7150 if (!MAY_HAVE_DEBUG_INSNS) | 8644 if (!var->onepart) |
7151 { | 8645 for (i = 0; i < var->n_var_parts; i++) |
7152 for (i = 0; i < var->n_var_parts; i++) | 8646 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain) |
7153 if (var->var_part[i].cur_loc == NULL && var->var_part[i].loc_chain) | 8647 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc; |
7154 { | |
7155 var->var_part[i].cur_loc = var->var_part[i].loc_chain->loc; | |
7156 var->cur_loc_changed = true; | |
7157 } | |
7158 if (var->n_var_parts == 0) | |
7159 var->cur_loc_changed = true; | |
7160 } | |
7161 if (!var->cur_loc_changed) | |
7162 goto clear; | |
7163 for (i = 0; i < var->n_var_parts; i++) | 8648 for (i = 0; i < var->n_var_parts; i++) |
7164 { | 8649 { |
7165 enum machine_mode mode, wider_mode; | 8650 machine_mode mode, wider_mode; |
7166 rtx loc2; | 8651 rtx loc2; |
7167 | 8652 HOST_WIDE_INT offset; |
7168 if (last_limit < var->var_part[i].offset) | 8653 |
7169 { | 8654 if (i == 0 && var->onepart) |
7170 complete = false; | 8655 { |
7171 break; | 8656 gcc_checking_assert (var->n_var_parts == 1); |
7172 } | 8657 offset = 0; |
7173 else if (last_limit > var->var_part[i].offset) | 8658 initialized = VAR_INIT_STATUS_INITIALIZED; |
7174 continue; | 8659 loc2 = vt_expand_1pvar (var, vars); |
7175 offsets[n_var_parts] = var->var_part[i].offset; | 8660 } |
7176 if (!var->var_part[i].cur_loc) | 8661 else |
8662 { | |
8663 if (last_limit < VAR_PART_OFFSET (var, i)) | |
8664 { | |
8665 complete = false; | |
8666 break; | |
8667 } | |
8668 else if (last_limit > VAR_PART_OFFSET (var, i)) | |
8669 continue; | |
8670 offset = VAR_PART_OFFSET (var, i); | |
8671 loc2 = var->var_part[i].cur_loc; | |
8672 if (loc2 && GET_CODE (loc2) == MEM | |
8673 && GET_CODE (XEXP (loc2, 0)) == VALUE) | |
8674 { | |
8675 rtx depval = XEXP (loc2, 0); | |
8676 | |
8677 loc2 = vt_expand_loc (loc2, vars); | |
8678 | |
8679 if (loc2) | |
8680 loc_exp_insert_dep (var, depval, vars); | |
8681 } | |
8682 if (!loc2) | |
8683 { | |
8684 complete = false; | |
8685 continue; | |
8686 } | |
8687 gcc_checking_assert (GET_CODE (loc2) != VALUE); | |
8688 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next) | |
8689 if (var->var_part[i].cur_loc == lc->loc) | |
8690 { | |
8691 initialized = lc->init; | |
8692 break; | |
8693 } | |
8694 gcc_assert (lc); | |
8695 } | |
8696 | |
8697 offsets[n_var_parts] = offset; | |
8698 if (!loc2) | |
7177 { | 8699 { |
7178 complete = false; | 8700 complete = false; |
7179 continue; | 8701 continue; |
7180 } | 8702 } |
7181 loc2 = vt_expand_loc (var->var_part[i].cur_loc, vars); | |
7182 if (!loc2) | |
7183 { | |
7184 complete = false; | |
7185 continue; | |
7186 } | |
7187 loc[n_var_parts] = loc2; | 8703 loc[n_var_parts] = loc2; |
7188 mode = GET_MODE (var->var_part[i].cur_loc); | 8704 mode = GET_MODE (var->var_part[i].cur_loc); |
7189 if (mode == VOIDmode && dv_onepart_p (var->dv)) | 8705 if (mode == VOIDmode && var->onepart) |
7190 mode = DECL_MODE (decl); | 8706 mode = DECL_MODE (decl); |
7191 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next) | |
7192 if (var->var_part[i].cur_loc == lc->loc) | |
7193 { | |
7194 initialized = lc->init; | |
7195 break; | |
7196 } | |
7197 gcc_assert (lc); | |
7198 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode); | 8707 last_limit = offsets[n_var_parts] + GET_MODE_SIZE (mode); |
7199 | 8708 |
7200 /* Attempt to merge adjacent registers or memory. */ | 8709 /* Attempt to merge adjacent registers or memory. */ |
7201 wider_mode = GET_MODE_WIDER_MODE (mode); | |
7202 for (j = i + 1; j < var->n_var_parts; j++) | 8710 for (j = i + 1; j < var->n_var_parts; j++) |
7203 if (last_limit <= var->var_part[j].offset) | 8711 if (last_limit <= VAR_PART_OFFSET (var, j)) |
7204 break; | 8712 break; |
7205 if (j < var->n_var_parts | 8713 if (j < var->n_var_parts |
7206 && wider_mode != VOIDmode | 8714 && GET_MODE_WIDER_MODE (mode).exists (&wider_mode) |
7207 && var->var_part[j].cur_loc | 8715 && var->var_part[j].cur_loc |
7208 && mode == GET_MODE (var->var_part[j].cur_loc) | 8716 && mode == GET_MODE (var->var_part[j].cur_loc) |
7209 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts])) | 8717 && (REG_P (loc[n_var_parts]) || MEM_P (loc[n_var_parts])) |
7210 && last_limit == var->var_part[j].offset | 8718 && last_limit == (var->onepart ? 0 : VAR_PART_OFFSET (var, j)) |
7211 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars)) | 8719 && (loc2 = vt_expand_loc (var->var_part[j].cur_loc, vars)) |
7212 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2)) | 8720 && GET_CODE (loc[n_var_parts]) == GET_CODE (loc2)) |
7213 { | 8721 { |
7214 rtx new_loc = NULL; | 8722 rtx new_loc = NULL; |
7215 | 8723 |
7216 if (REG_P (loc[n_var_parts]) | 8724 if (REG_P (loc[n_var_parts]) |
7217 && hard_regno_nregs[REGNO (loc[n_var_parts])][mode] * 2 | 8725 && hard_regno_nregs (REGNO (loc[n_var_parts]), mode) * 2 |
7218 == hard_regno_nregs[REGNO (loc[n_var_parts])][wider_mode] | 8726 == hard_regno_nregs (REGNO (loc[n_var_parts]), wider_mode) |
7219 && end_hard_regno (mode, REGNO (loc[n_var_parts])) | 8727 && end_hard_regno (mode, REGNO (loc[n_var_parts])) |
7220 == REGNO (loc2)) | 8728 == REGNO (loc2)) |
7221 { | 8729 { |
7222 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN) | 8730 if (! WORDS_BIG_ENDIAN && ! BYTES_BIG_ENDIAN) |
7223 new_loc = simplify_subreg (wider_mode, loc[n_var_parts], | 8731 new_loc = simplify_subreg (wider_mode, loc[n_var_parts], |
7271 if (! flag_var_tracking_uninit) | 8779 if (! flag_var_tracking_uninit) |
7272 initialized = VAR_INIT_STATUS_INITIALIZED; | 8780 initialized = VAR_INIT_STATUS_INITIALIZED; |
7273 | 8781 |
7274 note_vl = NULL_RTX; | 8782 note_vl = NULL_RTX; |
7275 if (!complete) | 8783 if (!complete) |
7276 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX, | 8784 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, NULL_RTX, initialized); |
7277 (int) initialized); | |
7278 else if (n_var_parts == 1) | 8785 else if (n_var_parts == 1) |
7279 { | 8786 { |
7280 rtx expr_list; | 8787 rtx expr_list; |
7281 | 8788 |
7282 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL) | 8789 if (offsets[0] || GET_CODE (loc[0]) == PARALLEL) |
7283 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0])); | 8790 expr_list = gen_rtx_EXPR_LIST (VOIDmode, loc[0], GEN_INT (offsets[0])); |
7284 else | 8791 else |
7285 expr_list = loc[0]; | 8792 expr_list = loc[0]; |
7286 | 8793 |
7287 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list, | 8794 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, expr_list, initialized); |
7288 (int) initialized); | |
7289 } | 8795 } |
7290 else if (n_var_parts) | 8796 else if (n_var_parts) |
7291 { | 8797 { |
7292 rtx parallel; | 8798 rtx parallel; |
7293 | 8799 |
7296 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i])); | 8802 = gen_rtx_EXPR_LIST (VOIDmode, loc[i], GEN_INT (offsets[i])); |
7297 | 8803 |
7298 parallel = gen_rtx_PARALLEL (VOIDmode, | 8804 parallel = gen_rtx_PARALLEL (VOIDmode, |
7299 gen_rtvec_v (n_var_parts, loc)); | 8805 gen_rtvec_v (n_var_parts, loc)); |
7300 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, | 8806 note_vl = gen_rtx_VAR_LOCATION (VOIDmode, decl, |
7301 parallel, (int) initialized); | 8807 parallel, initialized); |
7302 } | 8808 } |
7303 | 8809 |
7304 if (where != EMIT_NOTE_BEFORE_INSN) | 8810 if (where != EMIT_NOTE_BEFORE_INSN) |
7305 { | 8811 { |
7306 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn); | 8812 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn); |
7310 else | 8816 else |
7311 { | 8817 { |
7312 /* Make sure that the call related notes come first. */ | 8818 /* Make sure that the call related notes come first. */ |
7313 while (NEXT_INSN (insn) | 8819 while (NEXT_INSN (insn) |
7314 && NOTE_P (insn) | 8820 && NOTE_P (insn) |
7315 && NOTE_DURING_CALL_P (insn)) | 8821 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION |
8822 && NOTE_DURING_CALL_P (insn)) | |
8823 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION)) | |
7316 insn = NEXT_INSN (insn); | 8824 insn = NEXT_INSN (insn); |
7317 if (NOTE_P (insn) && NOTE_DURING_CALL_P (insn)) | 8825 if (NOTE_P (insn) |
8826 && ((NOTE_KIND (insn) == NOTE_INSN_VAR_LOCATION | |
8827 && NOTE_DURING_CALL_P (insn)) | |
8828 || NOTE_KIND (insn) == NOTE_INSN_CALL_ARG_LOCATION)) | |
7318 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn); | 8829 note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn); |
7319 else | 8830 else |
7320 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn); | 8831 note = emit_note_before (NOTE_INSN_VAR_LOCATION, insn); |
7321 } | 8832 } |
7322 NOTE_VAR_LOCATION (note) = note_vl; | 8833 NOTE_VAR_LOCATION (note) = note_vl; |
7323 | 8834 |
7324 clear: | |
7325 set_dv_changed (var->dv, false); | 8835 set_dv_changed (var->dv, false); |
7326 var->cur_loc_changed = false; | |
7327 gcc_assert (var->in_changed_variables); | 8836 gcc_assert (var->in_changed_variables); |
7328 var->in_changed_variables = false; | 8837 var->in_changed_variables = false; |
7329 htab_clear_slot (changed_variables, varp); | 8838 changed_variables->clear_slot (varp); |
7330 | 8839 |
7331 /* Continue traversing the hash table. */ | 8840 /* Continue traversing the hash table. */ |
7332 return 1; | 8841 return 1; |
7333 | 8842 } |
7334 value_or_debug_decl: | 8843 |
7335 if (dv_changed_p (var->dv) && var->n_var_parts) | 8844 /* While traversing changed_variables, push onto DATA (a stack of RTX |
7336 { | 8845 values) entries that aren't user variables. */ |
7337 location_chain lc; | 8846 |
7338 bool cur_loc_changed; | 8847 int |
7339 | 8848 var_track_values_to_stack (variable **slot, |
7340 if (var->var_part[0].cur_loc | 8849 vec<rtx, va_heap> *changed_values_stack) |
7341 && vt_expand_loc_dummy (var->var_part[0].cur_loc, vars, | 8850 { |
7342 &cur_loc_changed)) | 8851 variable *var = *slot; |
7343 goto clear; | 8852 |
7344 for (lc = var->var_part[0].loc_chain; lc; lc = lc->next) | 8853 if (var->onepart == ONEPART_VALUE) |
7345 if (lc->loc != var->var_part[0].cur_loc | 8854 changed_values_stack->safe_push (dv_as_value (var->dv)); |
7346 && vt_expand_loc_dummy (lc->loc, vars, &cur_loc_changed)) | 8855 else if (var->onepart == ONEPART_DEXPR) |
8856 changed_values_stack->safe_push (DECL_RTL_KNOWN_SET (dv_as_decl (var->dv))); | |
8857 | |
8858 return 1; | |
8859 } | |
8860 | |
8861 /* Remove from changed_variables the entry whose DV corresponds to | |
8862 value or debug_expr VAL. */ | |
8863 static void | |
8864 remove_value_from_changed_variables (rtx val) | |
8865 { | |
8866 decl_or_value dv = dv_from_rtx (val); | |
8867 variable **slot; | |
8868 variable *var; | |
8869 | |
8870 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv), | |
8871 NO_INSERT); | |
8872 var = *slot; | |
8873 var->in_changed_variables = false; | |
8874 changed_variables->clear_slot (slot); | |
8875 } | |
8876 | |
8877 /* If VAL (a value or debug_expr) has backlinks to variables actively | |
8878 dependent on it in HTAB or in CHANGED_VARIABLES, mark them as | |
8879 changed, adding to CHANGED_VALUES_STACK any dependencies that may | |
8880 have dependencies of their own to notify. */ | |
8881 | |
8882 static void | |
8883 notify_dependents_of_changed_value (rtx val, variable_table_type *htab, | |
8884 vec<rtx, va_heap> *changed_values_stack) | |
8885 { | |
8886 variable **slot; | |
8887 variable *var; | |
8888 loc_exp_dep *led; | |
8889 decl_or_value dv = dv_from_rtx (val); | |
8890 | |
8891 slot = changed_variables->find_slot_with_hash (dv, dv_htab_hash (dv), | |
8892 NO_INSERT); | |
8893 if (!slot) | |
8894 slot = htab->find_slot_with_hash (dv, dv_htab_hash (dv), NO_INSERT); | |
8895 if (!slot) | |
8896 slot = dropped_values->find_slot_with_hash (dv, dv_htab_hash (dv), | |
8897 NO_INSERT); | |
8898 var = *slot; | |
8899 | |
8900 while ((led = VAR_LOC_DEP_LST (var))) | |
8901 { | |
8902 decl_or_value ldv = led->dv; | |
8903 variable *ivar; | |
8904 | |
8905 /* Deactivate and remove the backlink, as it was “used up”. It | |
8906 makes no sense to attempt to notify the same entity again: | |
8907 either it will be recomputed and re-register an active | |
8908 dependency, or it will still have the changed mark. */ | |
8909 if (led->next) | |
8910 led->next->pprev = led->pprev; | |
8911 if (led->pprev) | |
8912 *led->pprev = led->next; | |
8913 led->next = NULL; | |
8914 led->pprev = NULL; | |
8915 | |
8916 if (dv_changed_p (ldv)) | |
8917 continue; | |
8918 | |
8919 switch (dv_onepart_p (ldv)) | |
8920 { | |
8921 case ONEPART_VALUE: | |
8922 case ONEPART_DEXPR: | |
8923 set_dv_changed (ldv, true); | |
8924 changed_values_stack->safe_push (dv_as_rtx (ldv)); | |
7347 break; | 8925 break; |
7348 var->var_part[0].cur_loc = lc ? lc->loc : NULL_RTX; | 8926 |
7349 } | 8927 case ONEPART_VDECL: |
7350 goto clear; | 8928 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv)); |
7351 } | 8929 gcc_checking_assert (!VAR_LOC_DEP_LST (ivar)); |
7352 | 8930 variable_was_changed (ivar, NULL); |
7353 DEF_VEC_P (variable); | 8931 break; |
7354 DEF_VEC_ALLOC_P (variable, heap); | 8932 |
7355 | 8933 case NOT_ONEPART: |
7356 /* Stack of variable_def pointers that need processing with | 8934 delete led; |
7357 check_changed_vars_2. */ | 8935 ivar = htab->find_with_hash (ldv, dv_htab_hash (ldv)); |
7358 | 8936 if (ivar) |
7359 static VEC (variable, heap) *changed_variables_stack; | 8937 { |
7360 | 8938 int i = ivar->n_var_parts; |
7361 /* VALUEs with no variables that need set_dv_changed (val, false) | 8939 while (i--) |
7362 called before check_changed_vars_3. */ | 8940 { |
7363 | 8941 rtx loc = ivar->var_part[i].cur_loc; |
7364 static VEC (rtx, heap) *changed_values_stack; | 8942 |
7365 | 8943 if (loc && GET_CODE (loc) == MEM |
7366 /* Helper function for check_changed_vars_1 and check_changed_vars_2. */ | 8944 && XEXP (loc, 0) == val) |
8945 { | |
8946 variable_was_changed (ivar, NULL); | |
8947 break; | |
8948 } | |
8949 } | |
8950 } | |
8951 break; | |
8952 | |
8953 default: | |
8954 gcc_unreachable (); | |
8955 } | |
8956 } | |
8957 } | |
8958 | |
8959 /* Take out of changed_variables any entries that don't refer to use | |
8960 variables. Back-propagate change notifications from values and | |
8961 debug_exprs to their active dependencies in HTAB or in | |
8962 CHANGED_VARIABLES. */ | |
7367 | 8963 |
7368 static void | 8964 static void |
7369 check_changed_vars_0 (decl_or_value dv, htab_t htab) | 8965 process_changed_values (variable_table_type *htab) |
7370 { | 8966 { |
7371 value_chain vc | 8967 int i, n; |
7372 = (value_chain) htab_find_with_hash (value_chains, dv, dv_htab_hash (dv)); | 8968 rtx val; |
7373 | 8969 auto_vec<rtx, 20> changed_values_stack; |
7374 if (vc == NULL) | 8970 |
8971 /* Move values from changed_variables to changed_values_stack. */ | |
8972 changed_variables | |
8973 ->traverse <vec<rtx, va_heap>*, var_track_values_to_stack> | |
8974 (&changed_values_stack); | |
8975 | |
8976 /* Back-propagate change notifications in values while popping | |
8977 them from the stack. */ | |
8978 for (n = i = changed_values_stack.length (); | |
8979 i > 0; i = changed_values_stack.length ()) | |
8980 { | |
8981 val = changed_values_stack.pop (); | |
8982 notify_dependents_of_changed_value (val, htab, &changed_values_stack); | |
8983 | |
8984 /* This condition will hold when visiting each of the entries | |
8985 originally in changed_variables. We can't remove them | |
8986 earlier because this could drop the backlinks before we got a | |
8987 chance to use them. */ | |
8988 if (i == n) | |
8989 { | |
8990 remove_value_from_changed_variables (val); | |
8991 n--; | |
8992 } | |
8993 } | |
8994 } | |
8995 | |
8996 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain | |
8997 CHANGED_VARIABLES and delete this chain. WHERE specifies whether | |
8998 the notes shall be emitted before of after instruction INSN. */ | |
8999 | |
9000 static void | |
9001 emit_notes_for_changes (rtx_insn *insn, enum emit_note_where where, | |
9002 shared_hash *vars) | |
9003 { | |
9004 emit_note_data data; | |
9005 variable_table_type *htab = shared_hash_htab (vars); | |
9006 | |
9007 if (!changed_variables->elements ()) | |
7375 return; | 9008 return; |
7376 for (vc = vc->next; vc; vc = vc->next) | |
7377 if (!dv_changed_p (vc->dv)) | |
7378 { | |
7379 variable vcvar | |
7380 = (variable) htab_find_with_hash (htab, vc->dv, | |
7381 dv_htab_hash (vc->dv)); | |
7382 if (vcvar) | |
7383 { | |
7384 set_dv_changed (vc->dv, true); | |
7385 VEC_safe_push (variable, heap, changed_variables_stack, vcvar); | |
7386 } | |
7387 else if (dv_is_value_p (vc->dv)) | |
7388 { | |
7389 set_dv_changed (vc->dv, true); | |
7390 VEC_safe_push (rtx, heap, changed_values_stack, | |
7391 dv_as_value (vc->dv)); | |
7392 check_changed_vars_0 (vc->dv, htab); | |
7393 } | |
7394 } | |
7395 } | |
7396 | |
7397 /* Populate changed_variables_stack with variable_def pointers | |
7398 that need variable_was_changed called on them. */ | |
7399 | |
7400 static int | |
7401 check_changed_vars_1 (void **slot, void *data) | |
7402 { | |
7403 variable var = (variable) *slot; | |
7404 htab_t htab = (htab_t) data; | |
7405 | |
7406 if (dv_is_value_p (var->dv) | |
7407 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL) | |
7408 check_changed_vars_0 (var->dv, htab); | |
7409 return 1; | |
7410 } | |
7411 | |
7412 /* Add VAR to changed_variables and also for VALUEs add recursively | |
7413 all DVs that aren't in changed_variables yet but reference the | |
7414 VALUE from its loc_chain. */ | |
7415 | |
7416 static void | |
7417 check_changed_vars_2 (variable var, htab_t htab) | |
7418 { | |
7419 variable_was_changed (var, NULL); | |
7420 if (dv_is_value_p (var->dv) | |
7421 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL) | |
7422 check_changed_vars_0 (var->dv, htab); | |
7423 } | |
7424 | |
7425 /* For each changed decl (except DEBUG_EXPR_DECLs) recompute | |
7426 cur_loc if needed (and cur_loc of all VALUEs and DEBUG_EXPR_DECLs | |
7427 it needs and are also in changed variables) and track whether | |
7428 cur_loc (or anything it uses to compute location) had to change | |
7429 during the current emit_notes_for_changes call. */ | |
7430 | |
7431 static int | |
7432 check_changed_vars_3 (void **slot, void *data) | |
7433 { | |
7434 variable var = (variable) *slot; | |
7435 htab_t vars = (htab_t) data; | |
7436 int i; | |
7437 location_chain lc; | |
7438 bool cur_loc_changed; | |
7439 | |
7440 if (dv_is_value_p (var->dv) | |
7441 || TREE_CODE (dv_as_decl (var->dv)) == DEBUG_EXPR_DECL) | |
7442 return 1; | |
7443 | |
7444 for (i = 0; i < var->n_var_parts; i++) | |
7445 { | |
7446 if (var->var_part[i].cur_loc | |
7447 && vt_expand_loc_dummy (var->var_part[i].cur_loc, vars, | |
7448 &cur_loc_changed)) | |
7449 { | |
7450 if (cur_loc_changed) | |
7451 var->cur_loc_changed = true; | |
7452 continue; | |
7453 } | |
7454 for (lc = var->var_part[i].loc_chain; lc; lc = lc->next) | |
7455 if (lc->loc != var->var_part[i].cur_loc | |
7456 && vt_expand_loc_dummy (lc->loc, vars, &cur_loc_changed)) | |
7457 break; | |
7458 if (lc || var->var_part[i].cur_loc) | |
7459 var->cur_loc_changed = true; | |
7460 var->var_part[i].cur_loc = lc ? lc->loc : NULL_RTX; | |
7461 } | |
7462 if (var->n_var_parts == 0) | |
7463 var->cur_loc_changed = true; | |
7464 return 1; | |
7465 } | |
7466 | |
7467 /* Emit NOTE_INSN_VAR_LOCATION note for each variable from a chain | |
7468 CHANGED_VARIABLES and delete this chain. WHERE specifies whether the notes | |
7469 shall be emitted before of after instruction INSN. */ | |
7470 | |
7471 static void | |
7472 emit_notes_for_changes (rtx insn, enum emit_note_where where, | |
7473 shared_hash vars) | |
7474 { | |
7475 emit_note_data data; | |
7476 htab_t htab = shared_hash_htab (vars); | |
7477 | |
7478 if (!htab_elements (changed_variables)) | |
7479 return; | |
7480 | 9009 |
7481 if (MAY_HAVE_DEBUG_INSNS) | 9010 if (MAY_HAVE_DEBUG_INSNS) |
7482 { | 9011 process_changed_values (htab); |
7483 /* Unfortunately this has to be done in two steps, because | |
7484 we can't traverse a hashtab into which we are inserting | |
7485 through variable_was_changed. */ | |
7486 htab_traverse (changed_variables, check_changed_vars_1, htab); | |
7487 while (VEC_length (variable, changed_variables_stack) > 0) | |
7488 check_changed_vars_2 (VEC_pop (variable, changed_variables_stack), | |
7489 htab); | |
7490 while (VEC_length (rtx, changed_values_stack) > 0) | |
7491 set_dv_changed (dv_from_value (VEC_pop (rtx, changed_values_stack)), | |
7492 false); | |
7493 htab_traverse (changed_variables, check_changed_vars_3, htab); | |
7494 } | |
7495 | 9012 |
7496 data.insn = insn; | 9013 data.insn = insn; |
7497 data.where = where; | 9014 data.where = where; |
7498 data.vars = htab; | 9015 data.vars = htab; |
7499 | 9016 |
7500 htab_traverse (changed_variables, emit_note_insn_var_location, &data); | 9017 changed_variables |
9018 ->traverse <emit_note_data*, emit_note_insn_var_location> (&data); | |
7501 } | 9019 } |
7502 | 9020 |
7503 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the | 9021 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it differs from the |
7504 same variable in hash table DATA or is not there at all. */ | 9022 same variable in hash table DATA or is not there at all. */ |
7505 | 9023 |
7506 static int | 9024 int |
7507 emit_notes_for_differences_1 (void **slot, void *data) | 9025 emit_notes_for_differences_1 (variable **slot, variable_table_type *new_vars) |
7508 { | 9026 { |
7509 htab_t new_vars = (htab_t) data; | 9027 variable *old_var, *new_var; |
7510 variable old_var, new_var; | 9028 |
7511 | 9029 old_var = *slot; |
7512 old_var = (variable) *slot; | 9030 new_var = new_vars->find_with_hash (old_var->dv, dv_htab_hash (old_var->dv)); |
7513 new_var = (variable) htab_find_with_hash (new_vars, old_var->dv, | |
7514 dv_htab_hash (old_var->dv)); | |
7515 | 9031 |
7516 if (!new_var) | 9032 if (!new_var) |
7517 { | 9033 { |
7518 /* Variable has disappeared. */ | 9034 /* Variable has disappeared. */ |
7519 variable empty_var; | 9035 variable *empty_var = NULL; |
7520 | 9036 |
7521 empty_var = (variable) pool_alloc (dv_pool (old_var->dv)); | 9037 if (old_var->onepart == ONEPART_VALUE |
7522 empty_var->dv = old_var->dv; | 9038 || old_var->onepart == ONEPART_DEXPR) |
7523 empty_var->refcount = 0; | 9039 { |
7524 empty_var->n_var_parts = 0; | 9040 empty_var = variable_from_dropped (old_var->dv, NO_INSERT); |
7525 empty_var->cur_loc_changed = false; | 9041 if (empty_var) |
7526 empty_var->in_changed_variables = false; | 9042 { |
7527 if (dv_onepart_p (old_var->dv)) | 9043 gcc_checking_assert (!empty_var->in_changed_variables); |
7528 { | 9044 if (!VAR_LOC_1PAUX (old_var)) |
7529 location_chain lc; | 9045 { |
7530 | 9046 VAR_LOC_1PAUX (old_var) = VAR_LOC_1PAUX (empty_var); |
7531 gcc_assert (old_var->n_var_parts == 1); | 9047 VAR_LOC_1PAUX (empty_var) = NULL; |
7532 for (lc = old_var->var_part[0].loc_chain; lc; lc = lc->next) | 9048 } |
7533 remove_value_chains (old_var->dv, lc->loc); | 9049 else |
9050 gcc_checking_assert (!VAR_LOC_1PAUX (empty_var)); | |
9051 } | |
9052 } | |
9053 | |
9054 if (!empty_var) | |
9055 { | |
9056 empty_var = onepart_pool_allocate (old_var->onepart); | |
9057 empty_var->dv = old_var->dv; | |
9058 empty_var->refcount = 0; | |
9059 empty_var->n_var_parts = 0; | |
9060 empty_var->onepart = old_var->onepart; | |
9061 empty_var->in_changed_variables = false; | |
9062 } | |
9063 | |
9064 if (empty_var->onepart) | |
9065 { | |
9066 /* Propagate the auxiliary data to (ultimately) | |
9067 changed_variables. */ | |
9068 empty_var->var_part[0].loc_chain = NULL; | |
9069 empty_var->var_part[0].cur_loc = NULL; | |
9070 VAR_LOC_1PAUX (empty_var) = VAR_LOC_1PAUX (old_var); | |
9071 VAR_LOC_1PAUX (old_var) = NULL; | |
7534 } | 9072 } |
7535 variable_was_changed (empty_var, NULL); | 9073 variable_was_changed (empty_var, NULL); |
7536 /* Continue traversing the hash table. */ | 9074 /* Continue traversing the hash table. */ |
7537 return 1; | 9075 return 1; |
7538 } | 9076 } |
9077 /* Update cur_loc and one-part auxiliary data, before new_var goes | |
9078 through variable_was_changed. */ | |
9079 if (old_var != new_var && new_var->onepart) | |
9080 { | |
9081 gcc_checking_assert (VAR_LOC_1PAUX (new_var) == NULL); | |
9082 VAR_LOC_1PAUX (new_var) = VAR_LOC_1PAUX (old_var); | |
9083 VAR_LOC_1PAUX (old_var) = NULL; | |
9084 new_var->var_part[0].cur_loc = old_var->var_part[0].cur_loc; | |
9085 } | |
7539 if (variable_different_p (old_var, new_var)) | 9086 if (variable_different_p (old_var, new_var)) |
7540 { | 9087 variable_was_changed (new_var, NULL); |
7541 if (dv_onepart_p (old_var->dv)) | |
7542 { | |
7543 location_chain lc1, lc2; | |
7544 | |
7545 gcc_assert (old_var->n_var_parts == 1 | |
7546 && new_var->n_var_parts == 1); | |
7547 lc1 = old_var->var_part[0].loc_chain; | |
7548 lc2 = new_var->var_part[0].loc_chain; | |
7549 while (lc1 | |
7550 && lc2 | |
7551 && ((REG_P (lc1->loc) && REG_P (lc2->loc)) | |
7552 || rtx_equal_p (lc1->loc, lc2->loc))) | |
7553 { | |
7554 lc1 = lc1->next; | |
7555 lc2 = lc2->next; | |
7556 } | |
7557 for (; lc2; lc2 = lc2->next) | |
7558 add_value_chains (old_var->dv, lc2->loc); | |
7559 for (; lc1; lc1 = lc1->next) | |
7560 remove_value_chains (old_var->dv, lc1->loc); | |
7561 } | |
7562 variable_was_changed (new_var, NULL); | |
7563 } | |
7564 /* Update cur_loc. */ | |
7565 if (old_var != new_var) | |
7566 { | |
7567 int i; | |
7568 for (i = 0; i < new_var->n_var_parts; i++) | |
7569 { | |
7570 new_var->var_part[i].cur_loc = NULL; | |
7571 if (old_var->n_var_parts != new_var->n_var_parts | |
7572 || old_var->var_part[i].offset != new_var->var_part[i].offset) | |
7573 new_var->cur_loc_changed = true; | |
7574 else if (old_var->var_part[i].cur_loc != NULL) | |
7575 { | |
7576 location_chain lc; | |
7577 rtx cur_loc = old_var->var_part[i].cur_loc; | |
7578 | |
7579 for (lc = new_var->var_part[i].loc_chain; lc; lc = lc->next) | |
7580 if (lc->loc == cur_loc | |
7581 || rtx_equal_p (cur_loc, lc->loc)) | |
7582 { | |
7583 new_var->var_part[i].cur_loc = lc->loc; | |
7584 break; | |
7585 } | |
7586 if (lc == NULL) | |
7587 new_var->cur_loc_changed = true; | |
7588 } | |
7589 } | |
7590 } | |
7591 | 9088 |
7592 /* Continue traversing the hash table. */ | 9089 /* Continue traversing the hash table. */ |
7593 return 1; | 9090 return 1; |
7594 } | 9091 } |
7595 | 9092 |
7596 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash | 9093 /* Add variable *SLOT to the chain CHANGED_VARIABLES if it is not in hash |
7597 table DATA. */ | 9094 table DATA. */ |
7598 | 9095 |
7599 static int | 9096 int |
7600 emit_notes_for_differences_2 (void **slot, void *data) | 9097 emit_notes_for_differences_2 (variable **slot, variable_table_type *old_vars) |
7601 { | 9098 { |
7602 htab_t old_vars = (htab_t) data; | 9099 variable *old_var, *new_var; |
7603 variable old_var, new_var; | 9100 |
7604 | 9101 new_var = *slot; |
7605 new_var = (variable) *slot; | 9102 old_var = old_vars->find_with_hash (new_var->dv, dv_htab_hash (new_var->dv)); |
7606 old_var = (variable) htab_find_with_hash (old_vars, new_var->dv, | |
7607 dv_htab_hash (new_var->dv)); | |
7608 if (!old_var) | 9103 if (!old_var) |
7609 { | 9104 { |
7610 int i; | 9105 int i; |
7611 /* Variable has appeared. */ | |
7612 if (dv_onepart_p (new_var->dv)) | |
7613 { | |
7614 location_chain lc; | |
7615 | |
7616 gcc_assert (new_var->n_var_parts == 1); | |
7617 for (lc = new_var->var_part[0].loc_chain; lc; lc = lc->next) | |
7618 add_value_chains (new_var->dv, lc->loc); | |
7619 } | |
7620 for (i = 0; i < new_var->n_var_parts; i++) | 9106 for (i = 0; i < new_var->n_var_parts; i++) |
7621 new_var->var_part[i].cur_loc = NULL; | 9107 new_var->var_part[i].cur_loc = NULL; |
7622 variable_was_changed (new_var, NULL); | 9108 variable_was_changed (new_var, NULL); |
7623 } | 9109 } |
7624 | 9110 |
7628 | 9114 |
7629 /* Emit notes before INSN for differences between dataflow sets OLD_SET and | 9115 /* Emit notes before INSN for differences between dataflow sets OLD_SET and |
7630 NEW_SET. */ | 9116 NEW_SET. */ |
7631 | 9117 |
7632 static void | 9118 static void |
7633 emit_notes_for_differences (rtx insn, dataflow_set *old_set, | 9119 emit_notes_for_differences (rtx_insn *insn, dataflow_set *old_set, |
7634 dataflow_set *new_set) | 9120 dataflow_set *new_set) |
7635 { | 9121 { |
7636 htab_traverse (shared_hash_htab (old_set->vars), | 9122 shared_hash_htab (old_set->vars) |
7637 emit_notes_for_differences_1, | 9123 ->traverse <variable_table_type *, emit_notes_for_differences_1> |
7638 shared_hash_htab (new_set->vars)); | 9124 (shared_hash_htab (new_set->vars)); |
7639 htab_traverse (shared_hash_htab (new_set->vars), | 9125 shared_hash_htab (new_set->vars) |
7640 emit_notes_for_differences_2, | 9126 ->traverse <variable_table_type *, emit_notes_for_differences_2> |
7641 shared_hash_htab (old_set->vars)); | 9127 (shared_hash_htab (old_set->vars)); |
7642 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars); | 9128 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, new_set->vars); |
9129 } | |
9130 | |
9131 /* Return the next insn after INSN that is not a NOTE_INSN_VAR_LOCATION. */ | |
9132 | |
9133 static rtx_insn * | |
9134 next_non_note_insn_var_location (rtx_insn *insn) | |
9135 { | |
9136 while (insn) | |
9137 { | |
9138 insn = NEXT_INSN (insn); | |
9139 if (insn == 0 | |
9140 || !NOTE_P (insn) | |
9141 || NOTE_KIND (insn) != NOTE_INSN_VAR_LOCATION) | |
9142 break; | |
9143 } | |
9144 | |
9145 return insn; | |
7643 } | 9146 } |
7644 | 9147 |
7645 /* Emit the notes for changes of location parts in the basic block BB. */ | 9148 /* Emit the notes for changes of location parts in the basic block BB. */ |
7646 | 9149 |
7647 static void | 9150 static void |
7651 micro_operation *mo; | 9154 micro_operation *mo; |
7652 | 9155 |
7653 dataflow_set_clear (set); | 9156 dataflow_set_clear (set); |
7654 dataflow_set_copy (set, &VTI (bb)->in); | 9157 dataflow_set_copy (set, &VTI (bb)->in); |
7655 | 9158 |
7656 FOR_EACH_VEC_ELT (micro_operation, VTI (bb)->mos, i, mo) | 9159 FOR_EACH_VEC_ELT (VTI (bb)->mos, i, mo) |
7657 { | 9160 { |
7658 rtx insn = mo->insn; | 9161 rtx_insn *insn = mo->insn; |
9162 rtx_insn *next_insn = next_non_note_insn_var_location (insn); | |
7659 | 9163 |
7660 switch (mo->type) | 9164 switch (mo->type) |
7661 { | 9165 { |
7662 case MO_CALL: | 9166 case MO_CALL: |
7663 dataflow_set_clear_at_call (set); | 9167 dataflow_set_clear_at_call (set, insn); |
7664 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars); | 9168 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_CALL_INSN, set->vars); |
9169 { | |
9170 rtx arguments = mo->u.loc, *p = &arguments; | |
9171 rtx_note *note; | |
9172 while (*p) | |
9173 { | |
9174 XEXP (XEXP (*p, 0), 1) | |
9175 = vt_expand_loc (XEXP (XEXP (*p, 0), 1), | |
9176 shared_hash_htab (set->vars)); | |
9177 /* If expansion is successful, keep it in the list. */ | |
9178 if (XEXP (XEXP (*p, 0), 1)) | |
9179 p = &XEXP (*p, 1); | |
9180 /* Otherwise, if the following item is data_value for it, | |
9181 drop it too too. */ | |
9182 else if (XEXP (*p, 1) | |
9183 && REG_P (XEXP (XEXP (*p, 0), 0)) | |
9184 && MEM_P (XEXP (XEXP (XEXP (*p, 1), 0), 0)) | |
9185 && REG_P (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), 0), | |
9186 0)) | |
9187 && REGNO (XEXP (XEXP (*p, 0), 0)) | |
9188 == REGNO (XEXP (XEXP (XEXP (XEXP (*p, 1), 0), | |
9189 0), 0))) | |
9190 *p = XEXP (XEXP (*p, 1), 1); | |
9191 /* Just drop this item. */ | |
9192 else | |
9193 *p = XEXP (*p, 1); | |
9194 } | |
9195 note = emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, insn); | |
9196 NOTE_VAR_LOCATION (note) = arguments; | |
9197 } | |
7665 break; | 9198 break; |
7666 | 9199 |
7667 case MO_USE: | 9200 case MO_USE: |
7668 { | 9201 { |
7669 rtx loc = mo->u.loc; | 9202 rtx loc = mo->u.loc; |
7671 if (REG_P (loc)) | 9204 if (REG_P (loc)) |
7672 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL); | 9205 var_reg_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL); |
7673 else | 9206 else |
7674 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL); | 9207 var_mem_set (set, loc, VAR_INIT_STATUS_UNINITIALIZED, NULL); |
7675 | 9208 |
7676 emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN, set->vars); | 9209 emit_notes_for_changes (insn, EMIT_NOTE_BEFORE_INSN, set->vars); |
7677 } | 9210 } |
7678 break; | 9211 break; |
7679 | 9212 |
7680 case MO_VAL_LOC: | 9213 case MO_VAL_LOC: |
7681 { | 9214 { |
7750 break; | 9283 break; |
7751 | 9284 |
7752 case MO_VAL_SET: | 9285 case MO_VAL_SET: |
7753 { | 9286 { |
7754 rtx loc = mo->u.loc; | 9287 rtx loc = mo->u.loc; |
7755 rtx val, vloc, uloc, reverse = NULL_RTX; | 9288 rtx val, vloc, uloc; |
9289 rtx dstv, srcv; | |
7756 | 9290 |
7757 vloc = loc; | 9291 vloc = loc; |
7758 if (VAL_EXPR_HAS_REVERSE (loc)) | |
7759 { | |
7760 reverse = XEXP (loc, 1); | |
7761 vloc = XEXP (loc, 0); | |
7762 } | |
7763 uloc = XEXP (vloc, 1); | 9292 uloc = XEXP (vloc, 1); |
7764 val = XEXP (vloc, 0); | 9293 val = XEXP (vloc, 0); |
7765 vloc = uloc; | 9294 vloc = uloc; |
7766 | 9295 |
9296 if (GET_CODE (uloc) == SET) | |
9297 { | |
9298 dstv = SET_DEST (uloc); | |
9299 srcv = SET_SRC (uloc); | |
9300 } | |
9301 else | |
9302 { | |
9303 dstv = uloc; | |
9304 srcv = NULL; | |
9305 } | |
9306 | |
7767 if (GET_CODE (val) == CONCAT) | 9307 if (GET_CODE (val) == CONCAT) |
7768 { | 9308 { |
7769 vloc = XEXP (val, 1); | 9309 dstv = vloc = XEXP (val, 1); |
7770 val = XEXP (val, 0); | 9310 val = XEXP (val, 0); |
7771 } | 9311 } |
7772 | 9312 |
7773 if (GET_CODE (vloc) == SET) | 9313 if (GET_CODE (vloc) == SET) |
7774 { | 9314 { |
7775 rtx vsrc = SET_SRC (vloc); | 9315 srcv = SET_SRC (vloc); |
7776 | 9316 |
7777 gcc_assert (val != vsrc); | 9317 gcc_assert (val != srcv); |
7778 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc)); | 9318 gcc_assert (vloc == uloc || VAL_NEEDS_RESOLUTION (loc)); |
7779 | 9319 |
7780 vloc = SET_DEST (vloc); | 9320 dstv = vloc = SET_DEST (vloc); |
7781 | 9321 |
7782 if (VAL_NEEDS_RESOLUTION (loc)) | 9322 if (VAL_NEEDS_RESOLUTION (loc)) |
7783 val_resolve (set, val, vsrc, insn); | 9323 val_resolve (set, val, srcv, insn); |
7784 } | 9324 } |
7785 else if (VAL_NEEDS_RESOLUTION (loc)) | 9325 else if (VAL_NEEDS_RESOLUTION (loc)) |
7786 { | 9326 { |
7787 gcc_assert (GET_CODE (uloc) == SET | 9327 gcc_assert (GET_CODE (uloc) == SET |
7788 && GET_CODE (SET_SRC (uloc)) == REG); | 9328 && GET_CODE (SET_SRC (uloc)) == REG); |
7794 if (VAL_EXPR_IS_CLOBBERED (loc)) | 9334 if (VAL_EXPR_IS_CLOBBERED (loc)) |
7795 { | 9335 { |
7796 if (REG_P (uloc)) | 9336 if (REG_P (uloc)) |
7797 var_reg_delete (set, uloc, true); | 9337 var_reg_delete (set, uloc, true); |
7798 else if (MEM_P (uloc)) | 9338 else if (MEM_P (uloc)) |
7799 var_mem_delete (set, uloc, true); | 9339 { |
9340 gcc_assert (MEM_P (dstv)); | |
9341 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (uloc)); | |
9342 var_mem_delete (set, dstv, true); | |
9343 } | |
7800 } | 9344 } |
7801 else | 9345 else |
7802 { | 9346 { |
7803 bool copied_p = VAL_EXPR_IS_COPIED (loc); | 9347 bool copied_p = VAL_EXPR_IS_COPIED (loc); |
7804 rtx set_src = NULL; | 9348 rtx src = NULL, dst = uloc; |
7805 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED; | 9349 enum var_init_status status = VAR_INIT_STATUS_INITIALIZED; |
7806 | 9350 |
7807 if (GET_CODE (uloc) == SET) | 9351 if (GET_CODE (uloc) == SET) |
7808 { | 9352 { |
7809 set_src = SET_SRC (uloc); | 9353 src = SET_SRC (uloc); |
7810 uloc = SET_DEST (uloc); | 9354 dst = SET_DEST (uloc); |
7811 } | 9355 } |
7812 | 9356 |
7813 if (copied_p) | 9357 if (copied_p) |
7814 { | 9358 { |
7815 status = find_src_status (set, set_src); | 9359 status = find_src_status (set, src); |
7816 | 9360 |
7817 set_src = find_src_set_src (set, set_src); | 9361 src = find_src_set_src (set, src); |
7818 } | 9362 } |
7819 | 9363 |
7820 if (REG_P (uloc)) | 9364 if (REG_P (dst)) |
7821 var_reg_delete_and_set (set, uloc, !copied_p, | 9365 var_reg_delete_and_set (set, dst, !copied_p, |
7822 status, set_src); | 9366 status, srcv); |
7823 else if (MEM_P (uloc)) | 9367 else if (MEM_P (dst)) |
7824 var_mem_delete_and_set (set, uloc, !copied_p, | 9368 { |
7825 status, set_src); | 9369 gcc_assert (MEM_P (dstv)); |
9370 gcc_assert (MEM_ATTRS (dstv) == MEM_ATTRS (dst)); | |
9371 var_mem_delete_and_set (set, dstv, !copied_p, | |
9372 status, srcv); | |
9373 } | |
7826 } | 9374 } |
7827 } | 9375 } |
7828 else if (REG_P (uloc)) | 9376 else if (REG_P (uloc)) |
7829 var_regno_delete (set, REGNO (uloc)); | 9377 var_regno_delete (set, REGNO (uloc)); |
7830 | 9378 else if (MEM_P (uloc)) |
7831 val_store (set, val, vloc, insn, true); | 9379 { |
7832 | 9380 gcc_checking_assert (GET_CODE (vloc) == MEM); |
7833 if (reverse) | 9381 gcc_checking_assert (vloc == dstv); |
7834 val_store (set, XEXP (reverse, 0), XEXP (reverse, 1), | 9382 if (vloc != dstv) |
7835 insn, false); | 9383 clobber_overlapping_mems (set, vloc); |
7836 | 9384 } |
7837 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN, | 9385 |
9386 val_store (set, val, dstv, insn, true); | |
9387 | |
9388 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN, | |
7838 set->vars); | 9389 set->vars); |
7839 } | 9390 } |
7840 break; | 9391 break; |
7841 | 9392 |
7842 case MO_SET: | 9393 case MO_SET: |
7855 set_src); | 9406 set_src); |
7856 else | 9407 else |
7857 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED, | 9408 var_mem_delete_and_set (set, loc, true, VAR_INIT_STATUS_INITIALIZED, |
7858 set_src); | 9409 set_src); |
7859 | 9410 |
7860 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN, | 9411 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN, |
7861 set->vars); | 9412 set->vars); |
7862 } | 9413 } |
7863 break; | 9414 break; |
7864 | 9415 |
7865 case MO_COPY: | 9416 case MO_COPY: |
7880 if (REG_P (loc)) | 9431 if (REG_P (loc)) |
7881 var_reg_delete_and_set (set, loc, false, src_status, set_src); | 9432 var_reg_delete_and_set (set, loc, false, src_status, set_src); |
7882 else | 9433 else |
7883 var_mem_delete_and_set (set, loc, false, src_status, set_src); | 9434 var_mem_delete_and_set (set, loc, false, src_status, set_src); |
7884 | 9435 |
7885 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN, | 9436 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN, |
7886 set->vars); | 9437 set->vars); |
7887 } | 9438 } |
7888 break; | 9439 break; |
7889 | 9440 |
7890 case MO_USE_NO_VAR: | 9441 case MO_USE_NO_VAR: |
7907 if (REG_P (loc)) | 9458 if (REG_P (loc)) |
7908 var_reg_delete (set, loc, true); | 9459 var_reg_delete (set, loc, true); |
7909 else | 9460 else |
7910 var_mem_delete (set, loc, true); | 9461 var_mem_delete (set, loc, true); |
7911 | 9462 |
7912 emit_notes_for_changes (NEXT_INSN (insn), EMIT_NOTE_BEFORE_INSN, | 9463 emit_notes_for_changes (next_insn, EMIT_NOTE_BEFORE_INSN, |
7913 set->vars); | 9464 set->vars); |
7914 } | 9465 } |
7915 break; | 9466 break; |
7916 | 9467 |
7917 case MO_ADJUST: | 9468 case MO_ADJUST: |
7927 vt_emit_notes (void) | 9478 vt_emit_notes (void) |
7928 { | 9479 { |
7929 basic_block bb; | 9480 basic_block bb; |
7930 dataflow_set cur; | 9481 dataflow_set cur; |
7931 | 9482 |
7932 gcc_assert (!htab_elements (changed_variables)); | 9483 gcc_assert (!changed_variables->elements ()); |
7933 | 9484 |
7934 /* Free memory occupied by the out hash tables, as they aren't used | 9485 /* Free memory occupied by the out hash tables, as they aren't used |
7935 anymore. */ | 9486 anymore. */ |
7936 FOR_EACH_BB (bb) | 9487 FOR_EACH_BB_FN (bb, cfun) |
7937 dataflow_set_clear (&VTI (bb)->out); | 9488 dataflow_set_clear (&VTI (bb)->out); |
7938 | 9489 |
7939 /* Enable emitting notes by functions (mainly by set_variable_part and | 9490 /* Enable emitting notes by functions (mainly by set_variable_part and |
7940 delete_variable_part). */ | 9491 delete_variable_part). */ |
7941 emit_notes = true; | 9492 emit_notes = true; |
7942 | 9493 |
7943 if (MAY_HAVE_DEBUG_INSNS) | 9494 if (MAY_HAVE_DEBUG_INSNS) |
7944 { | 9495 { |
7945 unsigned int i; | 9496 dropped_values = new variable_table_type (cselib_get_next_uid () * 2); |
7946 rtx val; | |
7947 | |
7948 FOR_EACH_VEC_ELT (rtx, preserved_values, i, val) | |
7949 add_cselib_value_chains (dv_from_value (val)); | |
7950 changed_variables_stack = VEC_alloc (variable, heap, 40); | |
7951 changed_values_stack = VEC_alloc (rtx, heap, 40); | |
7952 } | 9497 } |
7953 | 9498 |
7954 dataflow_set_init (&cur); | 9499 dataflow_set_init (&cur); |
7955 | 9500 |
7956 FOR_EACH_BB (bb) | 9501 FOR_EACH_BB_FN (bb, cfun) |
7957 { | 9502 { |
7958 /* Emit the notes for changes of variable locations between two | 9503 /* Emit the notes for changes of variable locations between two |
7959 subsequent basic blocks. */ | 9504 subsequent basic blocks. */ |
7960 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in); | 9505 emit_notes_for_differences (BB_HEAD (bb), &cur, &VTI (bb)->in); |
7961 | 9506 |
9507 if (MAY_HAVE_DEBUG_INSNS) | |
9508 local_get_addr_cache = new hash_map<rtx, rtx>; | |
9509 | |
7962 /* Emit the notes for the changes in the basic block itself. */ | 9510 /* Emit the notes for the changes in the basic block itself. */ |
7963 emit_notes_in_bb (bb, &cur); | 9511 emit_notes_in_bb (bb, &cur); |
9512 | |
9513 if (MAY_HAVE_DEBUG_INSNS) | |
9514 delete local_get_addr_cache; | |
9515 local_get_addr_cache = NULL; | |
7964 | 9516 |
7965 /* Free memory occupied by the in hash table, we won't need it | 9517 /* Free memory occupied by the in hash table, we won't need it |
7966 again. */ | 9518 again. */ |
7967 dataflow_set_clear (&VTI (bb)->in); | 9519 dataflow_set_clear (&VTI (bb)->in); |
7968 } | 9520 } |
7969 #ifdef ENABLE_CHECKING | 9521 |
7970 htab_traverse (shared_hash_htab (cur.vars), | 9522 if (flag_checking) |
7971 emit_notes_for_differences_1, | 9523 shared_hash_htab (cur.vars) |
7972 shared_hash_htab (empty_shared_hash)); | 9524 ->traverse <variable_table_type *, emit_notes_for_differences_1> |
9525 (shared_hash_htab (empty_shared_hash)); | |
9526 | |
9527 dataflow_set_destroy (&cur); | |
9528 | |
7973 if (MAY_HAVE_DEBUG_INSNS) | 9529 if (MAY_HAVE_DEBUG_INSNS) |
7974 { | 9530 delete dropped_values; |
7975 unsigned int i; | 9531 dropped_values = NULL; |
7976 rtx val; | |
7977 | |
7978 FOR_EACH_VEC_ELT (rtx, preserved_values, i, val) | |
7979 remove_cselib_value_chains (dv_from_value (val)); | |
7980 gcc_assert (htab_elements (value_chains) == 0); | |
7981 } | |
7982 #endif | |
7983 dataflow_set_destroy (&cur); | |
7984 | |
7985 if (MAY_HAVE_DEBUG_INSNS) | |
7986 { | |
7987 VEC_free (variable, heap, changed_variables_stack); | |
7988 VEC_free (rtx, heap, changed_values_stack); | |
7989 } | |
7990 | 9532 |
7991 emit_notes = false; | 9533 emit_notes = false; |
7992 } | 9534 } |
7993 | 9535 |
7994 /* If there is a declaration and offset associated with register/memory RTL | 9536 /* If there is a declaration and offset associated with register/memory RTL |
8004 *declp = REG_EXPR (rtl); | 9546 *declp = REG_EXPR (rtl); |
8005 *offsetp = REG_OFFSET (rtl); | 9547 *offsetp = REG_OFFSET (rtl); |
8006 return true; | 9548 return true; |
8007 } | 9549 } |
8008 } | 9550 } |
9551 else if (GET_CODE (rtl) == PARALLEL) | |
9552 { | |
9553 tree decl = NULL_TREE; | |
9554 HOST_WIDE_INT offset = MAX_VAR_PARTS; | |
9555 int len = XVECLEN (rtl, 0), i; | |
9556 | |
9557 for (i = 0; i < len; i++) | |
9558 { | |
9559 rtx reg = XEXP (XVECEXP (rtl, 0, i), 0); | |
9560 if (!REG_P (reg) || !REG_ATTRS (reg)) | |
9561 break; | |
9562 if (!decl) | |
9563 decl = REG_EXPR (reg); | |
9564 if (REG_EXPR (reg) != decl) | |
9565 break; | |
9566 if (REG_OFFSET (reg) < offset) | |
9567 offset = REG_OFFSET (reg); | |
9568 } | |
9569 | |
9570 if (i == len) | |
9571 { | |
9572 *declp = decl; | |
9573 *offsetp = offset; | |
9574 return true; | |
9575 } | |
9576 } | |
8009 else if (MEM_P (rtl)) | 9577 else if (MEM_P (rtl)) |
8010 { | 9578 { |
8011 if (MEM_ATTRS (rtl)) | 9579 if (MEM_ATTRS (rtl)) |
8012 { | 9580 { |
8013 *declp = MEM_EXPR (rtl); | 9581 *declp = MEM_EXPR (rtl); |
8016 } | 9584 } |
8017 } | 9585 } |
8018 return false; | 9586 return false; |
8019 } | 9587 } |
8020 | 9588 |
9589 /* Record the value for the ENTRY_VALUE of RTL as a global equivalence | |
9590 of VAL. */ | |
9591 | |
9592 static void | |
9593 record_entry_value (cselib_val *val, rtx rtl) | |
9594 { | |
9595 rtx ev = gen_rtx_ENTRY_VALUE (GET_MODE (rtl)); | |
9596 | |
9597 ENTRY_VALUE_EXP (ev) = rtl; | |
9598 | |
9599 cselib_add_permanent_equiv (val, ev, get_insns ()); | |
9600 } | |
9601 | |
8021 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */ | 9602 /* Insert function parameter PARM in IN and OUT sets of ENTRY_BLOCK. */ |
8022 | 9603 |
8023 static void | 9604 static void |
8024 vt_add_function_parameter (tree parm) | 9605 vt_add_function_parameter (tree parm) |
8025 { | 9606 { |
8026 rtx decl_rtl = DECL_RTL_IF_SET (parm); | 9607 rtx decl_rtl = DECL_RTL_IF_SET (parm); |
8027 rtx incoming = DECL_INCOMING_RTL (parm); | 9608 rtx incoming = DECL_INCOMING_RTL (parm); |
8028 tree decl; | 9609 tree decl; |
8029 enum machine_mode mode; | 9610 machine_mode mode; |
8030 HOST_WIDE_INT offset; | 9611 HOST_WIDE_INT offset; |
8031 dataflow_set *out; | 9612 dataflow_set *out; |
8032 decl_or_value dv; | 9613 decl_or_value dv; |
8033 | 9614 |
8034 if (TREE_CODE (parm) != PARM_DECL) | 9615 if (TREE_CODE (parm) != PARM_DECL) |
8038 return; | 9619 return; |
8039 | 9620 |
8040 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode) | 9621 if (GET_MODE (decl_rtl) == BLKmode || GET_MODE (incoming) == BLKmode) |
8041 return; | 9622 return; |
8042 | 9623 |
9624 /* If there is a DRAP register or a pseudo in internal_arg_pointer, | |
9625 rewrite the incoming location of parameters passed on the stack | |
9626 into MEMs based on the argument pointer, so that incoming doesn't | |
9627 depend on a pseudo. */ | |
9628 if (MEM_P (incoming) | |
9629 && (XEXP (incoming, 0) == crtl->args.internal_arg_pointer | |
9630 || (GET_CODE (XEXP (incoming, 0)) == PLUS | |
9631 && XEXP (XEXP (incoming, 0), 0) | |
9632 == crtl->args.internal_arg_pointer | |
9633 && CONST_INT_P (XEXP (XEXP (incoming, 0), 1))))) | |
9634 { | |
9635 HOST_WIDE_INT off = -FIRST_PARM_OFFSET (current_function_decl); | |
9636 if (GET_CODE (XEXP (incoming, 0)) == PLUS) | |
9637 off += INTVAL (XEXP (XEXP (incoming, 0), 1)); | |
9638 incoming | |
9639 = replace_equiv_address_nv (incoming, | |
9640 plus_constant (Pmode, | |
9641 arg_pointer_rtx, off)); | |
9642 } | |
9643 | |
9644 #ifdef HAVE_window_save | |
9645 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers. | |
9646 If the target machine has an explicit window save instruction, the | |
9647 actual entry value is the corresponding OUTGOING_REGNO instead. */ | |
9648 if (HAVE_window_save && !crtl->uses_only_leaf_regs) | |
9649 { | |
9650 if (REG_P (incoming) | |
9651 && HARD_REGISTER_P (incoming) | |
9652 && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming)) | |
9653 { | |
9654 parm_reg p; | |
9655 p.incoming = incoming; | |
9656 incoming | |
9657 = gen_rtx_REG_offset (incoming, GET_MODE (incoming), | |
9658 OUTGOING_REGNO (REGNO (incoming)), 0); | |
9659 p.outgoing = incoming; | |
9660 vec_safe_push (windowed_parm_regs, p); | |
9661 } | |
9662 else if (GET_CODE (incoming) == PARALLEL) | |
9663 { | |
9664 rtx outgoing | |
9665 = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (XVECLEN (incoming, 0))); | |
9666 int i; | |
9667 | |
9668 for (i = 0; i < XVECLEN (incoming, 0); i++) | |
9669 { | |
9670 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0); | |
9671 parm_reg p; | |
9672 p.incoming = reg; | |
9673 reg = gen_rtx_REG_offset (reg, GET_MODE (reg), | |
9674 OUTGOING_REGNO (REGNO (reg)), 0); | |
9675 p.outgoing = reg; | |
9676 XVECEXP (outgoing, 0, i) | |
9677 = gen_rtx_EXPR_LIST (VOIDmode, reg, | |
9678 XEXP (XVECEXP (incoming, 0, i), 1)); | |
9679 vec_safe_push (windowed_parm_regs, p); | |
9680 } | |
9681 | |
9682 incoming = outgoing; | |
9683 } | |
9684 else if (MEM_P (incoming) | |
9685 && REG_P (XEXP (incoming, 0)) | |
9686 && HARD_REGISTER_P (XEXP (incoming, 0))) | |
9687 { | |
9688 rtx reg = XEXP (incoming, 0); | |
9689 if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg)) | |
9690 { | |
9691 parm_reg p; | |
9692 p.incoming = reg; | |
9693 reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg))); | |
9694 p.outgoing = reg; | |
9695 vec_safe_push (windowed_parm_regs, p); | |
9696 incoming = replace_equiv_address_nv (incoming, reg); | |
9697 } | |
9698 } | |
9699 } | |
9700 #endif | |
9701 | |
8043 if (!vt_get_decl_and_offset (incoming, &decl, &offset)) | 9702 if (!vt_get_decl_and_offset (incoming, &decl, &offset)) |
8044 { | 9703 { |
8045 if (REG_P (incoming) || MEM_P (incoming)) | 9704 if (MEM_P (incoming)) |
8046 { | 9705 { |
8047 /* This means argument is passed by invisible reference. */ | 9706 /* This means argument is passed by invisible reference. */ |
8048 offset = 0; | 9707 offset = 0; |
8049 decl = parm; | 9708 decl = parm; |
8050 incoming = gen_rtx_MEM (GET_MODE (decl_rtl), incoming); | |
8051 } | 9709 } |
8052 else | 9710 else |
8053 { | 9711 { |
8054 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset)) | 9712 if (!vt_get_decl_and_offset (decl_rtl, &decl, &offset)) |
8055 return; | 9713 return; |
8061 if (!decl) | 9719 if (!decl) |
8062 return; | 9720 return; |
8063 | 9721 |
8064 if (parm != decl) | 9722 if (parm != decl) |
8065 { | 9723 { |
8066 /* Assume that DECL_RTL was a pseudo that got spilled to | 9724 /* If that DECL_RTL wasn't a pseudo that got spilled to |
8067 memory. The spill slot sharing code will force the | 9725 memory, bail out. Otherwise, the spill slot sharing code |
8068 memory to reference spill_slot_decl (%sfp), so we don't | 9726 will force the memory to reference spill_slot_decl (%sfp), |
8069 match above. That's ok, the pseudo must have referenced | 9727 so we don't match above. That's ok, the pseudo must have |
8070 the entire parameter, so just reset OFFSET. */ | 9728 referenced the entire parameter, so just reset OFFSET. */ |
8071 gcc_assert (decl == get_spill_slot_decl (false)); | 9729 if (decl != get_spill_slot_decl (false)) |
9730 return; | |
8072 offset = 0; | 9731 offset = 0; |
8073 } | 9732 } |
8074 | 9733 |
8075 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset)) | 9734 if (!track_loc_p (incoming, parm, offset, false, &mode, &offset)) |
8076 return; | 9735 return; |
8077 | 9736 |
8078 out = &VTI (ENTRY_BLOCK_PTR)->out; | 9737 out = &VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->out; |
8079 | 9738 |
8080 dv = dv_from_decl (parm); | 9739 dv = dv_from_decl (parm); |
8081 | 9740 |
8082 if (target_for_debug_bind (parm) | 9741 if (target_for_debug_bind (parm) |
8083 /* We can't deal with these right now, because this kind of | 9742 /* We can't deal with these right now, because this kind of |
8085 that describe multiple locations for the same single | 9744 that describe multiple locations for the same single |
8086 value, but ATM we don't. */ | 9745 value, but ATM we don't. */ |
8087 && GET_CODE (incoming) != PARALLEL) | 9746 && GET_CODE (incoming) != PARALLEL) |
8088 { | 9747 { |
8089 cselib_val *val; | 9748 cselib_val *val; |
9749 rtx lowpart; | |
8090 | 9750 |
8091 /* ??? We shouldn't ever hit this, but it may happen because | 9751 /* ??? We shouldn't ever hit this, but it may happen because |
8092 arguments passed by invisible reference aren't dealt with | 9752 arguments passed by invisible reference aren't dealt with |
8093 above: incoming-rtl will have Pmode rather than the | 9753 above: incoming-rtl will have Pmode rather than the |
8094 expected mode for the type. */ | 9754 expected mode for the type. */ |
8095 if (offset) | 9755 if (offset) |
8096 return; | 9756 return; |
8097 | 9757 |
8098 val = cselib_lookup (var_lowpart (mode, incoming), mode, true, | 9758 lowpart = var_lowpart (mode, incoming); |
8099 VOIDmode); | 9759 if (!lowpart) |
9760 return; | |
9761 | |
9762 val = cselib_lookup_from_insn (lowpart, mode, true, | |
9763 VOIDmode, get_insns ()); | |
8100 | 9764 |
8101 /* ??? Float-typed values in memory are not handled by | 9765 /* ??? Float-typed values in memory are not handled by |
8102 cselib. */ | 9766 cselib. */ |
8103 if (val) | 9767 if (val) |
8104 { | 9768 { |
8105 preserve_value (val); | 9769 preserve_value (val); |
8106 set_variable_part (out, val->val_rtx, dv, offset, | 9770 set_variable_part (out, val->val_rtx, dv, offset, |
8107 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT); | 9771 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT); |
8108 dv = dv_from_value (val->val_rtx); | 9772 dv = dv_from_value (val->val_rtx); |
9773 } | |
9774 | |
9775 if (MEM_P (incoming)) | |
9776 { | |
9777 val = cselib_lookup_from_insn (XEXP (incoming, 0), mode, true, | |
9778 VOIDmode, get_insns ()); | |
9779 if (val) | |
9780 { | |
9781 preserve_value (val); | |
9782 incoming = replace_equiv_address_nv (incoming, val->val_rtx); | |
9783 } | |
8109 } | 9784 } |
8110 } | 9785 } |
8111 | 9786 |
8112 if (REG_P (incoming)) | 9787 if (REG_P (incoming)) |
8113 { | 9788 { |
8115 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER); | 9790 gcc_assert (REGNO (incoming) < FIRST_PSEUDO_REGISTER); |
8116 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset, | 9791 attrs_list_insert (&out->regs[REGNO (incoming)], dv, offset, |
8117 incoming); | 9792 incoming); |
8118 set_variable_part (out, incoming, dv, offset, | 9793 set_variable_part (out, incoming, dv, offset, |
8119 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT); | 9794 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT); |
9795 if (dv_is_value_p (dv)) | |
9796 { | |
9797 record_entry_value (CSELIB_VAL_PTR (dv_as_value (dv)), incoming); | |
9798 if (TREE_CODE (TREE_TYPE (parm)) == REFERENCE_TYPE | |
9799 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (parm)))) | |
9800 { | |
9801 machine_mode indmode | |
9802 = TYPE_MODE (TREE_TYPE (TREE_TYPE (parm))); | |
9803 rtx mem = gen_rtx_MEM (indmode, incoming); | |
9804 cselib_val *val = cselib_lookup_from_insn (mem, indmode, true, | |
9805 VOIDmode, | |
9806 get_insns ()); | |
9807 if (val) | |
9808 { | |
9809 preserve_value (val); | |
9810 record_entry_value (val, mem); | |
9811 set_variable_part (out, mem, dv_from_value (val->val_rtx), 0, | |
9812 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT); | |
9813 } | |
9814 } | |
9815 } | |
9816 } | |
9817 else if (GET_CODE (incoming) == PARALLEL && !dv_onepart_p (dv)) | |
9818 { | |
9819 int i; | |
9820 | |
9821 for (i = 0; i < XVECLEN (incoming, 0); i++) | |
9822 { | |
9823 rtx reg = XEXP (XVECEXP (incoming, 0, i), 0); | |
9824 offset = REG_OFFSET (reg); | |
9825 gcc_assert (REGNO (reg) < FIRST_PSEUDO_REGISTER); | |
9826 attrs_list_insert (&out->regs[REGNO (reg)], dv, offset, reg); | |
9827 set_variable_part (out, reg, dv, offset, | |
9828 VAR_INIT_STATUS_INITIALIZED, NULL, INSERT); | |
9829 } | |
8120 } | 9830 } |
8121 else if (MEM_P (incoming)) | 9831 else if (MEM_P (incoming)) |
8122 { | 9832 { |
8123 incoming = var_lowpart (mode, incoming); | 9833 incoming = var_lowpart (mode, incoming); |
8124 set_variable_part (out, incoming, dv, offset, | 9834 set_variable_part (out, incoming, dv, offset, |
8133 { | 9843 { |
8134 tree parm; | 9844 tree parm; |
8135 | 9845 |
8136 for (parm = DECL_ARGUMENTS (current_function_decl); | 9846 for (parm = DECL_ARGUMENTS (current_function_decl); |
8137 parm; parm = DECL_CHAIN (parm)) | 9847 parm; parm = DECL_CHAIN (parm)) |
8138 vt_add_function_parameter (parm); | 9848 if (!POINTER_BOUNDS_P (parm)) |
9849 vt_add_function_parameter (parm); | |
8139 | 9850 |
8140 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl))) | 9851 if (DECL_HAS_VALUE_EXPR_P (DECL_RESULT (current_function_decl))) |
8141 { | 9852 { |
8142 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl)); | 9853 tree vexpr = DECL_VALUE_EXPR (DECL_RESULT (current_function_decl)); |
8143 | 9854 |
8148 && DECL_ARTIFICIAL (vexpr) | 9859 && DECL_ARTIFICIAL (vexpr) |
8149 && !DECL_IGNORED_P (vexpr) | 9860 && !DECL_IGNORED_P (vexpr) |
8150 && DECL_NAMELESS (vexpr)) | 9861 && DECL_NAMELESS (vexpr)) |
8151 vt_add_function_parameter (vexpr); | 9862 vt_add_function_parameter (vexpr); |
8152 } | 9863 } |
8153 | |
8154 if (MAY_HAVE_DEBUG_INSNS) | |
8155 { | |
8156 cselib_preserve_only_values (); | |
8157 cselib_reset_table (cselib_get_next_uid ()); | |
8158 } | |
8159 | |
8160 } | |
8161 | |
8162 /* Return true if INSN in the prologue initializes hard_frame_pointer_rtx. */ | |
8163 | |
8164 static bool | |
8165 fp_setter (rtx insn) | |
8166 { | |
8167 rtx pat = PATTERN (insn); | |
8168 if (RTX_FRAME_RELATED_P (insn)) | |
8169 { | |
8170 rtx expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX); | |
8171 if (expr) | |
8172 pat = XEXP (expr, 0); | |
8173 } | |
8174 if (GET_CODE (pat) == SET) | |
8175 return SET_DEST (pat) == hard_frame_pointer_rtx; | |
8176 else if (GET_CODE (pat) == PARALLEL) | |
8177 { | |
8178 int i; | |
8179 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--) | |
8180 if (GET_CODE (XVECEXP (pat, 0, i)) == SET | |
8181 && SET_DEST (XVECEXP (pat, 0, i)) == hard_frame_pointer_rtx) | |
8182 return true; | |
8183 } | |
8184 return false; | |
8185 } | 9864 } |
8186 | 9865 |
8187 /* Initialize cfa_base_rtx, create a preserved VALUE for it and | 9866 /* Initialize cfa_base_rtx, create a preserved VALUE for it and |
8188 ensure it isn't flushed during cselib_reset_table. | 9867 ensure it isn't flushed during cselib_reset_table. |
8189 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx | 9868 Can be called only if frame_pointer_rtx resp. arg_pointer_rtx |
8210 if (!MAY_HAVE_DEBUG_INSNS) | 9889 if (!MAY_HAVE_DEBUG_INSNS) |
8211 return; | 9890 return; |
8212 | 9891 |
8213 /* Tell alias analysis that cfa_base_rtx should share | 9892 /* Tell alias analysis that cfa_base_rtx should share |
8214 find_base_term value with stack pointer or hard frame pointer. */ | 9893 find_base_term value with stack pointer or hard frame pointer. */ |
8215 vt_equate_reg_base_value (cfa_base_rtx, | 9894 if (!frame_pointer_needed) |
8216 frame_pointer_needed | 9895 vt_equate_reg_base_value (cfa_base_rtx, stack_pointer_rtx); |
8217 ? hard_frame_pointer_rtx : stack_pointer_rtx); | 9896 else if (!crtl->stack_realign_tried) |
9897 vt_equate_reg_base_value (cfa_base_rtx, hard_frame_pointer_rtx); | |
9898 | |
8218 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1, | 9899 val = cselib_lookup_from_insn (cfa_base_rtx, GET_MODE (cfa_base_rtx), 1, |
8219 VOIDmode, get_insns ()); | 9900 VOIDmode, get_insns ()); |
8220 preserve_value (val); | 9901 preserve_value (val); |
8221 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx)); | 9902 cselib_preserve_cfa_base_value (val, REGNO (cfa_base_rtx)); |
8222 var_reg_decl_set (&VTI (ENTRY_BLOCK_PTR)->out, cfa_base_rtx, | |
8223 VAR_INIT_STATUS_INITIALIZED, dv_from_value (val->val_rtx), | |
8224 0, NULL_RTX, INSERT); | |
8225 } | 9903 } |
8226 | 9904 |
8227 /* Allocate and initialize the data structures for variable tracking | 9905 /* Allocate and initialize the data structures for variable tracking |
8228 and parse the RTL to get the micro operations. */ | 9906 and parse the RTL to get the micro operations. */ |
8229 | 9907 |
8230 static bool | 9908 static bool |
8231 vt_initialize (void) | 9909 vt_initialize (void) |
8232 { | 9910 { |
8233 basic_block bb, prologue_bb = NULL; | 9911 basic_block bb; |
8234 HOST_WIDE_INT fp_cfa_offset = -1; | 9912 HOST_WIDE_INT fp_cfa_offset = -1; |
8235 | 9913 |
8236 alloc_aux_for_blocks (sizeof (struct variable_tracking_info_def)); | 9914 alloc_aux_for_blocks (sizeof (variable_tracking_info)); |
8237 | 9915 |
8238 attrs_pool = create_alloc_pool ("attrs_def pool", | 9916 empty_shared_hash = shared_hash_pool.allocate (); |
8239 sizeof (struct attrs_def), 1024); | |
8240 var_pool = create_alloc_pool ("variable_def pool", | |
8241 sizeof (struct variable_def) | |
8242 + (MAX_VAR_PARTS - 1) | |
8243 * sizeof (((variable)NULL)->var_part[0]), 64); | |
8244 loc_chain_pool = create_alloc_pool ("location_chain_def pool", | |
8245 sizeof (struct location_chain_def), | |
8246 1024); | |
8247 shared_hash_pool = create_alloc_pool ("shared_hash_def pool", | |
8248 sizeof (struct shared_hash_def), 256); | |
8249 empty_shared_hash = (shared_hash) pool_alloc (shared_hash_pool); | |
8250 empty_shared_hash->refcount = 1; | 9917 empty_shared_hash->refcount = 1; |
8251 empty_shared_hash->htab | 9918 empty_shared_hash->htab = new variable_table_type (1); |
8252 = htab_create (1, variable_htab_hash, variable_htab_eq, | 9919 changed_variables = new variable_table_type (10); |
8253 variable_htab_free); | |
8254 changed_variables = htab_create (10, variable_htab_hash, variable_htab_eq, | |
8255 variable_htab_free); | |
8256 if (MAY_HAVE_DEBUG_INSNS) | |
8257 { | |
8258 value_chain_pool = create_alloc_pool ("value_chain_def pool", | |
8259 sizeof (struct value_chain_def), | |
8260 1024); | |
8261 value_chains = htab_create (32, value_chain_htab_hash, | |
8262 value_chain_htab_eq, NULL); | |
8263 } | |
8264 | 9920 |
8265 /* Init the IN and OUT sets. */ | 9921 /* Init the IN and OUT sets. */ |
8266 FOR_ALL_BB (bb) | 9922 FOR_ALL_BB_FN (bb, cfun) |
8267 { | 9923 { |
8268 VTI (bb)->visited = false; | 9924 VTI (bb)->visited = false; |
8269 VTI (bb)->flooded = false; | 9925 VTI (bb)->flooded = false; |
8270 dataflow_set_init (&VTI (bb)->in); | 9926 dataflow_set_init (&VTI (bb)->in); |
8271 dataflow_set_init (&VTI (bb)->out); | 9927 dataflow_set_init (&VTI (bb)->out); |
8274 | 9930 |
8275 if (MAY_HAVE_DEBUG_INSNS) | 9931 if (MAY_HAVE_DEBUG_INSNS) |
8276 { | 9932 { |
8277 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS); | 9933 cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS); |
8278 scratch_regs = BITMAP_ALLOC (NULL); | 9934 scratch_regs = BITMAP_ALLOC (NULL); |
8279 valvar_pool = create_alloc_pool ("small variable_def pool", | 9935 preserved_values.create (256); |
8280 sizeof (struct variable_def), 256); | 9936 global_get_addr_cache = new hash_map<rtx, rtx>; |
8281 preserved_values = VEC_alloc (rtx, heap, 256); | |
8282 } | 9937 } |
8283 else | 9938 else |
8284 { | 9939 { |
8285 scratch_regs = NULL; | 9940 scratch_regs = NULL; |
8286 valvar_pool = NULL; | 9941 global_get_addr_cache = NULL; |
8287 } | 9942 } |
8288 | 9943 |
9944 if (MAY_HAVE_DEBUG_INSNS) | |
9945 { | |
9946 rtx reg, expr; | |
9947 int ofst; | |
9948 cselib_val *val; | |
9949 | |
9950 #ifdef FRAME_POINTER_CFA_OFFSET | |
9951 reg = frame_pointer_rtx; | |
9952 ofst = FRAME_POINTER_CFA_OFFSET (current_function_decl); | |
9953 #else | |
9954 reg = arg_pointer_rtx; | |
9955 ofst = ARG_POINTER_CFA_OFFSET (current_function_decl); | |
9956 #endif | |
9957 | |
9958 ofst -= INCOMING_FRAME_SP_OFFSET; | |
9959 | |
9960 val = cselib_lookup_from_insn (reg, GET_MODE (reg), 1, | |
9961 VOIDmode, get_insns ()); | |
9962 preserve_value (val); | |
9963 if (reg != hard_frame_pointer_rtx && fixed_regs[REGNO (reg)]) | |
9964 cselib_preserve_cfa_base_value (val, REGNO (reg)); | |
9965 expr = plus_constant (GET_MODE (stack_pointer_rtx), | |
9966 stack_pointer_rtx, -ofst); | |
9967 cselib_add_permanent_equiv (val, expr, get_insns ()); | |
9968 | |
9969 if (ofst) | |
9970 { | |
9971 val = cselib_lookup_from_insn (stack_pointer_rtx, | |
9972 GET_MODE (stack_pointer_rtx), 1, | |
9973 VOIDmode, get_insns ()); | |
9974 preserve_value (val); | |
9975 expr = plus_constant (GET_MODE (reg), reg, ofst); | |
9976 cselib_add_permanent_equiv (val, expr, get_insns ()); | |
9977 } | |
9978 } | |
9979 | |
9980 /* In order to factor out the adjustments made to the stack pointer or to | |
9981 the hard frame pointer and thus be able to use DW_OP_fbreg operations | |
9982 instead of individual location lists, we're going to rewrite MEMs based | |
9983 on them into MEMs based on the CFA by de-eliminating stack_pointer_rtx | |
9984 or hard_frame_pointer_rtx to the virtual CFA pointer frame_pointer_rtx | |
9985 resp. arg_pointer_rtx. We can do this either when there is no frame | |
9986 pointer in the function and stack adjustments are consistent for all | |
9987 basic blocks or when there is a frame pointer and no stack realignment. | |
9988 But we first have to check that frame_pointer_rtx resp. arg_pointer_rtx | |
9989 has been eliminated. */ | |
8289 if (!frame_pointer_needed) | 9990 if (!frame_pointer_needed) |
8290 { | 9991 { |
8291 rtx reg, elim; | 9992 rtx reg, elim; |
8292 | 9993 |
8293 if (!vt_stack_adjustments ()) | 9994 if (!vt_stack_adjustments ()) |
8326 fp_cfa_offset -= INTVAL (XEXP (elim, 1)); | 10027 fp_cfa_offset -= INTVAL (XEXP (elim, 1)); |
8327 elim = XEXP (elim, 0); | 10028 elim = XEXP (elim, 0); |
8328 } | 10029 } |
8329 if (elim != hard_frame_pointer_rtx) | 10030 if (elim != hard_frame_pointer_rtx) |
8330 fp_cfa_offset = -1; | 10031 fp_cfa_offset = -1; |
8331 else | 10032 } |
8332 prologue_bb = single_succ (ENTRY_BLOCK_PTR); | 10033 else |
10034 fp_cfa_offset = -1; | |
10035 } | |
10036 | |
10037 /* If the stack is realigned and a DRAP register is used, we're going to | |
10038 rewrite MEMs based on it representing incoming locations of parameters | |
10039 passed on the stack into MEMs based on the argument pointer. Although | |
10040 we aren't going to rewrite other MEMs, we still need to initialize the | |
10041 virtual CFA pointer in order to ensure that the argument pointer will | |
10042 be seen as a constant throughout the function. | |
10043 | |
10044 ??? This doesn't work if FRAME_POINTER_CFA_OFFSET is defined. */ | |
10045 else if (stack_realign_drap) | |
10046 { | |
10047 rtx reg, elim; | |
10048 | |
10049 #ifdef FRAME_POINTER_CFA_OFFSET | |
10050 reg = frame_pointer_rtx; | |
10051 #else | |
10052 reg = arg_pointer_rtx; | |
10053 #endif | |
10054 elim = eliminate_regs (reg, VOIDmode, NULL_RTX); | |
10055 if (elim != reg) | |
10056 { | |
10057 if (GET_CODE (elim) == PLUS) | |
10058 elim = XEXP (elim, 0); | |
10059 if (elim == hard_frame_pointer_rtx) | |
10060 vt_init_cfa_base (); | |
8333 } | 10061 } |
8334 } | 10062 } |
8335 | 10063 |
8336 hard_frame_pointer_adjustment = -1; | 10064 hard_frame_pointer_adjustment = -1; |
8337 | 10065 |
8338 FOR_EACH_BB (bb) | 10066 vt_add_function_parameters (); |
8339 { | 10067 |
8340 rtx insn; | 10068 FOR_EACH_BB_FN (bb, cfun) |
10069 { | |
10070 rtx_insn *insn; | |
8341 HOST_WIDE_INT pre, post = 0; | 10071 HOST_WIDE_INT pre, post = 0; |
8342 basic_block first_bb, last_bb; | 10072 basic_block first_bb, last_bb; |
8343 | 10073 |
8344 if (MAY_HAVE_DEBUG_INSNS) | 10074 if (MAY_HAVE_DEBUG_INSNS) |
8345 { | 10075 { |
8351 | 10081 |
8352 first_bb = bb; | 10082 first_bb = bb; |
8353 for (;;) | 10083 for (;;) |
8354 { | 10084 { |
8355 edge e; | 10085 edge e; |
8356 if (bb->next_bb == EXIT_BLOCK_PTR | 10086 if (bb->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun) |
8357 || ! single_pred_p (bb->next_bb)) | 10087 || ! single_pred_p (bb->next_bb)) |
8358 break; | 10088 break; |
8359 e = find_edge (bb, bb->next_bb); | 10089 e = find_edge (bb, bb->next_bb); |
8360 if (! e || (e->flags & EDGE_FALLTHRU) == 0) | 10090 if (! e || (e->flags & EDGE_FALLTHRU) == 0) |
8361 break; | 10091 break; |
8383 mo.u.adjust = pre; | 10113 mo.u.adjust = pre; |
8384 mo.insn = insn; | 10114 mo.insn = insn; |
8385 if (dump_file && (dump_flags & TDF_DETAILS)) | 10115 if (dump_file && (dump_flags & TDF_DETAILS)) |
8386 log_op_type (PATTERN (insn), bb, insn, | 10116 log_op_type (PATTERN (insn), bb, insn, |
8387 MO_ADJUST, dump_file); | 10117 MO_ADJUST, dump_file); |
8388 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, | 10118 VTI (bb)->mos.safe_push (mo); |
8389 &mo); | |
8390 VTI (bb)->out.stack_adjust += pre; | 10119 VTI (bb)->out.stack_adjust += pre; |
8391 } | 10120 } |
8392 } | 10121 } |
8393 | 10122 |
8394 cselib_hook_called = false; | 10123 cselib_hook_called = false; |
8395 adjust_insn (bb, insn); | 10124 adjust_insn (bb, insn); |
8396 if (MAY_HAVE_DEBUG_INSNS) | 10125 if (MAY_HAVE_DEBUG_INSNS) |
8397 { | 10126 { |
10127 if (CALL_P (insn)) | |
10128 prepare_call_arguments (bb, insn); | |
8398 cselib_process_insn (insn); | 10129 cselib_process_insn (insn); |
8399 if (dump_file && (dump_flags & TDF_DETAILS)) | 10130 if (dump_file && (dump_flags & TDF_DETAILS)) |
8400 { | 10131 { |
8401 print_rtl_single (dump_file, insn); | 10132 print_rtl_single (dump_file, insn); |
8402 dump_cselib_table (dump_file); | 10133 dump_cselib_table (dump_file); |
8413 mo.u.adjust = post; | 10144 mo.u.adjust = post; |
8414 mo.insn = insn; | 10145 mo.insn = insn; |
8415 if (dump_file && (dump_flags & TDF_DETAILS)) | 10146 if (dump_file && (dump_flags & TDF_DETAILS)) |
8416 log_op_type (PATTERN (insn), bb, insn, | 10147 log_op_type (PATTERN (insn), bb, insn, |
8417 MO_ADJUST, dump_file); | 10148 MO_ADJUST, dump_file); |
8418 VEC_safe_push (micro_operation, heap, VTI (bb)->mos, | 10149 VTI (bb)->mos.safe_push (mo); |
8419 &mo); | |
8420 VTI (bb)->out.stack_adjust += post; | 10150 VTI (bb)->out.stack_adjust += post; |
8421 } | 10151 } |
8422 | 10152 |
8423 if (bb == prologue_bb | 10153 if (fp_cfa_offset != -1 |
8424 && hard_frame_pointer_adjustment == -1 | 10154 && hard_frame_pointer_adjustment == -1 |
8425 && RTX_FRAME_RELATED_P (insn) | 10155 && fp_setter_insn (insn)) |
8426 && fp_setter (insn)) | |
8427 { | 10156 { |
8428 vt_init_cfa_base (); | 10157 vt_init_cfa_base (); |
8429 hard_frame_pointer_adjustment = fp_cfa_offset; | 10158 hard_frame_pointer_adjustment = fp_cfa_offset; |
10159 /* Disassociate sp from fp now. */ | |
10160 if (MAY_HAVE_DEBUG_INSNS) | |
10161 { | |
10162 cselib_val *v; | |
10163 cselib_invalidate_rtx (stack_pointer_rtx); | |
10164 v = cselib_lookup (stack_pointer_rtx, Pmode, 1, | |
10165 VOIDmode); | |
10166 if (v && !cselib_preserved_value_p (v)) | |
10167 { | |
10168 cselib_set_value_sp_based (v); | |
10169 preserve_value (v); | |
10170 } | |
10171 } | |
8430 } | 10172 } |
8431 } | 10173 } |
8432 } | 10174 } |
8433 gcc_assert (offset == VTI (bb)->out.stack_adjust); | 10175 gcc_assert (offset == VTI (bb)->out.stack_adjust); |
8434 } | 10176 } |
8442 cselib_record_sets_hook = NULL; | 10184 cselib_record_sets_hook = NULL; |
8443 } | 10185 } |
8444 } | 10186 } |
8445 | 10187 |
8446 hard_frame_pointer_adjustment = -1; | 10188 hard_frame_pointer_adjustment = -1; |
8447 VTI (ENTRY_BLOCK_PTR)->flooded = true; | 10189 VTI (ENTRY_BLOCK_PTR_FOR_FN (cfun))->flooded = true; |
8448 vt_add_function_parameters (); | |
8449 cfa_base_rtx = NULL_RTX; | 10190 cfa_base_rtx = NULL_RTX; |
8450 return true; | 10191 return true; |
8451 } | 10192 } |
8452 | 10193 |
10194 /* This is *not* reset after each function. It gives each | |
10195 NOTE_INSN_DELETED_DEBUG_LABEL in the entire compilation | |
10196 a unique label number. */ | |
10197 | |
10198 static int debug_label_num = 1; | |
10199 | |
8453 /* Get rid of all debug insns from the insn stream. */ | 10200 /* Get rid of all debug insns from the insn stream. */ |
8454 | 10201 |
8455 static void | 10202 static void |
8456 delete_debug_insns (void) | 10203 delete_debug_insns (void) |
8457 { | 10204 { |
8458 basic_block bb; | 10205 basic_block bb; |
8459 rtx insn, next; | 10206 rtx_insn *insn, *next; |
8460 | 10207 |
8461 if (!MAY_HAVE_DEBUG_INSNS) | 10208 if (!MAY_HAVE_DEBUG_INSNS) |
8462 return; | 10209 return; |
8463 | 10210 |
8464 FOR_EACH_BB (bb) | 10211 FOR_EACH_BB_FN (bb, cfun) |
8465 { | 10212 { |
8466 FOR_BB_INSNS_SAFE (bb, insn, next) | 10213 FOR_BB_INSNS_SAFE (bb, insn, next) |
8467 if (DEBUG_INSN_P (insn)) | 10214 if (DEBUG_INSN_P (insn)) |
8468 delete_insn (insn); | 10215 { |
10216 tree decl = INSN_VAR_LOCATION_DECL (insn); | |
10217 if (TREE_CODE (decl) == LABEL_DECL | |
10218 && DECL_NAME (decl) | |
10219 && !DECL_RTL_SET_P (decl)) | |
10220 { | |
10221 PUT_CODE (insn, NOTE); | |
10222 NOTE_KIND (insn) = NOTE_INSN_DELETED_DEBUG_LABEL; | |
10223 NOTE_DELETED_LABEL_NAME (insn) | |
10224 = IDENTIFIER_POINTER (DECL_NAME (decl)); | |
10225 SET_DECL_RTL (decl, insn); | |
10226 CODE_LABEL_NUMBER (insn) = debug_label_num++; | |
10227 } | |
10228 else | |
10229 delete_insn (insn); | |
10230 } | |
8469 } | 10231 } |
8470 } | 10232 } |
8471 | 10233 |
8472 /* Run a fast, BB-local only version of var tracking, to take care of | 10234 /* Run a fast, BB-local only version of var tracking, to take care of |
8473 information that we don't do global analysis on, such that not all | 10235 information that we don't do global analysis on, such that not all |
8487 static void | 10249 static void |
8488 vt_finalize (void) | 10250 vt_finalize (void) |
8489 { | 10251 { |
8490 basic_block bb; | 10252 basic_block bb; |
8491 | 10253 |
8492 FOR_EACH_BB (bb) | 10254 FOR_EACH_BB_FN (bb, cfun) |
8493 { | 10255 { |
8494 VEC_free (micro_operation, heap, VTI (bb)->mos); | 10256 VTI (bb)->mos.release (); |
8495 } | 10257 } |
8496 | 10258 |
8497 FOR_ALL_BB (bb) | 10259 FOR_ALL_BB_FN (bb, cfun) |
8498 { | 10260 { |
8499 dataflow_set_destroy (&VTI (bb)->in); | 10261 dataflow_set_destroy (&VTI (bb)->in); |
8500 dataflow_set_destroy (&VTI (bb)->out); | 10262 dataflow_set_destroy (&VTI (bb)->out); |
8501 if (VTI (bb)->permp) | 10263 if (VTI (bb)->permp) |
8502 { | 10264 { |
8503 dataflow_set_destroy (VTI (bb)->permp); | 10265 dataflow_set_destroy (VTI (bb)->permp); |
8504 XDELETE (VTI (bb)->permp); | 10266 XDELETE (VTI (bb)->permp); |
8505 } | 10267 } |
8506 } | 10268 } |
8507 free_aux_for_blocks (); | 10269 free_aux_for_blocks (); |
8508 htab_delete (empty_shared_hash->htab); | 10270 delete empty_shared_hash->htab; |
8509 htab_delete (changed_variables); | 10271 empty_shared_hash->htab = NULL; |
8510 free_alloc_pool (attrs_pool); | 10272 delete changed_variables; |
8511 free_alloc_pool (var_pool); | 10273 changed_variables = NULL; |
8512 free_alloc_pool (loc_chain_pool); | 10274 attrs_pool.release (); |
8513 free_alloc_pool (shared_hash_pool); | 10275 var_pool.release (); |
10276 location_chain_pool.release (); | |
10277 shared_hash_pool.release (); | |
8514 | 10278 |
8515 if (MAY_HAVE_DEBUG_INSNS) | 10279 if (MAY_HAVE_DEBUG_INSNS) |
8516 { | 10280 { |
8517 htab_delete (value_chains); | 10281 if (global_get_addr_cache) |
8518 free_alloc_pool (value_chain_pool); | 10282 delete global_get_addr_cache; |
8519 free_alloc_pool (valvar_pool); | 10283 global_get_addr_cache = NULL; |
8520 VEC_free (rtx, heap, preserved_values); | 10284 loc_exp_dep_pool.release (); |
10285 valvar_pool.release (); | |
10286 preserved_values.release (); | |
8521 cselib_finish (); | 10287 cselib_finish (); |
8522 BITMAP_FREE (scratch_regs); | 10288 BITMAP_FREE (scratch_regs); |
8523 scratch_regs = NULL; | 10289 scratch_regs = NULL; |
8524 } | 10290 } |
10291 | |
10292 #ifdef HAVE_window_save | |
10293 vec_free (windowed_parm_regs); | |
10294 #endif | |
8525 | 10295 |
8526 if (vui_vec) | 10296 if (vui_vec) |
8527 XDELETEVEC (vui_vec); | 10297 XDELETEVEC (vui_vec); |
8528 vui_vec = NULL; | 10298 vui_vec = NULL; |
8529 vui_allocated = 0; | 10299 vui_allocated = 0; |
8534 static inline unsigned int | 10304 static inline unsigned int |
8535 variable_tracking_main_1 (void) | 10305 variable_tracking_main_1 (void) |
8536 { | 10306 { |
8537 bool success; | 10307 bool success; |
8538 | 10308 |
8539 if (flag_var_tracking_assignments < 0) | 10309 if (flag_var_tracking_assignments < 0 |
10310 /* Var-tracking right now assumes the IR doesn't contain | |
10311 any pseudos at this point. */ | |
10312 || targetm.no_register_allocation) | |
8540 { | 10313 { |
8541 delete_debug_insns (); | 10314 delete_debug_insns (); |
8542 return 0; | 10315 return 0; |
8543 } | 10316 } |
8544 | 10317 |
8545 if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20) | 10318 if (n_basic_blocks_for_fn (cfun) > 500 && |
10319 n_edges_for_fn (cfun) / n_basic_blocks_for_fn (cfun) >= 20) | |
8546 { | 10320 { |
8547 vt_debug_insns_local (true); | 10321 vt_debug_insns_local (true); |
8548 return 0; | 10322 return 0; |
8549 } | 10323 } |
8550 | 10324 |
8581 } | 10355 } |
8582 | 10356 |
8583 if (dump_file && (dump_flags & TDF_DETAILS)) | 10357 if (dump_file && (dump_flags & TDF_DETAILS)) |
8584 { | 10358 { |
8585 dump_dataflow_sets (); | 10359 dump_dataflow_sets (); |
10360 dump_reg_info (dump_file); | |
8586 dump_flow_info (dump_file, dump_flags); | 10361 dump_flow_info (dump_file, dump_flags); |
8587 } | 10362 } |
8588 | 10363 |
8589 timevar_push (TV_VAR_TRACKING_EMIT); | 10364 timevar_push (TV_VAR_TRACKING_EMIT); |
8590 vt_emit_notes (); | 10365 vt_emit_notes (); |
8606 flag_var_tracking_assignments = save; | 10381 flag_var_tracking_assignments = save; |
8607 | 10382 |
8608 return ret; | 10383 return ret; |
8609 } | 10384 } |
8610 | 10385 |
8611 static bool | 10386 namespace { |
8612 gate_handle_var_tracking (void) | 10387 |
8613 { | 10388 const pass_data pass_data_variable_tracking = |
8614 return (flag_var_tracking); | 10389 { |
8615 } | 10390 RTL_PASS, /* type */ |
8616 | 10391 "vartrack", /* name */ |
8617 | 10392 OPTGROUP_NONE, /* optinfo_flags */ |
8618 | 10393 TV_VAR_TRACKING, /* tv_id */ |
8619 struct rtl_opt_pass pass_variable_tracking = | 10394 0, /* properties_required */ |
8620 { | 10395 0, /* properties_provided */ |
8621 { | 10396 0, /* properties_destroyed */ |
8622 RTL_PASS, | 10397 0, /* todo_flags_start */ |
8623 "vartrack", /* name */ | 10398 0, /* todo_flags_finish */ |
8624 gate_handle_var_tracking, /* gate */ | |
8625 variable_tracking_main, /* execute */ | |
8626 NULL, /* sub */ | |
8627 NULL, /* next */ | |
8628 0, /* static_pass_number */ | |
8629 TV_VAR_TRACKING, /* tv_id */ | |
8630 0, /* properties_required */ | |
8631 0, /* properties_provided */ | |
8632 0, /* properties_destroyed */ | |
8633 0, /* todo_flags_start */ | |
8634 TODO_dump_func | TODO_verify_rtl_sharing/* todo_flags_finish */ | |
8635 } | |
8636 }; | 10399 }; |
10400 | |
10401 class pass_variable_tracking : public rtl_opt_pass | |
10402 { | |
10403 public: | |
10404 pass_variable_tracking (gcc::context *ctxt) | |
10405 : rtl_opt_pass (pass_data_variable_tracking, ctxt) | |
10406 {} | |
10407 | |
10408 /* opt_pass methods: */ | |
10409 virtual bool gate (function *) | |
10410 { | |
10411 return (flag_var_tracking && !targetm.delay_vartrack); | |
10412 } | |
10413 | |
10414 virtual unsigned int execute (function *) | |
10415 { | |
10416 return variable_tracking_main (); | |
10417 } | |
10418 | |
10419 }; // class pass_variable_tracking | |
10420 | |
10421 } // anon namespace | |
10422 | |
10423 rtl_opt_pass * | |
10424 make_pass_variable_tracking (gcc::context *ctxt) | |
10425 { | |
10426 return new pass_variable_tracking (ctxt); | |
10427 } |