Mercurial > hg > CbC > CbC_gcc
comparison gcc/cselib.c @ 0:a06113de4d67
first commit
author | kent <kent@cr.ie.u-ryukyu.ac.jp> |
---|---|
date | Fri, 17 Jul 2009 14:47:48 +0900 |
parents | |
children | 58ad6c70ea60 |
comparison
equal
deleted
inserted
replaced
-1:000000000000 | 0:a06113de4d67 |
---|---|
1 /* Common subexpression elimination library for GNU compiler. | |
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998, | |
3 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008 | |
4 Free Software Foundation, Inc. | |
5 | |
6 This file is part of GCC. | |
7 | |
8 GCC is free software; you can redistribute it and/or modify it under | |
9 the terms of the GNU General Public License as published by the Free | |
10 Software Foundation; either version 3, or (at your option) any later | |
11 version. | |
12 | |
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
16 for more details. | |
17 | |
18 You should have received a copy of the GNU General Public License | |
19 along with GCC; see the file COPYING3. If not see | |
20 <http://www.gnu.org/licenses/>. */ | |
21 | |
22 #include "config.h" | |
23 #include "system.h" | |
24 #include "coretypes.h" | |
25 #include "tm.h" | |
26 | |
27 #include "rtl.h" | |
28 #include "tm_p.h" | |
29 #include "regs.h" | |
30 #include "hard-reg-set.h" | |
31 #include "flags.h" | |
32 #include "real.h" | |
33 #include "insn-config.h" | |
34 #include "recog.h" | |
35 #include "function.h" | |
36 #include "emit-rtl.h" | |
37 #include "toplev.h" | |
38 #include "output.h" | |
39 #include "ggc.h" | |
40 #include "hashtab.h" | |
41 #include "cselib.h" | |
42 #include "params.h" | |
43 #include "alloc-pool.h" | |
44 #include "target.h" | |
45 | |
46 static bool cselib_record_memory; | |
47 static int entry_and_rtx_equal_p (const void *, const void *); | |
48 static hashval_t get_value_hash (const void *); | |
49 static struct elt_list *new_elt_list (struct elt_list *, cselib_val *); | |
50 static struct elt_loc_list *new_elt_loc_list (struct elt_loc_list *, rtx); | |
51 static void unchain_one_value (cselib_val *); | |
52 static void unchain_one_elt_list (struct elt_list **); | |
53 static void unchain_one_elt_loc_list (struct elt_loc_list **); | |
54 static int discard_useless_locs (void **, void *); | |
55 static int discard_useless_values (void **, void *); | |
56 static void remove_useless_values (void); | |
57 static rtx wrap_constant (enum machine_mode, rtx); | |
58 static unsigned int cselib_hash_rtx (rtx, int); | |
59 static cselib_val *new_cselib_val (unsigned int, enum machine_mode); | |
60 static void add_mem_for_addr (cselib_val *, cselib_val *, rtx); | |
61 static cselib_val *cselib_lookup_mem (rtx, int); | |
62 static void cselib_invalidate_regno (unsigned int, enum machine_mode); | |
63 static void cselib_invalidate_mem (rtx); | |
64 static void cselib_record_set (rtx, cselib_val *, cselib_val *); | |
65 static void cselib_record_sets (rtx); | |
66 | |
67 /* There are three ways in which cselib can look up an rtx: | |
68 - for a REG, the reg_values table (which is indexed by regno) is used | |
69 - for a MEM, we recursively look up its address and then follow the | |
70 addr_list of that value | |
71 - for everything else, we compute a hash value and go through the hash | |
72 table. Since different rtx's can still have the same hash value, | |
73 this involves walking the table entries for a given value and comparing | |
74 the locations of the entries with the rtx we are looking up. */ | |
75 | |
76 /* A table that enables us to look up elts by their value. */ | |
77 static htab_t cselib_hash_table; | |
78 | |
79 /* This is a global so we don't have to pass this through every function. | |
80 It is used in new_elt_loc_list to set SETTING_INSN. */ | |
81 static rtx cselib_current_insn; | |
82 | |
83 /* Every new unknown value gets a unique number. */ | |
84 static unsigned int next_unknown_value; | |
85 | |
86 /* The number of registers we had when the varrays were last resized. */ | |
87 static unsigned int cselib_nregs; | |
88 | |
89 /* Count values without known locations. Whenever this grows too big, we | |
90 remove these useless values from the table. */ | |
91 static int n_useless_values; | |
92 | |
93 /* Number of useless values before we remove them from the hash table. */ | |
94 #define MAX_USELESS_VALUES 32 | |
95 | |
96 /* This table maps from register number to values. It does not | |
97 contain pointers to cselib_val structures, but rather elt_lists. | |
98 The purpose is to be able to refer to the same register in | |
99 different modes. The first element of the list defines the mode in | |
100 which the register was set; if the mode is unknown or the value is | |
101 no longer valid in that mode, ELT will be NULL for the first | |
102 element. */ | |
103 static struct elt_list **reg_values; | |
104 static unsigned int reg_values_size; | |
105 #define REG_VALUES(i) reg_values[i] | |
106 | |
107 /* The largest number of hard regs used by any entry added to the | |
108 REG_VALUES table. Cleared on each cselib_clear_table() invocation. */ | |
109 static unsigned int max_value_regs; | |
110 | |
111 /* Here the set of indices I with REG_VALUES(I) != 0 is saved. This is used | |
112 in cselib_clear_table() for fast emptying. */ | |
113 static unsigned int *used_regs; | |
114 static unsigned int n_used_regs; | |
115 | |
116 /* We pass this to cselib_invalidate_mem to invalidate all of | |
117 memory for a non-const call instruction. */ | |
118 static GTY(()) rtx callmem; | |
119 | |
120 /* Set by discard_useless_locs if it deleted the last location of any | |
121 value. */ | |
122 static int values_became_useless; | |
123 | |
124 /* Used as stop element of the containing_mem list so we can check | |
125 presence in the list by checking the next pointer. */ | |
126 static cselib_val dummy_val; | |
127 | |
128 /* Used to list all values that contain memory reference. | |
129 May or may not contain the useless values - the list is compacted | |
130 each time memory is invalidated. */ | |
131 static cselib_val *first_containing_mem = &dummy_val; | |
132 static alloc_pool elt_loc_list_pool, elt_list_pool, cselib_val_pool, value_pool; | |
133 | |
134 /* If nonnull, cselib will call this function before freeing useless | |
135 VALUEs. A VALUE is deemed useless if its "locs" field is null. */ | |
136 void (*cselib_discard_hook) (cselib_val *); | |
137 | |
138 | |
139 /* Allocate a struct elt_list and fill in its two elements with the | |
140 arguments. */ | |
141 | |
142 static inline struct elt_list * | |
143 new_elt_list (struct elt_list *next, cselib_val *elt) | |
144 { | |
145 struct elt_list *el; | |
146 el = (struct elt_list *) pool_alloc (elt_list_pool); | |
147 el->next = next; | |
148 el->elt = elt; | |
149 return el; | |
150 } | |
151 | |
152 /* Allocate a struct elt_loc_list and fill in its two elements with the | |
153 arguments. */ | |
154 | |
155 static inline struct elt_loc_list * | |
156 new_elt_loc_list (struct elt_loc_list *next, rtx loc) | |
157 { | |
158 struct elt_loc_list *el; | |
159 el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool); | |
160 el->next = next; | |
161 el->loc = loc; | |
162 el->setting_insn = cselib_current_insn; | |
163 return el; | |
164 } | |
165 | |
166 /* The elt_list at *PL is no longer needed. Unchain it and free its | |
167 storage. */ | |
168 | |
169 static inline void | |
170 unchain_one_elt_list (struct elt_list **pl) | |
171 { | |
172 struct elt_list *l = *pl; | |
173 | |
174 *pl = l->next; | |
175 pool_free (elt_list_pool, l); | |
176 } | |
177 | |
178 /* Likewise for elt_loc_lists. */ | |
179 | |
180 static void | |
181 unchain_one_elt_loc_list (struct elt_loc_list **pl) | |
182 { | |
183 struct elt_loc_list *l = *pl; | |
184 | |
185 *pl = l->next; | |
186 pool_free (elt_loc_list_pool, l); | |
187 } | |
188 | |
189 /* Likewise for cselib_vals. This also frees the addr_list associated with | |
190 V. */ | |
191 | |
192 static void | |
193 unchain_one_value (cselib_val *v) | |
194 { | |
195 while (v->addr_list) | |
196 unchain_one_elt_list (&v->addr_list); | |
197 | |
198 pool_free (cselib_val_pool, v); | |
199 } | |
200 | |
201 /* Remove all entries from the hash table. Also used during | |
202 initialization. If CLEAR_ALL isn't set, then only clear the entries | |
203 which are known to have been used. */ | |
204 | |
205 void | |
206 cselib_clear_table (void) | |
207 { | |
208 unsigned int i; | |
209 | |
210 for (i = 0; i < n_used_regs; i++) | |
211 REG_VALUES (used_regs[i]) = 0; | |
212 | |
213 max_value_regs = 0; | |
214 | |
215 n_used_regs = 0; | |
216 | |
217 htab_empty (cselib_hash_table); | |
218 | |
219 n_useless_values = 0; | |
220 | |
221 next_unknown_value = 0; | |
222 | |
223 first_containing_mem = &dummy_val; | |
224 } | |
225 | |
226 /* The equality test for our hash table. The first argument ENTRY is a table | |
227 element (i.e. a cselib_val), while the second arg X is an rtx. We know | |
228 that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a | |
229 CONST of an appropriate mode. */ | |
230 | |
231 static int | |
232 entry_and_rtx_equal_p (const void *entry, const void *x_arg) | |
233 { | |
234 struct elt_loc_list *l; | |
235 const cselib_val *const v = (const cselib_val *) entry; | |
236 rtx x = CONST_CAST_RTX ((const_rtx)x_arg); | |
237 enum machine_mode mode = GET_MODE (x); | |
238 | |
239 gcc_assert (GET_CODE (x) != CONST_INT && GET_CODE (x) != CONST_FIXED | |
240 && (mode != VOIDmode || GET_CODE (x) != CONST_DOUBLE)); | |
241 | |
242 if (mode != GET_MODE (v->val_rtx)) | |
243 return 0; | |
244 | |
245 /* Unwrap X if necessary. */ | |
246 if (GET_CODE (x) == CONST | |
247 && (GET_CODE (XEXP (x, 0)) == CONST_INT | |
248 || GET_CODE (XEXP (x, 0)) == CONST_FIXED | |
249 || GET_CODE (XEXP (x, 0)) == CONST_DOUBLE)) | |
250 x = XEXP (x, 0); | |
251 | |
252 /* We don't guarantee that distinct rtx's have different hash values, | |
253 so we need to do a comparison. */ | |
254 for (l = v->locs; l; l = l->next) | |
255 if (rtx_equal_for_cselib_p (l->loc, x)) | |
256 return 1; | |
257 | |
258 return 0; | |
259 } | |
260 | |
261 /* The hash function for our hash table. The value is always computed with | |
262 cselib_hash_rtx when adding an element; this function just extracts the | |
263 hash value from a cselib_val structure. */ | |
264 | |
265 static hashval_t | |
266 get_value_hash (const void *entry) | |
267 { | |
268 const cselib_val *const v = (const cselib_val *) entry; | |
269 return v->value; | |
270 } | |
271 | |
272 /* Return true if X contains a VALUE rtx. If ONLY_USELESS is set, we | |
273 only return true for values which point to a cselib_val whose value | |
274 element has been set to zero, which implies the cselib_val will be | |
275 removed. */ | |
276 | |
277 int | |
278 references_value_p (const_rtx x, int only_useless) | |
279 { | |
280 const enum rtx_code code = GET_CODE (x); | |
281 const char *fmt = GET_RTX_FORMAT (code); | |
282 int i, j; | |
283 | |
284 if (GET_CODE (x) == VALUE | |
285 && (! only_useless || CSELIB_VAL_PTR (x)->locs == 0)) | |
286 return 1; | |
287 | |
288 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
289 { | |
290 if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless)) | |
291 return 1; | |
292 else if (fmt[i] == 'E') | |
293 for (j = 0; j < XVECLEN (x, i); j++) | |
294 if (references_value_p (XVECEXP (x, i, j), only_useless)) | |
295 return 1; | |
296 } | |
297 | |
298 return 0; | |
299 } | |
300 | |
301 /* For all locations found in X, delete locations that reference useless | |
302 values (i.e. values without any location). Called through | |
303 htab_traverse. */ | |
304 | |
305 static int | |
306 discard_useless_locs (void **x, void *info ATTRIBUTE_UNUSED) | |
307 { | |
308 cselib_val *v = (cselib_val *)*x; | |
309 struct elt_loc_list **p = &v->locs; | |
310 int had_locs = v->locs != 0; | |
311 | |
312 while (*p) | |
313 { | |
314 if (references_value_p ((*p)->loc, 1)) | |
315 unchain_one_elt_loc_list (p); | |
316 else | |
317 p = &(*p)->next; | |
318 } | |
319 | |
320 if (had_locs && v->locs == 0) | |
321 { | |
322 n_useless_values++; | |
323 values_became_useless = 1; | |
324 } | |
325 return 1; | |
326 } | |
327 | |
328 /* If X is a value with no locations, remove it from the hashtable. */ | |
329 | |
330 static int | |
331 discard_useless_values (void **x, void *info ATTRIBUTE_UNUSED) | |
332 { | |
333 cselib_val *v = (cselib_val *)*x; | |
334 | |
335 if (v->locs == 0) | |
336 { | |
337 if (cselib_discard_hook) | |
338 cselib_discard_hook (v); | |
339 | |
340 CSELIB_VAL_PTR (v->val_rtx) = NULL; | |
341 htab_clear_slot (cselib_hash_table, x); | |
342 unchain_one_value (v); | |
343 n_useless_values--; | |
344 } | |
345 | |
346 return 1; | |
347 } | |
348 | |
349 /* Clean out useless values (i.e. those which no longer have locations | |
350 associated with them) from the hash table. */ | |
351 | |
352 static void | |
353 remove_useless_values (void) | |
354 { | |
355 cselib_val **p, *v; | |
356 /* First pass: eliminate locations that reference the value. That in | |
357 turn can make more values useless. */ | |
358 do | |
359 { | |
360 values_became_useless = 0; | |
361 htab_traverse (cselib_hash_table, discard_useless_locs, 0); | |
362 } | |
363 while (values_became_useless); | |
364 | |
365 /* Second pass: actually remove the values. */ | |
366 | |
367 p = &first_containing_mem; | |
368 for (v = *p; v != &dummy_val; v = v->next_containing_mem) | |
369 if (v->locs) | |
370 { | |
371 *p = v; | |
372 p = &(*p)->next_containing_mem; | |
373 } | |
374 *p = &dummy_val; | |
375 | |
376 htab_traverse (cselib_hash_table, discard_useless_values, 0); | |
377 | |
378 gcc_assert (!n_useless_values); | |
379 } | |
380 | |
381 /* Return the mode in which a register was last set. If X is not a | |
382 register, return its mode. If the mode in which the register was | |
383 set is not known, or the value was already clobbered, return | |
384 VOIDmode. */ | |
385 | |
386 enum machine_mode | |
387 cselib_reg_set_mode (const_rtx x) | |
388 { | |
389 if (!REG_P (x)) | |
390 return GET_MODE (x); | |
391 | |
392 if (REG_VALUES (REGNO (x)) == NULL | |
393 || REG_VALUES (REGNO (x))->elt == NULL) | |
394 return VOIDmode; | |
395 | |
396 return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx); | |
397 } | |
398 | |
399 /* Return nonzero if we can prove that X and Y contain the same value, taking | |
400 our gathered information into account. */ | |
401 | |
402 int | |
403 rtx_equal_for_cselib_p (rtx x, rtx y) | |
404 { | |
405 enum rtx_code code; | |
406 const char *fmt; | |
407 int i; | |
408 | |
409 if (REG_P (x) || MEM_P (x)) | |
410 { | |
411 cselib_val *e = cselib_lookup (x, GET_MODE (x), 0); | |
412 | |
413 if (e) | |
414 x = e->val_rtx; | |
415 } | |
416 | |
417 if (REG_P (y) || MEM_P (y)) | |
418 { | |
419 cselib_val *e = cselib_lookup (y, GET_MODE (y), 0); | |
420 | |
421 if (e) | |
422 y = e->val_rtx; | |
423 } | |
424 | |
425 if (x == y) | |
426 return 1; | |
427 | |
428 if (GET_CODE (x) == VALUE && GET_CODE (y) == VALUE) | |
429 return CSELIB_VAL_PTR (x) == CSELIB_VAL_PTR (y); | |
430 | |
431 if (GET_CODE (x) == VALUE) | |
432 { | |
433 cselib_val *e = CSELIB_VAL_PTR (x); | |
434 struct elt_loc_list *l; | |
435 | |
436 for (l = e->locs; l; l = l->next) | |
437 { | |
438 rtx t = l->loc; | |
439 | |
440 /* Avoid infinite recursion. */ | |
441 if (REG_P (t) || MEM_P (t)) | |
442 continue; | |
443 else if (rtx_equal_for_cselib_p (t, y)) | |
444 return 1; | |
445 } | |
446 | |
447 return 0; | |
448 } | |
449 | |
450 if (GET_CODE (y) == VALUE) | |
451 { | |
452 cselib_val *e = CSELIB_VAL_PTR (y); | |
453 struct elt_loc_list *l; | |
454 | |
455 for (l = e->locs; l; l = l->next) | |
456 { | |
457 rtx t = l->loc; | |
458 | |
459 if (REG_P (t) || MEM_P (t)) | |
460 continue; | |
461 else if (rtx_equal_for_cselib_p (x, t)) | |
462 return 1; | |
463 } | |
464 | |
465 return 0; | |
466 } | |
467 | |
468 if (GET_CODE (x) != GET_CODE (y) || GET_MODE (x) != GET_MODE (y)) | |
469 return 0; | |
470 | |
471 /* These won't be handled correctly by the code below. */ | |
472 switch (GET_CODE (x)) | |
473 { | |
474 case CONST_DOUBLE: | |
475 case CONST_FIXED: | |
476 return 0; | |
477 | |
478 case LABEL_REF: | |
479 return XEXP (x, 0) == XEXP (y, 0); | |
480 | |
481 default: | |
482 break; | |
483 } | |
484 | |
485 code = GET_CODE (x); | |
486 fmt = GET_RTX_FORMAT (code); | |
487 | |
488 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) | |
489 { | |
490 int j; | |
491 | |
492 switch (fmt[i]) | |
493 { | |
494 case 'w': | |
495 if (XWINT (x, i) != XWINT (y, i)) | |
496 return 0; | |
497 break; | |
498 | |
499 case 'n': | |
500 case 'i': | |
501 if (XINT (x, i) != XINT (y, i)) | |
502 return 0; | |
503 break; | |
504 | |
505 case 'V': | |
506 case 'E': | |
507 /* Two vectors must have the same length. */ | |
508 if (XVECLEN (x, i) != XVECLEN (y, i)) | |
509 return 0; | |
510 | |
511 /* And the corresponding elements must match. */ | |
512 for (j = 0; j < XVECLEN (x, i); j++) | |
513 if (! rtx_equal_for_cselib_p (XVECEXP (x, i, j), | |
514 XVECEXP (y, i, j))) | |
515 return 0; | |
516 break; | |
517 | |
518 case 'e': | |
519 if (i == 1 | |
520 && targetm.commutative_p (x, UNKNOWN) | |
521 && rtx_equal_for_cselib_p (XEXP (x, 1), XEXP (y, 0)) | |
522 && rtx_equal_for_cselib_p (XEXP (x, 0), XEXP (y, 1))) | |
523 return 1; | |
524 if (! rtx_equal_for_cselib_p (XEXP (x, i), XEXP (y, i))) | |
525 return 0; | |
526 break; | |
527 | |
528 case 'S': | |
529 case 's': | |
530 if (strcmp (XSTR (x, i), XSTR (y, i))) | |
531 return 0; | |
532 break; | |
533 | |
534 case 'u': | |
535 /* These are just backpointers, so they don't matter. */ | |
536 break; | |
537 | |
538 case '0': | |
539 case 't': | |
540 break; | |
541 | |
542 /* It is believed that rtx's at this level will never | |
543 contain anything but integers and other rtx's, | |
544 except for within LABEL_REFs and SYMBOL_REFs. */ | |
545 default: | |
546 gcc_unreachable (); | |
547 } | |
548 } | |
549 return 1; | |
550 } | |
551 | |
552 /* We need to pass down the mode of constants through the hash table | |
553 functions. For that purpose, wrap them in a CONST of the appropriate | |
554 mode. */ | |
555 static rtx | |
556 wrap_constant (enum machine_mode mode, rtx x) | |
557 { | |
558 if (GET_CODE (x) != CONST_INT && GET_CODE (x) != CONST_FIXED | |
559 && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode)) | |
560 return x; | |
561 gcc_assert (mode != VOIDmode); | |
562 return gen_rtx_CONST (mode, x); | |
563 } | |
564 | |
565 /* Hash an rtx. Return 0 if we couldn't hash the rtx. | |
566 For registers and memory locations, we look up their cselib_val structure | |
567 and return its VALUE element. | |
568 Possible reasons for return 0 are: the object is volatile, or we couldn't | |
569 find a register or memory location in the table and CREATE is zero. If | |
570 CREATE is nonzero, table elts are created for regs and mem. | |
571 N.B. this hash function returns the same hash value for RTXes that | |
572 differ only in the order of operands, thus it is suitable for comparisons | |
573 that take commutativity into account. | |
574 If we wanted to also support associative rules, we'd have to use a different | |
575 strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1) . | |
576 We used to have a MODE argument for hashing for CONST_INTs, but that | |
577 didn't make sense, since it caused spurious hash differences between | |
578 (set (reg:SI 1) (const_int)) | |
579 (plus:SI (reg:SI 2) (reg:SI 1)) | |
580 and | |
581 (plus:SI (reg:SI 2) (const_int)) | |
582 If the mode is important in any context, it must be checked specifically | |
583 in a comparison anyway, since relying on hash differences is unsafe. */ | |
584 | |
585 static unsigned int | |
586 cselib_hash_rtx (rtx x, int create) | |
587 { | |
588 cselib_val *e; | |
589 int i, j; | |
590 enum rtx_code code; | |
591 const char *fmt; | |
592 unsigned int hash = 0; | |
593 | |
594 code = GET_CODE (x); | |
595 hash += (unsigned) code + (unsigned) GET_MODE (x); | |
596 | |
597 switch (code) | |
598 { | |
599 case MEM: | |
600 case REG: | |
601 e = cselib_lookup (x, GET_MODE (x), create); | |
602 if (! e) | |
603 return 0; | |
604 | |
605 return e->value; | |
606 | |
607 case CONST_INT: | |
608 hash += ((unsigned) CONST_INT << 7) + INTVAL (x); | |
609 return hash ? hash : (unsigned int) CONST_INT; | |
610 | |
611 case CONST_DOUBLE: | |
612 /* This is like the general case, except that it only counts | |
613 the integers representing the constant. */ | |
614 hash += (unsigned) code + (unsigned) GET_MODE (x); | |
615 if (GET_MODE (x) != VOIDmode) | |
616 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x)); | |
617 else | |
618 hash += ((unsigned) CONST_DOUBLE_LOW (x) | |
619 + (unsigned) CONST_DOUBLE_HIGH (x)); | |
620 return hash ? hash : (unsigned int) CONST_DOUBLE; | |
621 | |
622 case CONST_FIXED: | |
623 hash += (unsigned int) code + (unsigned int) GET_MODE (x); | |
624 hash += fixed_hash (CONST_FIXED_VALUE (x)); | |
625 return hash ? hash : (unsigned int) CONST_FIXED; | |
626 | |
627 case CONST_VECTOR: | |
628 { | |
629 int units; | |
630 rtx elt; | |
631 | |
632 units = CONST_VECTOR_NUNITS (x); | |
633 | |
634 for (i = 0; i < units; ++i) | |
635 { | |
636 elt = CONST_VECTOR_ELT (x, i); | |
637 hash += cselib_hash_rtx (elt, 0); | |
638 } | |
639 | |
640 return hash; | |
641 } | |
642 | |
643 /* Assume there is only one rtx object for any given label. */ | |
644 case LABEL_REF: | |
645 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap | |
646 differences and differences between each stage's debugging dumps. */ | |
647 hash += (((unsigned int) LABEL_REF << 7) | |
648 + CODE_LABEL_NUMBER (XEXP (x, 0))); | |
649 return hash ? hash : (unsigned int) LABEL_REF; | |
650 | |
651 case SYMBOL_REF: | |
652 { | |
653 /* Don't hash on the symbol's address to avoid bootstrap differences. | |
654 Different hash values may cause expressions to be recorded in | |
655 different orders and thus different registers to be used in the | |
656 final assembler. This also avoids differences in the dump files | |
657 between various stages. */ | |
658 unsigned int h = 0; | |
659 const unsigned char *p = (const unsigned char *) XSTR (x, 0); | |
660 | |
661 while (*p) | |
662 h += (h << 7) + *p++; /* ??? revisit */ | |
663 | |
664 hash += ((unsigned int) SYMBOL_REF << 7) + h; | |
665 return hash ? hash : (unsigned int) SYMBOL_REF; | |
666 } | |
667 | |
668 case PRE_DEC: | |
669 case PRE_INC: | |
670 case POST_DEC: | |
671 case POST_INC: | |
672 case POST_MODIFY: | |
673 case PRE_MODIFY: | |
674 case PC: | |
675 case CC0: | |
676 case CALL: | |
677 case UNSPEC_VOLATILE: | |
678 return 0; | |
679 | |
680 case ASM_OPERANDS: | |
681 if (MEM_VOLATILE_P (x)) | |
682 return 0; | |
683 | |
684 break; | |
685 | |
686 default: | |
687 break; | |
688 } | |
689 | |
690 i = GET_RTX_LENGTH (code) - 1; | |
691 fmt = GET_RTX_FORMAT (code); | |
692 for (; i >= 0; i--) | |
693 { | |
694 switch (fmt[i]) | |
695 { | |
696 case 'e': | |
697 { | |
698 rtx tem = XEXP (x, i); | |
699 unsigned int tem_hash = cselib_hash_rtx (tem, create); | |
700 | |
701 if (tem_hash == 0) | |
702 return 0; | |
703 | |
704 hash += tem_hash; | |
705 } | |
706 break; | |
707 case 'E': | |
708 for (j = 0; j < XVECLEN (x, i); j++) | |
709 { | |
710 unsigned int tem_hash | |
711 = cselib_hash_rtx (XVECEXP (x, i, j), create); | |
712 | |
713 if (tem_hash == 0) | |
714 return 0; | |
715 | |
716 hash += tem_hash; | |
717 } | |
718 break; | |
719 | |
720 case 's': | |
721 { | |
722 const unsigned char *p = (const unsigned char *) XSTR (x, i); | |
723 | |
724 if (p) | |
725 while (*p) | |
726 hash += *p++; | |
727 break; | |
728 } | |
729 | |
730 case 'i': | |
731 hash += XINT (x, i); | |
732 break; | |
733 | |
734 case '0': | |
735 case 't': | |
736 /* unused */ | |
737 break; | |
738 | |
739 default: | |
740 gcc_unreachable (); | |
741 } | |
742 } | |
743 | |
744 return hash ? hash : 1 + (unsigned int) GET_CODE (x); | |
745 } | |
746 | |
747 /* Create a new value structure for VALUE and initialize it. The mode of the | |
748 value is MODE. */ | |
749 | |
750 static inline cselib_val * | |
751 new_cselib_val (unsigned int value, enum machine_mode mode) | |
752 { | |
753 cselib_val *e = (cselib_val *) pool_alloc (cselib_val_pool); | |
754 | |
755 gcc_assert (value); | |
756 | |
757 e->value = value; | |
758 /* We use an alloc pool to allocate this RTL construct because it | |
759 accounts for about 8% of the overall memory usage. We know | |
760 precisely when we can have VALUE RTXen (when cselib is active) | |
761 so we don't need to put them in garbage collected memory. | |
762 ??? Why should a VALUE be an RTX in the first place? */ | |
763 e->val_rtx = (rtx) pool_alloc (value_pool); | |
764 memset (e->val_rtx, 0, RTX_HDR_SIZE); | |
765 PUT_CODE (e->val_rtx, VALUE); | |
766 PUT_MODE (e->val_rtx, mode); | |
767 CSELIB_VAL_PTR (e->val_rtx) = e; | |
768 e->addr_list = 0; | |
769 e->locs = 0; | |
770 e->next_containing_mem = 0; | |
771 return e; | |
772 } | |
773 | |
774 /* ADDR_ELT is a value that is used as address. MEM_ELT is the value that | |
775 contains the data at this address. X is a MEM that represents the | |
776 value. Update the two value structures to represent this situation. */ | |
777 | |
778 static void | |
779 add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x) | |
780 { | |
781 struct elt_loc_list *l; | |
782 | |
783 /* Avoid duplicates. */ | |
784 for (l = mem_elt->locs; l; l = l->next) | |
785 if (MEM_P (l->loc) | |
786 && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt) | |
787 return; | |
788 | |
789 addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt); | |
790 mem_elt->locs | |
791 = new_elt_loc_list (mem_elt->locs, | |
792 replace_equiv_address_nv (x, addr_elt->val_rtx)); | |
793 if (mem_elt->next_containing_mem == NULL) | |
794 { | |
795 mem_elt->next_containing_mem = first_containing_mem; | |
796 first_containing_mem = mem_elt; | |
797 } | |
798 } | |
799 | |
800 /* Subroutine of cselib_lookup. Return a value for X, which is a MEM rtx. | |
801 If CREATE, make a new one if we haven't seen it before. */ | |
802 | |
803 static cselib_val * | |
804 cselib_lookup_mem (rtx x, int create) | |
805 { | |
806 enum machine_mode mode = GET_MODE (x); | |
807 void **slot; | |
808 cselib_val *addr; | |
809 cselib_val *mem_elt; | |
810 struct elt_list *l; | |
811 | |
812 if (MEM_VOLATILE_P (x) || mode == BLKmode | |
813 || !cselib_record_memory | |
814 || (FLOAT_MODE_P (mode) && flag_float_store)) | |
815 return 0; | |
816 | |
817 /* Look up the value for the address. */ | |
818 addr = cselib_lookup (XEXP (x, 0), mode, create); | |
819 if (! addr) | |
820 return 0; | |
821 | |
822 /* Find a value that describes a value of our mode at that address. */ | |
823 for (l = addr->addr_list; l; l = l->next) | |
824 if (GET_MODE (l->elt->val_rtx) == mode) | |
825 return l->elt; | |
826 | |
827 if (! create) | |
828 return 0; | |
829 | |
830 mem_elt = new_cselib_val (++next_unknown_value, mode); | |
831 add_mem_for_addr (addr, mem_elt, x); | |
832 slot = htab_find_slot_with_hash (cselib_hash_table, wrap_constant (mode, x), | |
833 mem_elt->value, INSERT); | |
834 *slot = mem_elt; | |
835 return mem_elt; | |
836 } | |
837 | |
838 /* Search thru the possible substitutions in P. We prefer a non reg | |
839 substitution because this allows us to expand the tree further. If | |
840 we find, just a reg, take the lowest regno. There may be several | |
841 non-reg results, we just take the first one because they will all | |
842 expand to the same place. */ | |
843 | |
844 static rtx | |
845 expand_loc (struct elt_loc_list *p, bitmap regs_active, int max_depth) | |
846 { | |
847 rtx reg_result = NULL; | |
848 unsigned int regno = UINT_MAX; | |
849 struct elt_loc_list *p_in = p; | |
850 | |
851 for (; p; p = p -> next) | |
852 { | |
853 /* Avoid infinite recursion trying to expand a reg into a | |
854 the same reg. */ | |
855 if ((REG_P (p->loc)) | |
856 && (REGNO (p->loc) < regno) | |
857 && !bitmap_bit_p (regs_active, REGNO (p->loc))) | |
858 { | |
859 reg_result = p->loc; | |
860 regno = REGNO (p->loc); | |
861 } | |
862 /* Avoid infinite recursion and do not try to expand the | |
863 value. */ | |
864 else if (GET_CODE (p->loc) == VALUE | |
865 && CSELIB_VAL_PTR (p->loc)->locs == p_in) | |
866 continue; | |
867 else if (!REG_P (p->loc)) | |
868 { | |
869 rtx result, note; | |
870 if (dump_file) | |
871 { | |
872 print_inline_rtx (dump_file, p->loc, 0); | |
873 fprintf (dump_file, "\n"); | |
874 } | |
875 if (GET_CODE (p->loc) == LO_SUM | |
876 && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF | |
877 && p->setting_insn | |
878 && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX)) | |
879 && XEXP (note, 0) == XEXP (p->loc, 1)) | |
880 return XEXP (p->loc, 1); | |
881 result = cselib_expand_value_rtx (p->loc, regs_active, max_depth - 1); | |
882 if (result) | |
883 return result; | |
884 } | |
885 | |
886 } | |
887 | |
888 if (regno != UINT_MAX) | |
889 { | |
890 rtx result; | |
891 if (dump_file) | |
892 fprintf (dump_file, "r%d\n", regno); | |
893 | |
894 result = cselib_expand_value_rtx (reg_result, regs_active, max_depth - 1); | |
895 if (result) | |
896 return result; | |
897 } | |
898 | |
899 if (dump_file) | |
900 { | |
901 if (reg_result) | |
902 { | |
903 print_inline_rtx (dump_file, reg_result, 0); | |
904 fprintf (dump_file, "\n"); | |
905 } | |
906 else | |
907 fprintf (dump_file, "NULL\n"); | |
908 } | |
909 return reg_result; | |
910 } | |
911 | |
912 | |
913 /* Forward substitute and expand an expression out to its roots. | |
914 This is the opposite of common subexpression. Because local value | |
915 numbering is such a weak optimization, the expanded expression is | |
916 pretty much unique (not from a pointer equals point of view but | |
917 from a tree shape point of view). | |
918 | |
919 This function returns NULL if the expansion fails. The expansion | |
920 will fail if there is no value number for one of the operands or if | |
921 one of the operands has been overwritten between the current insn | |
922 and the beginning of the basic block. For instance x has no | |
923 expansion in: | |
924 | |
925 r1 <- r1 + 3 | |
926 x <- r1 + 8 | |
927 | |
928 REGS_ACTIVE is a scratch bitmap that should be clear when passing in. | |
929 It is clear on return. */ | |
930 | |
rtx
cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
{
  rtx copy, scopy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;
  enum machine_mode mode;

  code = GET_CODE (orig);

  /* For the context of dse, if we end up expanding into a huge tree, we
     will not have a useful address, so we might as well just give up
     quickly.  */
  if (max_depth <= 0)
    return NULL;

  switch (code)
    {
    case REG:
      {
	struct elt_list *l = REG_VALUES (REGNO (orig));

	/* Skip the head entry of REG_VALUES, which records the value
	   used to set the register (or NULL), not a lookup entry.  */
	if (l && l->elt == NULL)
	  l = l->next;
	for (; l; l = l->next)
	  if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
	    {
	      rtx result;
	      int regno = REGNO (orig);

	      /* The only thing that we are not willing to do (this
		 is requirement of dse and if others potential uses
		 need this function we should add a parm to control
		 it) is that we will not substitute the
		 STACK_POINTER_REGNUM, FRAME_POINTER or the
		 HARD_FRAME_POINTER.

		 These expansions confuse the code that notices that
		 stores into the frame go dead at the end of the
		 function and that the frame is not effected by calls
		 to subroutines.  If you allow the
		 STACK_POINTER_REGNUM substitution, then dse will
		 think that parameter pushing also goes dead which is
		 wrong.  If you allow the FRAME_POINTER or the
		 HARD_FRAME_POINTER then you lose the opportunity to
		 make the frame assumptions.  */
	      if (regno == STACK_POINTER_REGNUM
		  || regno == FRAME_POINTER_REGNUM
		  || regno == HARD_FRAME_POINTER_REGNUM)
		return orig;

	      /* Mark this register active while expanding its value;
		 presumably expand_loc consults REGS_ACTIVE to reject
		 locations that mention a register currently being
		 expanded (breaking cycles) -- confirm against
		 expand_loc, which is not visible here.  */
	      bitmap_set_bit (regs_active, regno);

	      if (dump_file)
		fprintf (dump_file, "expanding: r%d into: ", regno);

	      result = expand_loc (l->elt->locs, regs_active, max_depth);
	      bitmap_clear_bit (regs_active, regno);

	      if (result)
		return result;
	      else
		return orig;
	    }
      }
      /* FALLTHRU -- no value with a matching mode was found for this
	 register, so return ORIG unexpanded, like the shared rtxes
	 below.  */

    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
	return orig;
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

    case SUBREG:
      {
	rtx subreg = cselib_expand_value_rtx (SUBREG_REG (orig), regs_active,
					      max_depth - 1);
	if (!subreg)
	  return NULL;
	scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
				     GET_MODE (SUBREG_REG (orig)),
				     SUBREG_BYTE (orig));
	/* If simplification failed, or produced a SUBREG of something
	   that is neither a REG nor a MEM, fall back to a shallow copy
	   of the original SUBREG.  */
	if (scopy == NULL
	    || (GET_CODE (scopy) == SUBREG
		&& !REG_P (SUBREG_REG (scopy))
		&& !MEM_P (SUBREG_REG (scopy))))
	  return shallow_copy_rtx (orig);
	return scopy;
      }

    case VALUE:
      if (dump_file)
	fprintf (dump_file, "expanding value %s into: ",
		 GET_MODE_NAME (GET_MODE (orig)));

      return expand_loc (CSELIB_VAL_PTR (orig)->locs, regs_active, max_depth);

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  format_ptr = GET_RTX_FORMAT (code);

  /* Recursively expand every rtx ('e') and rtx-vector ('E'/'V')
     operand of the copy; a NULL from any recursion aborts the whole
     expansion.  */
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  {
	    rtx result = cselib_expand_value_rtx (XEXP (orig, i), regs_active, max_depth - 1);
	    if (!result)
	      return NULL;
	    XEXP (copy, i) = result;
	  }
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      {
		rtx result = cselib_expand_value_rtx (XVECEXP (orig, i, j), regs_active, max_depth - 1);
		if (!result)
		  return NULL;
		XVECEXP (copy, i, j) = result;
	      }
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'T':
      case 'u':
      case 'B':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  mode = GET_MODE (copy);
  /* If an operand has been simplified into CONST_INT, which doesn't
     have a mode and the mode isn't derivable from whole rtx's mode,
     try simplify_*_operation first with mode from original's operand
     and as a fallback wrap CONST_INT into gen_rtx_CONST.  */
  scopy = copy;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (orig, 0)) != VOIDmode)
	{
	  scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
					    GET_MODE (XEXP (orig, 0)));
	  if (scopy)
	    return scopy;
	}
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      /* These expressions can derive operand modes from the whole rtx's mode.  */
      break;
    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (orig, 0)) != VOIDmode)
	{
	  scopy = simplify_ternary_operation (code, mode,
					      GET_MODE (XEXP (orig, 0)),
					      XEXP (copy, 0), XEXP (copy, 1),
					      XEXP (copy, 2));
	  if (scopy)
	    return scopy;
	}
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (copy, 1)) == VOIDmode
	  && (GET_MODE (XEXP (orig, 0)) != VOIDmode
	      || GET_MODE (XEXP (orig, 1)) != VOIDmode))
	{
	  scopy = simplify_relational_operation (code, mode,
						 (GET_MODE (XEXP (orig, 0))
						  != VOIDmode)
						 ? GET_MODE (XEXP (orig, 0))
						 : GET_MODE (XEXP (orig, 1)),
						 XEXP (copy, 0),
						 XEXP (copy, 1));
	  if (scopy)
	    return scopy;
	}
      break;
    default:
      break;
    }
  /* SCOPY can only be NULL here if one of the simplify_* calls above
     ran and failed (it starts out as COPY, which is non-NULL); apply
     the gen_rtx_CONST fallback described above.  */
  if (scopy == NULL_RTX)
    {
      XEXP (copy, 0)
	= gen_rtx_CONST (GET_MODE (XEXP (orig, 0)), XEXP (copy, 0));
      if (dump_file)
	fprintf (dump_file, "  wrapping const_int result in const to preserve mode %s\n",
		 GET_MODE_NAME (GET_MODE (XEXP (copy, 0))));
    }
  scopy = simplify_rtx (copy);
  if (scopy)
    return scopy;
  return copy;
}
1167 | |
/* Walk rtx X and replace all occurrences of REG and MEM subexpressions
   with VALUE expressions.  This way, it becomes independent of changes
   to registers and memory.
   X isn't actually modified; if modifications are needed, new rtl is
   allocated.  However, the return value can share rtl with X.  */

rtx
cselib_subst_to_values (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  cselib_val *e;
  struct elt_list *l;
  rtx copy = x;    /* Stays equal to X until a change forces a copy.  */
  int i;

  switch (code)
    {
    case REG:
      /* Skip the head entry of REG_VALUES (the setting value, or
	 NULL) and find the recorded value whose mode matches X.  */
      l = REG_VALUES (REGNO (x));
      if (l && l->elt == NULL)
	l = l->next;
      for (; l; l = l->next)
	if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
	  return l->elt->val_rtx;

      /* Callers must only substitute registers that already have a
	 value of the right mode; crash intentionally otherwise.  */
      gcc_unreachable ();

    case MEM:
      e = cselib_lookup_mem (x, 0);
      if (! e)
	{
	  /* This happens for autoincrements.  Assign a value that doesn't
	     match any other.  */
	  e = new_cselib_val (++next_unknown_value, GET_MODE (x));
	}
      return e->val_rtx;

    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST_INT:
    case CONST_FIXED:
      /* Constants never change; return them unmodified.  */
      return x;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
    case POST_MODIFY:
    case PRE_MODIFY:
      /* Autoincrement addressing has side effects, so give each
	 occurrence a fresh value that matches nothing else.  */
      e = new_cselib_val (++next_unknown_value, GET_MODE (x));
      return e->val_rtx;

    default:
      break;
    }

  /* Recurse on the operands.  COPY is updated lazily: a fresh rtx (and
     fresh operand vector) is allocated only once a sub-rtx actually
     changes, so unchanged rtl stays shared with X.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx t = cselib_subst_to_values (XEXP (x, i));

	  if (t != XEXP (x, i) && x == copy)
	    copy = shallow_copy_rtx (x);

	  XEXP (copy, i) = t;
	}
      else if (fmt[i] == 'E')
	{
	  int j, k;

	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      rtx t = cselib_subst_to_values (XVECEXP (x, i, j));

	      if (t != XVECEXP (x, i, j) && XVEC (x, i) == XVEC (copy, i))
		{
		  if (x == copy)
		    copy = shallow_copy_rtx (x);

		  /* First change within this vector: allocate a new
		     one and copy over the elements already visited.  */
		  XVEC (copy, i) = rtvec_alloc (XVECLEN (x, i));
		  for (k = 0; k < j; k++)
		    XVECEXP (copy, i, k) = XVECEXP (x, i, k);
		}

	      XVECEXP (copy, i, j) = t;
	    }
	}
    }

  return copy;
}
1261 | |
/* Look up the rtl expression X in our tables and return the value it has.
   If CREATE is zero, we return NULL if we don't know the value.  Otherwise,
   we create a new one if possible, using mode MODE if X doesn't have a mode
   (i.e. because it's a constant).  */

cselib_val *
cselib_lookup (rtx x, enum machine_mode mode, int create)
{
  void **slot;
  cselib_val *e;
  unsigned int hashval;

  /* X's own mode, when it has one, overrides the caller's MODE.  */
  if (GET_MODE (x) != VOIDmode)
    mode = GET_MODE (x);

  /* A VALUE rtx already carries its cselib_val.  */
  if (GET_CODE (x) == VALUE)
    return CSELIB_VAL_PTR (x);

  if (REG_P (x))
    {
      struct elt_list *l;
      unsigned int i = REGNO (x);

      /* Search the existing values for this register, skipping the
	 head entry, which records the setting value (or NULL) rather
	 than a lookup entry.  */
      l = REG_VALUES (i);
      if (l && l->elt == NULL)
	l = l->next;
      for (; l; l = l->next)
	if (mode == GET_MODE (l->elt->val_rtx))
	  return l->elt;

      if (! create)
	return 0;

      /* Track the widest value held in any hard register so that
	 cselib_invalidate_regno knows how far below a clobbered regno
	 an overlapping value might start.  */
      if (i < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = hard_regno_nregs[i][mode];

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      e = new_cselib_val (++next_unknown_value, GET_MODE (x));
      e->locs = new_elt_loc_list (e->locs, x);
      if (REG_VALUES (i) == 0)
	{
	  /* Maintain the invariant that the first entry of
	     REG_VALUES, if present, must be the value used to set the
	     register, or NULL.  */
	  used_regs[n_used_regs++] = i;
	  REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
	}
      REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
      slot = htab_find_slot_with_hash (cselib_hash_table, x, e->value, INSERT);
      *slot = e;
      return e;
    }

  if (MEM_P (x))
    return cselib_lookup_mem (x, create);

  hashval = cselib_hash_rtx (x, create);
  /* Can't even create if hashing is not possible.  */
  if (! hashval)
    return 0;

  slot = htab_find_slot_with_hash (cselib_hash_table, wrap_constant (mode, x),
				   hashval, create ? INSERT : NO_INSERT);
  if (slot == 0)
    return 0;

  e = (cselib_val *) *slot;
  if (e)
    return e;

  e = new_cselib_val (hashval, mode);

  /* We have to fill the slot before calling cselib_subst_to_values:
     the hash table is inconsistent until we do so, and
     cselib_subst_to_values will need to do lookups.  */
  *slot = (void *) e;
  e->locs = new_elt_loc_list (e->locs, cselib_subst_to_values (x));
  return e;
}
1345 | |
/* Invalidate any entries in reg_values that overlap REGNO.  This is called
   if REGNO is changing.  MODE is the mode of the assignment to REGNO, which
   is used to determine how many hard registers are being changed.  If MODE
   is VOIDmode, then only REGNO is being changed; this is used when
   invalidating call clobbered registers across a call.  */

static void
cselib_invalidate_regno (unsigned int regno, enum machine_mode mode)
{
  unsigned int endregno;
  unsigned int i;

  /* If we see pseudos after reload, something is _wrong_.  */
  gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
	      || reg_renumber[regno] < 0);

  /* Determine the range of registers that must be invalidated.  For
     pseudos, only REGNO is affected.  For hard regs, we must take MODE
     into account, and we must also invalidate lower register numbers
     if they contain values that overlap REGNO.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      gcc_assert (mode != VOIDmode);

      /* No recorded value is wider than max_value_regs, so nothing
	 starting more than that far below REGNO can reach it.  */
      if (regno < max_value_regs)
	i = 0;
      else
	i = regno - max_value_regs;

      endregno = end_hard_regno (mode, regno);
    }
  else
    {
      i = regno;
      endregno = regno + 1;
    }

  for (; i < endregno; i++)
    {
      struct elt_list **l = &REG_VALUES (i);

      /* Go through all known values for this reg; if it overlaps the range
	 we're invalidating, remove the value.  L is a pointer to the link
	 being examined, so unchaining works in place.  */
      while (*l)
	{
	  cselib_val *v = (*l)->elt;
	  struct elt_loc_list **p;
	  unsigned int this_last = i;

	  if (i < FIRST_PSEUDO_REGISTER && v != NULL)
	    this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;

	  /* Keep entries whose value ends below REGNO, and the NULL
	     head entry.  */
	  if (this_last < regno || v == NULL)
	    {
	      l = &(*l)->next;
	      continue;
	    }

	  /* We have an overlap.  */
	  if (*l == REG_VALUES (i))
	    {
	      /* Maintain the invariant that the first entry of
		 REG_VALUES, if present, must be the value used to set
		 the register, or NULL.  This is also nice because
		 then we won't push the same regno onto user_regs
		 multiple times.  */
	      (*l)->elt = NULL;
	      l = &(*l)->next;
	    }
	  else
	    unchain_one_elt_list (l);

	  /* Now, we clear the mapping from value to reg.  It must exist, so
	     this code will crash intentionally if it doesn't.  */
	  for (p = &v->locs; ; p = &(*p)->next)
	    {
	      rtx x = (*p)->loc;

	      if (REG_P (x) && REGNO (x) == i)
		{
		  unchain_one_elt_loc_list (p);
		  break;
		}
	    }
	  if (v->locs == 0)
	    n_useless_values++;
	}
    }
}
1435 | |
1436 /* Return 1 if X has a value that can vary even between two | |
1437 executions of the program. 0 means X can be compared reliably | |
1438 against certain constants or near-constants. */ | |
1439 | |
1440 static bool | |
1441 cselib_rtx_varies_p (const_rtx x ATTRIBUTE_UNUSED, bool from_alias ATTRIBUTE_UNUSED) | |
1442 { | |
1443 /* We actually don't need to verify very hard. This is because | |
1444 if X has actually changed, we invalidate the memory anyway, | |
1445 so assume that all common memory addresses are | |
1446 invariant. */ | |
1447 return 0; | |
1448 } | |
1449 | |
/* Invalidate any locations in the table which are changed because of a
   store to MEM_RTX.  If this is called because of a non-const call
   instruction, MEM_RTX is (mem:BLK const0_rtx).  */

static void
cselib_invalidate_mem (rtx mem_rtx)
{
  cselib_val **vp, *v, *next;
  int num_mems = 0;
  rtx mem_addr;

  /* Canonicalize the store address and the MEM itself once, up front,
     for the dependence tests below.  */
  mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
  mem_rtx = canon_rtx (mem_rtx);

  /* Walk the chain of values that contain at least one MEM location,
     rebuilding it in place (via VP) so that values which lose their
     last MEM location drop off the chain.  */
  vp = &first_containing_mem;
  for (v = *vp; v != &dummy_val; v = next)
    {
      bool has_mem = false;
      struct elt_loc_list **p = &v->locs;
      int had_locs = v->locs != 0;

      while (*p)
	{
	  rtx x = (*p)->loc;
	  cselib_val *addr;
	  struct elt_list **mem_chain;

	  /* MEMs may occur in locations only at the top level; below
	     that every MEM or REG is substituted by its VALUE.  */
	  if (!MEM_P (x))
	    {
	      p = &(*p)->next;
	      continue;
	    }
	  /* Keep this location if it provably does not conflict with
	     the store; once NUM_MEMS exceeds the param limit, stop
	     testing and treat every remaining MEM as clobbered.  */
	  if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
	      && ! canon_true_dependence (mem_rtx, GET_MODE (mem_rtx), mem_addr,
					  x, cselib_rtx_varies_p))
	    {
	      has_mem = true;
	      num_mems++;
	      p = &(*p)->next;
	      continue;
	    }

	  /* This one overlaps.  */
	  /* We must have a mapping from this MEM's address to the
	     value (E).  Remove that, too.  */
	  addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0);
	  mem_chain = &addr->addr_list;
	  for (;;)
	    {
	      if ((*mem_chain)->elt == v)
		{
		  unchain_one_elt_list (mem_chain);
		  break;
		}

	      mem_chain = &(*mem_chain)->next;
	    }

	  unchain_one_elt_loc_list (p);
	}

      if (had_locs && v->locs == 0)
	n_useless_values++;

      next = v->next_containing_mem;
      if (has_mem)
	{
	  *vp = v;
	  vp = &(*vp)->next_containing_mem;
	}
      else
	v->next_containing_mem = NULL;
    }
  /* Terminate the rebuilt chain.  */
  *vp = &dummy_val;
}
1527 | |
1528 /* Invalidate DEST, which is being assigned to or clobbered. */ | |
1529 | |
1530 void | |
1531 cselib_invalidate_rtx (rtx dest) | |
1532 { | |
1533 while (GET_CODE (dest) == SUBREG | |
1534 || GET_CODE (dest) == ZERO_EXTRACT | |
1535 || GET_CODE (dest) == STRICT_LOW_PART) | |
1536 dest = XEXP (dest, 0); | |
1537 | |
1538 if (REG_P (dest)) | |
1539 cselib_invalidate_regno (REGNO (dest), GET_MODE (dest)); | |
1540 else if (MEM_P (dest)) | |
1541 cselib_invalidate_mem (dest); | |
1542 | |
1543 /* Some machines don't define AUTO_INC_DEC, but they still use push | |
1544 instructions. We need to catch that case here in order to | |
1545 invalidate the stack pointer correctly. Note that invalidating | |
1546 the stack pointer is different from invalidating DEST. */ | |
1547 if (push_operand (dest, GET_MODE (dest))) | |
1548 cselib_invalidate_rtx (stack_pointer_rtx); | |
1549 } | |
1550 | |
/* A wrapper for cselib_invalidate_rtx to be called via note_stores.
   Only DEST matters; the store rtx and the callback DATA are unused.  */

static void
cselib_invalidate_rtx_note_stores (rtx dest, const_rtx ignore ATTRIBUTE_UNUSED,
				   void *data ATTRIBUTE_UNUSED)
{
  cselib_invalidate_rtx (dest);
}
1559 | |
/* Record the result of a SET instruction.  DEST is being set; the source
   contains the value described by SRC_ELT.  If DEST is a MEM, DEST_ADDR_ELT
   describes its address.  */

static void
cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
{
  int dreg = REG_P (dest) ? (int) REGNO (dest) : -1;

  /* Nothing to record without a source value, or when evaluating DEST
     itself has side effects.  */
  if (src_elt == 0 || side_effects_p (dest))
    return;

  if (dreg >= 0)
    {
      /* Keep max_value_regs wide enough that cselib_invalidate_regno
	 can find multi-word hard-register values overlapping a
	 clobbered regno.  */
      if (dreg < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = hard_regno_nregs[dreg][GET_MODE (dest)];

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      if (REG_VALUES (dreg) == 0)
	{
	  /* First value ever recorded for this register; the head
	     entry of REG_VALUES holds the value used to set it.  */
	  used_regs[n_used_regs++] = dreg;
	  REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
	}
      else
	{
	  /* The register should have been invalidated.  */
	  gcc_assert (REG_VALUES (dreg)->elt == 0);
	  REG_VALUES (dreg)->elt = src_elt;
	}

      /* SRC_ELT gains a location, so it is no longer useless.  */
      if (src_elt->locs == 0)
	n_useless_values--;
      src_elt->locs = new_elt_loc_list (src_elt->locs, dest);
    }
  else if (MEM_P (dest) && dest_addr_elt != 0
	   && cselib_record_memory)
    {
      if (src_elt->locs == 0)
	n_useless_values--;
      add_mem_for_addr (dest_addr_elt, src_elt, dest);
    }
}
1606 | |
/* Describe a single set that is part of an insn.  */
struct set
{
  rtx src;			/* SET_SRC of the set.  */
  rtx dest;			/* SET_DEST of the set.  */
  cselib_val *src_elt;		/* Value of the source.  */
  cselib_val *dest_addr_elt;	/* Value of the address, when DEST is a MEM.  */
};
1615 | |
/* There is no good way to determine how many elements there can be
   in a PARALLEL.  Since it's fairly cheap, use a really large number;
   this bounds the sets[] array in cselib_record_sets.  */
#define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
1619 | |
1620 /* Record the effects of any sets in INSN. */ | |
1621 static void | |
1622 cselib_record_sets (rtx insn) | |
1623 { | |
1624 int n_sets = 0; | |
1625 int i; | |
1626 struct set sets[MAX_SETS]; | |
1627 rtx body = PATTERN (insn); | |
1628 rtx cond = 0; | |
1629 | |
1630 body = PATTERN (insn); | |
1631 if (GET_CODE (body) == COND_EXEC) | |
1632 { | |
1633 cond = COND_EXEC_TEST (body); | |
1634 body = COND_EXEC_CODE (body); | |
1635 } | |
1636 | |
1637 /* Find all sets. */ | |
1638 if (GET_CODE (body) == SET) | |
1639 { | |
1640 sets[0].src = SET_SRC (body); | |
1641 sets[0].dest = SET_DEST (body); | |
1642 n_sets = 1; | |
1643 } | |
1644 else if (GET_CODE (body) == PARALLEL) | |
1645 { | |
1646 /* Look through the PARALLEL and record the values being | |
1647 set, if possible. Also handle any CLOBBERs. */ | |
1648 for (i = XVECLEN (body, 0) - 1; i >= 0; --i) | |
1649 { | |
1650 rtx x = XVECEXP (body, 0, i); | |
1651 | |
1652 if (GET_CODE (x) == SET) | |
1653 { | |
1654 sets[n_sets].src = SET_SRC (x); | |
1655 sets[n_sets].dest = SET_DEST (x); | |
1656 n_sets++; | |
1657 } | |
1658 } | |
1659 } | |
1660 | |
1661 if (n_sets == 1 | |
1662 && MEM_P (sets[0].src) | |
1663 && !cselib_record_memory | |
1664 && MEM_READONLY_P (sets[0].src)) | |
1665 { | |
1666 rtx note = find_reg_equal_equiv_note (insn); | |
1667 | |
1668 if (note && CONSTANT_P (XEXP (note, 0))) | |
1669 sets[0].src = XEXP (note, 0); | |
1670 } | |
1671 | |
1672 /* Look up the values that are read. Do this before invalidating the | |
1673 locations that are written. */ | |
1674 for (i = 0; i < n_sets; i++) | |
1675 { | |
1676 rtx dest = sets[i].dest; | |
1677 | |
1678 /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for | |
1679 the low part after invalidating any knowledge about larger modes. */ | |
1680 if (GET_CODE (sets[i].dest) == STRICT_LOW_PART) | |
1681 sets[i].dest = dest = XEXP (dest, 0); | |
1682 | |
1683 /* We don't know how to record anything but REG or MEM. */ | |
1684 if (REG_P (dest) | |
1685 || (MEM_P (dest) && cselib_record_memory)) | |
1686 { | |
1687 rtx src = sets[i].src; | |
1688 if (cond) | |
1689 src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest); | |
1690 sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1); | |
1691 if (MEM_P (dest)) | |
1692 sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0), Pmode, 1); | |
1693 else | |
1694 sets[i].dest_addr_elt = 0; | |
1695 } | |
1696 } | |
1697 | |
1698 /* Invalidate all locations written by this insn. Note that the elts we | |
1699 looked up in the previous loop aren't affected, just some of their | |
1700 locations may go away. */ | |
1701 note_stores (body, cselib_invalidate_rtx_note_stores, NULL); | |
1702 | |
1703 /* If this is an asm, look for duplicate sets. This can happen when the | |
1704 user uses the same value as an output multiple times. This is valid | |
1705 if the outputs are not actually used thereafter. Treat this case as | |
1706 if the value isn't actually set. We do this by smashing the destination | |
1707 to pc_rtx, so that we won't record the value later. */ | |
1708 if (n_sets >= 2 && asm_noperands (body) >= 0) | |
1709 { | |
1710 for (i = 0; i < n_sets; i++) | |
1711 { | |
1712 rtx dest = sets[i].dest; | |
1713 if (REG_P (dest) || MEM_P (dest)) | |
1714 { | |
1715 int j; | |
1716 for (j = i + 1; j < n_sets; j++) | |
1717 if (rtx_equal_p (dest, sets[j].dest)) | |
1718 { | |
1719 sets[i].dest = pc_rtx; | |
1720 sets[j].dest = pc_rtx; | |
1721 } | |
1722 } | |
1723 } | |
1724 } | |
1725 | |
1726 /* Now enter the equivalences in our tables. */ | |
1727 for (i = 0; i < n_sets; i++) | |
1728 { | |
1729 rtx dest = sets[i].dest; | |
1730 if (REG_P (dest) | |
1731 || (MEM_P (dest) && cselib_record_memory)) | |
1732 cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt); | |
1733 } | |
1734 } | |
1735 | |
/* Record the effects of INSN.  */

void
cselib_process_insn (rtx insn)
{
  int i;
  rtx x;

  cselib_current_insn = insn;

  /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp.  */
  if (LABEL_P (insn)
      || (CALL_P (insn)
	  && find_reg_note (insn, REG_SETJMP, NULL))
      || (NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
	  && MEM_VOLATILE_P (PATTERN (insn))))
    {
      cselib_clear_table ();
      return;
    }

  /* Non-insns (notes, barriers, ...) have no effects to record.  */
  if (! INSN_P (insn))
    {
      cselib_current_insn = 0;
      return;
    }

  /* If this is a call instruction, forget anything stored in a
     call clobbered register, or, if this is not a const call, in
     memory.  */
  if (CALL_P (insn))
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (call_used_regs[i]
	    || (REG_VALUES (i) && REG_VALUES (i)->elt
		&& HARD_REGNO_CALL_PART_CLOBBERED (i,
		      GET_MODE (REG_VALUES (i)->elt->val_rtx))))
	  cselib_invalidate_regno (i, reg_raw_mode[i]);

      /* Since it is not clear how cselib is going to be used, be
	 conservative here and treat looping pure or const functions
	 as if they were regular functions.  */
      if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
	  || !(RTL_CONST_OR_PURE_CALL_P (insn)))
	cselib_invalidate_mem (callmem);
    }

  cselib_record_sets (insn);

#ifdef AUTO_INC_DEC
  /* Clobber any registers which appear in REG_INC notes.  We
     could keep track of the changes to their values, but it is
     unlikely to help.  */
  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
    if (REG_NOTE_KIND (x) == REG_INC)
      cselib_invalidate_rtx (XEXP (x, 0));
#endif

  /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
     after we have processed the insn.  */
  if (CALL_P (insn))
    for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
	cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));

  cselib_current_insn = 0;

  if (n_useless_values > MAX_USELESS_VALUES
      /* remove_useless_values is linear in the hash table size.  Avoid
	 quadratic behavior for very large hashtables with very few
	 useless elements.  */
      && (unsigned int)n_useless_values > cselib_hash_table->n_elements / 4)
    remove_useless_values ();
}
1811 | |
1812 /* Initialize cselib for one pass. The caller must also call | |
1813 init_alias_analysis. */ | |
1814 | |
1815 void | |
1816 cselib_init (bool record_memory) | |
1817 { | |
1818 elt_list_pool = create_alloc_pool ("elt_list", | |
1819 sizeof (struct elt_list), 10); | |
1820 elt_loc_list_pool = create_alloc_pool ("elt_loc_list", | |
1821 sizeof (struct elt_loc_list), 10); | |
1822 cselib_val_pool = create_alloc_pool ("cselib_val_list", | |
1823 sizeof (cselib_val), 10); | |
1824 value_pool = create_alloc_pool ("value", RTX_CODE_SIZE (VALUE), 100); | |
1825 cselib_record_memory = record_memory; | |
1826 | |
1827 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything, | |
1828 see canon_true_dependence. This is only created once. */ | |
1829 if (! callmem) | |
1830 callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)); | |
1831 | |
1832 cselib_nregs = max_reg_num (); | |
1833 | |
1834 /* We preserve reg_values to allow expensive clearing of the whole thing. | |
1835 Reallocate it however if it happens to be too large. */ | |
1836 if (!reg_values || reg_values_size < cselib_nregs | |
1837 || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4)) | |
1838 { | |
1839 if (reg_values) | |
1840 free (reg_values); | |
1841 /* Some space for newly emit instructions so we don't end up | |
1842 reallocating in between passes. */ | |
1843 reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16; | |
1844 reg_values = XCNEWVEC (struct elt_list *, reg_values_size); | |
1845 } | |
1846 used_regs = XNEWVEC (unsigned int, cselib_nregs); | |
1847 n_used_regs = 0; | |
1848 cselib_hash_table = htab_create (31, get_value_hash, | |
1849 entry_and_rtx_equal_p, NULL); | |
1850 } | |
1851 | |
/* Called when the current user is done with cselib.  Releases all
   per-pass state; reg_values itself is preserved for reuse by the
   next cselib_init (see the comment there).  */

void
cselib_finish (void)
{
  cselib_discard_hook = NULL;
  free_alloc_pool (elt_list_pool);
  free_alloc_pool (elt_loc_list_pool);
  free_alloc_pool (cselib_val_pool);
  free_alloc_pool (value_pool);
  /* NOTE(review): the table is cleared after its element pools have
     been freed; this presumably relies on cselib_clear_table not
     touching the pool memory at this point -- confirm against its
     implementation (not visible in this chunk).  */
  cselib_clear_table ();
  htab_delete (cselib_hash_table);
  free (used_regs);
  used_regs = 0;
  cselib_hash_table = 0;
  n_useless_values = 0;
  next_unknown_value = 0;
}
1870 | |
1871 #include "gt-cselib.h" |