Mercurial > hg > CbC > CbC_gcc
comparison gcc/dce.c @ 145:1830386684a0
gcc-9.2.0
author | anatofuz |
---|---|
date | Thu, 13 Feb 2020 11:34:05 +0900 |
parents | 84e7813d76e9 |
children |
comparison
equal
deleted
inserted
replaced
131:84e7813d76e9 | 145:1830386684a0 |
---|---|
1 /* RTL dead code elimination. | 1 /* RTL dead code elimination. |
2 Copyright (C) 2005-2018 Free Software Foundation, Inc. | 2 Copyright (C) 2005-2020 Free Software Foundation, Inc. |
3 | 3 |
4 This file is part of GCC. | 4 This file is part of GCC. |
5 | 5 |
6 GCC is free software; you can redistribute it and/or modify it under | 6 GCC is free software; you can redistribute it and/or modify it under |
7 the terms of the GNU General Public License as published by the Free | 7 the terms of the GNU General Public License as published by the Free |
33 #include "cfgcleanup.h" | 33 #include "cfgcleanup.h" |
34 #include "dce.h" | 34 #include "dce.h" |
35 #include "valtrack.h" | 35 #include "valtrack.h" |
36 #include "tree-pass.h" | 36 #include "tree-pass.h" |
37 #include "dbgcnt.h" | 37 #include "dbgcnt.h" |
38 #include "rtl-iter.h" | |
38 | 39 |
39 | 40 |
40 /* ------------------------------------------------------------------------- | 41 /* ------------------------------------------------------------------------- |
41 Core mark/delete routines | 42 Core mark/delete routines |
42 ------------------------------------------------------------------------- */ | 43 ------------------------------------------------------------------------- */ |
84 default: | 85 default: |
85 return !volatile_refs_p (body); | 86 return !volatile_refs_p (body); |
86 } | 87 } |
87 } | 88 } |
88 | 89 |
90 /* Don't delete calls that may throw if we cannot do so. */ | |
91 | |
92 static bool | |
93 can_delete_call (rtx_insn *insn) | |
94 { | |
95 if (cfun->can_delete_dead_exceptions && can_alter_cfg) | |
96 return true; | |
97 if (!insn_nothrow_p (insn)) | |
98 return false; | |
99 if (can_alter_cfg) | |
100 return true; | |
101 /* If we can't alter cfg, even when the call can't throw exceptions, it | |
102 might have EDGE_ABNORMAL_CALL edges and so we shouldn't delete such | |
103 calls. */ | |
104 gcc_assert (CALL_P (insn)); | |
105 if (BLOCK_FOR_INSN (insn) && BB_END (BLOCK_FOR_INSN (insn)) == insn) | |
106 { | |
107 edge e; | |
108 edge_iterator ei; | |
109 | |
110 FOR_EACH_EDGE (e, ei, BLOCK_FOR_INSN (insn)->succs) | |
111 if ((e->flags & EDGE_ABNORMAL_CALL) != 0) | |
112 return false; | |
113 } | |
114 return true; | |
115 } | |
89 | 116 |
90 /* Return true if INSN is a normal instruction that can be deleted by | 117 /* Return true if INSN is a normal instruction that can be deleted by |
91 the DCE pass. */ | 118 the DCE pass. */ |
92 | 119 |
93 static bool | 120 static bool |
106 hard to see the result. */ | 133 hard to see the result. */ |
107 && (!SIBLING_CALL_P (insn)) | 134 && (!SIBLING_CALL_P (insn)) |
108 /* We can delete dead const or pure calls as long as they do not | 135 /* We can delete dead const or pure calls as long as they do not |
109 infinite loop. */ | 136 infinite loop. */ |
110 && (RTL_CONST_OR_PURE_CALL_P (insn) | 137 && (RTL_CONST_OR_PURE_CALL_P (insn) |
111 && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn))) | 138 && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)) |
139 /* Don't delete calls that may throw if we cannot do so. */ | |
140 && can_delete_call (insn)) | |
112 return find_call_stack_args (as_a <rtx_call_insn *> (insn), false, | 141 return find_call_stack_args (as_a <rtx_call_insn *> (insn), false, |
113 fast, arg_stores); | 142 fast, arg_stores); |
114 | 143 |
115 /* Don't delete jumps, notes and the like. */ | 144 /* Don't delete jumps, notes and the like. */ |
116 if (!NONJUMP_INSN_P (insn)) | 145 if (!NONJUMP_INSN_P (insn)) |
143 case USE: | 172 case USE: |
144 case VAR_LOCATION: | 173 case VAR_LOCATION: |
145 return false; | 174 return false; |
146 | 175 |
147 case CLOBBER: | 176 case CLOBBER: |
148 case CLOBBER_HIGH: | |
149 if (fast) | 177 if (fast) |
150 { | 178 { |
151 /* A CLOBBER of a dead pseudo register serves no purpose. | 179 /* A CLOBBER of a dead pseudo register serves no purpose. |
152 That is not necessarily true for hard registers until | 180 That is not necessarily true for hard registers until |
153 after reload. */ | 181 after reload. */ |
199 fprintf (dump_file, " Adding insn %d to worklist\n", INSN_UID (insn)); | 227 fprintf (dump_file, " Adding insn %d to worklist\n", INSN_UID (insn)); |
200 if (CALL_P (insn) | 228 if (CALL_P (insn) |
201 && !df_in_progress | 229 && !df_in_progress |
202 && !SIBLING_CALL_P (insn) | 230 && !SIBLING_CALL_P (insn) |
203 && (RTL_CONST_OR_PURE_CALL_P (insn) | 231 && (RTL_CONST_OR_PURE_CALL_P (insn) |
204 && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn))) | 232 && !RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)) |
233 && can_delete_call (insn)) | |
205 find_call_stack_args (as_a <rtx_call_insn *> (insn), true, fast, NULL); | 234 find_call_stack_args (as_a <rtx_call_insn *> (insn), true, fast, NULL); |
206 } | 235 } |
207 } | 236 } |
208 | 237 |
209 | 238 |
212 | 241 |
213 static void | 242 static void |
214 mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data) | 243 mark_nonreg_stores_1 (rtx dest, const_rtx pattern, void *data) |
215 { | 244 { |
216 if (GET_CODE (pattern) != CLOBBER && !REG_P (dest)) | 245 if (GET_CODE (pattern) != CLOBBER && !REG_P (dest)) |
217 { | 246 mark_insn ((rtx_insn *) data, true); |
218 gcc_checking_assert (GET_CODE (pattern) != CLOBBER_HIGH); | |
219 mark_insn ((rtx_insn *) data, true); | |
220 } | |
221 } | 247 } |
222 | 248 |
223 | 249 |
224 /* A note_stores callback used by mark_nonreg_stores. DATA is the | 250 /* A note_stores callback used by mark_nonreg_stores. DATA is the |
225 instruction containing DEST. */ | 251 instruction containing DEST. */ |
226 | 252 |
227 static void | 253 static void |
228 mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data) | 254 mark_nonreg_stores_2 (rtx dest, const_rtx pattern, void *data) |
229 { | 255 { |
230 if (GET_CODE (pattern) != CLOBBER && !REG_P (dest)) | 256 if (GET_CODE (pattern) != CLOBBER && !REG_P (dest)) |
231 { | 257 mark_insn ((rtx_insn *) data, false); |
232 gcc_checking_assert (GET_CODE (pattern) != CLOBBER_HIGH); | 258 } |
233 mark_insn ((rtx_insn *) data, false); | 259 |
234 } | 260 |
235 } | 261 /* Mark INSN if it stores to a non-register destination. */ |
236 | |
237 | |
238 /* Mark INSN if BODY stores to a non-register destination. */ | |
239 | 262 |
240 static void | 263 static void |
241 mark_nonreg_stores (rtx body, rtx_insn *insn, bool fast) | 264 mark_nonreg_stores (rtx_insn *insn, bool fast) |
242 { | 265 { |
243 if (fast) | 266 if (fast) |
244 note_stores (body, mark_nonreg_stores_1, insn); | 267 note_stores (insn, mark_nonreg_stores_1, insn); |
245 else | 268 else |
246 note_stores (body, mark_nonreg_stores_2, insn); | 269 note_stores (insn, mark_nonreg_stores_2, insn); |
247 } | 270 } |
248 | 271 |
249 | 272 |
250 /* Return true if a store to SIZE bytes, starting OFF bytes from stack pointer, | 273 /* Return true if a store to SIZE bytes, starting OFF bytes from stack pointer, |
251 is a call argument store, and clear corresponding bits from SP_BYTES | 274 is a call argument store, and clear corresponding bits from SP_BYTES |
265 return false; | 288 return false; |
266 } | 289 } |
267 return true; | 290 return true; |
268 } | 291 } |
269 | 292 |
293 /* If MEM has sp address, return 0, if it has sp + const address, | |
294 return that const, if it has reg address where reg is set to sp + const | |
295 and FAST is false, return const, otherwise return | |
296 INTTYPE_MINUMUM (HOST_WIDE_INT). */ | |
297 | |
298 static HOST_WIDE_INT | |
299 sp_based_mem_offset (rtx_call_insn *call_insn, const_rtx mem, bool fast) | |
300 { | |
301 HOST_WIDE_INT off = 0; | |
302 rtx addr = XEXP (mem, 0); | |
303 if (GET_CODE (addr) == PLUS | |
304 && REG_P (XEXP (addr, 0)) | |
305 && CONST_INT_P (XEXP (addr, 1))) | |
306 { | |
307 off = INTVAL (XEXP (addr, 1)); | |
308 addr = XEXP (addr, 0); | |
309 } | |
310 if (addr == stack_pointer_rtx) | |
311 return off; | |
312 | |
313 if (!REG_P (addr) || fast) | |
314 return INTTYPE_MINIMUM (HOST_WIDE_INT); | |
315 | |
316 /* If not fast, use chains to see if addr wasn't set to sp + offset. */ | |
317 df_ref use; | |
318 FOR_EACH_INSN_USE (use, call_insn) | |
319 if (rtx_equal_p (addr, DF_REF_REG (use))) | |
320 break; | |
321 | |
322 if (use == NULL) | |
323 return INTTYPE_MINIMUM (HOST_WIDE_INT); | |
324 | |
325 struct df_link *defs; | |
326 for (defs = DF_REF_CHAIN (use); defs; defs = defs->next) | |
327 if (! DF_REF_IS_ARTIFICIAL (defs->ref)) | |
328 break; | |
329 | |
330 if (defs == NULL) | |
331 return INTTYPE_MINIMUM (HOST_WIDE_INT); | |
332 | |
333 rtx set = single_set (DF_REF_INSN (defs->ref)); | |
334 if (!set) | |
335 return INTTYPE_MINIMUM (HOST_WIDE_INT); | |
336 | |
337 if (GET_CODE (SET_SRC (set)) != PLUS | |
338 || XEXP (SET_SRC (set), 0) != stack_pointer_rtx | |
339 || !CONST_INT_P (XEXP (SET_SRC (set), 1))) | |
340 return INTTYPE_MINIMUM (HOST_WIDE_INT); | |
341 | |
342 off += INTVAL (XEXP (SET_SRC (set), 1)); | |
343 return off; | |
344 } | |
345 | |
346 /* Data for check_argument_load called via note_uses. */ | |
347 struct check_argument_load_data { | |
348 bitmap sp_bytes; | |
349 HOST_WIDE_INT min_sp_off, max_sp_off; | |
350 rtx_call_insn *call_insn; | |
351 bool fast; | |
352 bool load_found; | |
353 }; | |
354 | |
355 /* Helper function for find_call_stack_args. Check if there are | |
356 any loads from the argument slots in between the const/pure call | |
357 and store to the argument slot, set LOAD_FOUND if any is found. */ | |
358 | |
359 static void | |
360 check_argument_load (rtx *loc, void *data) | |
361 { | |
362 struct check_argument_load_data *d | |
363 = (struct check_argument_load_data *) data; | |
364 subrtx_iterator::array_type array; | |
365 FOR_EACH_SUBRTX (iter, array, *loc, NONCONST) | |
366 { | |
367 const_rtx mem = *iter; | |
368 HOST_WIDE_INT size; | |
369 if (MEM_P (mem) | |
370 && MEM_SIZE_KNOWN_P (mem) | |
371 && MEM_SIZE (mem).is_constant (&size)) | |
372 { | |
373 HOST_WIDE_INT off = sp_based_mem_offset (d->call_insn, mem, d->fast); | |
374 if (off != INTTYPE_MINIMUM (HOST_WIDE_INT) | |
375 && off < d->max_sp_off | |
376 && off + size > d->min_sp_off) | |
377 for (HOST_WIDE_INT byte = MAX (off, d->min_sp_off); | |
378 byte < MIN (off + size, d->max_sp_off); byte++) | |
379 if (bitmap_bit_p (d->sp_bytes, byte - d->min_sp_off)) | |
380 { | |
381 d->load_found = true; | |
382 return; | |
383 } | |
384 } | |
385 } | |
386 } | |
270 | 387 |
271 /* Try to find all stack stores of CALL_INSN arguments if | 388 /* Try to find all stack stores of CALL_INSN arguments if |
272 ACCUMULATE_OUTGOING_ARGS. If all stack stores have been found | 389 ACCUMULATE_OUTGOING_ARGS. If all stack stores have been found |
273 and it is therefore safe to eliminate the call, return true, | 390 and it is therefore safe to eliminate the call, return true, |
274 otherwise return false. This function should be first called | 391 otherwise return false. This function should be first called |
302 stored arguments. */ | 419 stored arguments. */ |
303 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1)) | 420 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1)) |
304 if (GET_CODE (XEXP (p, 0)) == USE | 421 if (GET_CODE (XEXP (p, 0)) == USE |
305 && MEM_P (XEXP (XEXP (p, 0), 0))) | 422 && MEM_P (XEXP (XEXP (p, 0), 0))) |
306 { | 423 { |
307 rtx mem = XEXP (XEXP (p, 0), 0), addr; | 424 rtx mem = XEXP (XEXP (p, 0), 0); |
308 HOST_WIDE_INT off = 0, size; | 425 HOST_WIDE_INT size; |
309 if (!MEM_SIZE_KNOWN_P (mem) || !MEM_SIZE (mem).is_constant (&size)) | 426 if (!MEM_SIZE_KNOWN_P (mem) || !MEM_SIZE (mem).is_constant (&size)) |
310 return false; | 427 return false; |
311 addr = XEXP (mem, 0); | 428 HOST_WIDE_INT off = sp_based_mem_offset (call_insn, mem, fast); |
312 if (GET_CODE (addr) == PLUS | 429 if (off == INTTYPE_MINIMUM (HOST_WIDE_INT)) |
313 && REG_P (XEXP (addr, 0)) | 430 return false; |
314 && CONST_INT_P (XEXP (addr, 1))) | |
315 { | |
316 off = INTVAL (XEXP (addr, 1)); | |
317 addr = XEXP (addr, 0); | |
318 } | |
319 if (addr != stack_pointer_rtx) | |
320 { | |
321 if (!REG_P (addr)) | |
322 return false; | |
323 /* If not fast, use chains to see if addr wasn't set to | |
324 sp + offset. */ | |
325 if (!fast) | |
326 { | |
327 df_ref use; | |
328 struct df_link *defs; | |
329 rtx set; | |
330 | |
331 FOR_EACH_INSN_USE (use, call_insn) | |
332 if (rtx_equal_p (addr, DF_REF_REG (use))) | |
333 break; | |
334 | |
335 if (use == NULL) | |
336 return false; | |
337 | |
338 for (defs = DF_REF_CHAIN (use); defs; defs = defs->next) | |
339 if (! DF_REF_IS_ARTIFICIAL (defs->ref)) | |
340 break; | |
341 | |
342 if (defs == NULL) | |
343 return false; | |
344 | |
345 set = single_set (DF_REF_INSN (defs->ref)); | |
346 if (!set) | |
347 return false; | |
348 | |
349 if (GET_CODE (SET_SRC (set)) != PLUS | |
350 || XEXP (SET_SRC (set), 0) != stack_pointer_rtx | |
351 || !CONST_INT_P (XEXP (SET_SRC (set), 1))) | |
352 return false; | |
353 | |
354 off += INTVAL (XEXP (SET_SRC (set), 1)); | |
355 } | |
356 else | |
357 return false; | |
358 } | |
359 min_sp_off = MIN (min_sp_off, off); | 431 min_sp_off = MIN (min_sp_off, off); |
360 max_sp_off = MAX (max_sp_off, off + size); | 432 max_sp_off = MAX (max_sp_off, off + size); |
361 } | 433 } |
362 | 434 |
363 if (min_sp_off >= max_sp_off) | 435 if (min_sp_off >= max_sp_off) |
369 loop. */ | 441 loop. */ |
370 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1)) | 442 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1)) |
371 if (GET_CODE (XEXP (p, 0)) == USE | 443 if (GET_CODE (XEXP (p, 0)) == USE |
372 && MEM_P (XEXP (XEXP (p, 0), 0))) | 444 && MEM_P (XEXP (XEXP (p, 0), 0))) |
373 { | 445 { |
374 rtx mem = XEXP (XEXP (p, 0), 0), addr; | 446 rtx mem = XEXP (XEXP (p, 0), 0); |
375 HOST_WIDE_INT off = 0, byte, size; | |
376 /* Checked in the previous iteration. */ | 447 /* Checked in the previous iteration. */ |
377 size = MEM_SIZE (mem).to_constant (); | 448 HOST_WIDE_INT size = MEM_SIZE (mem).to_constant (); |
378 addr = XEXP (mem, 0); | 449 HOST_WIDE_INT off = sp_based_mem_offset (call_insn, mem, fast); |
379 if (GET_CODE (addr) == PLUS | 450 gcc_checking_assert (off != INTTYPE_MINIMUM (HOST_WIDE_INT)); |
380 && REG_P (XEXP (addr, 0)) | 451 for (HOST_WIDE_INT byte = off; byte < off + size; byte++) |
381 && CONST_INT_P (XEXP (addr, 1))) | 452 if (!bitmap_set_bit (sp_bytes, byte - min_sp_off)) |
382 { | 453 gcc_unreachable (); |
383 off = INTVAL (XEXP (addr, 1)); | |
384 addr = XEXP (addr, 0); | |
385 } | |
386 if (addr != stack_pointer_rtx) | |
387 { | |
388 df_ref use; | |
389 struct df_link *defs; | |
390 rtx set; | |
391 | |
392 FOR_EACH_INSN_USE (use, call_insn) | |
393 if (rtx_equal_p (addr, DF_REF_REG (use))) | |
394 break; | |
395 | |
396 for (defs = DF_REF_CHAIN (use); defs; defs = defs->next) | |
397 if (! DF_REF_IS_ARTIFICIAL (defs->ref)) | |
398 break; | |
399 | |
400 set = single_set (DF_REF_INSN (defs->ref)); | |
401 off += INTVAL (XEXP (SET_SRC (set), 1)); | |
402 } | |
403 for (byte = off; byte < off + size; byte++) | |
404 { | |
405 if (!bitmap_set_bit (sp_bytes, byte - min_sp_off)) | |
406 gcc_unreachable (); | |
407 } | |
408 } | 454 } |
409 | 455 |
410 /* Walk backwards, looking for argument stores. The search stops | 456 /* Walk backwards, looking for argument stores. The search stops |
411 when seeing another call, sp adjustment or memory store other than | 457 when seeing another call, sp adjustment, memory store other than |
412 argument store. */ | 458 argument store or a read from an argument stack slot. */ |
459 struct check_argument_load_data data | |
460 = { sp_bytes, min_sp_off, max_sp_off, call_insn, fast, false }; | |
413 ret = false; | 461 ret = false; |
414 for (insn = PREV_INSN (call_insn); insn; insn = prev_insn) | 462 for (insn = PREV_INSN (call_insn); insn; insn = prev_insn) |
415 { | 463 { |
416 rtx set, mem, addr; | |
417 HOST_WIDE_INT off; | |
418 | |
419 if (insn == BB_HEAD (BLOCK_FOR_INSN (call_insn))) | 464 if (insn == BB_HEAD (BLOCK_FOR_INSN (call_insn))) |
420 prev_insn = NULL; | 465 prev_insn = NULL; |
421 else | 466 else |
422 prev_insn = PREV_INSN (insn); | 467 prev_insn = PREV_INSN (insn); |
423 | 468 |
425 break; | 470 break; |
426 | 471 |
427 if (!NONDEBUG_INSN_P (insn)) | 472 if (!NONDEBUG_INSN_P (insn)) |
428 continue; | 473 continue; |
429 | 474 |
430 set = single_set (insn); | 475 rtx set = single_set (insn); |
431 if (!set || SET_DEST (set) == stack_pointer_rtx) | 476 if (!set || SET_DEST (set) == stack_pointer_rtx) |
432 break; | 477 break; |
433 | 478 |
479 note_uses (&PATTERN (insn), check_argument_load, &data); | |
480 if (data.load_found) | |
481 break; | |
482 | |
434 if (!MEM_P (SET_DEST (set))) | 483 if (!MEM_P (SET_DEST (set))) |
435 continue; | 484 continue; |
436 | 485 |
437 mem = SET_DEST (set); | 486 rtx mem = SET_DEST (set); |
438 addr = XEXP (mem, 0); | 487 HOST_WIDE_INT off = sp_based_mem_offset (call_insn, mem, fast); |
439 off = 0; | 488 if (off == INTTYPE_MINIMUM (HOST_WIDE_INT)) |
440 if (GET_CODE (addr) == PLUS | 489 break; |
441 && REG_P (XEXP (addr, 0)) | |
442 && CONST_INT_P (XEXP (addr, 1))) | |
443 { | |
444 off = INTVAL (XEXP (addr, 1)); | |
445 addr = XEXP (addr, 0); | |
446 } | |
447 if (addr != stack_pointer_rtx) | |
448 { | |
449 if (!REG_P (addr)) | |
450 break; | |
451 if (!fast) | |
452 { | |
453 df_ref use; | |
454 struct df_link *defs; | |
455 rtx set; | |
456 | |
457 FOR_EACH_INSN_USE (use, insn) | |
458 if (rtx_equal_p (addr, DF_REF_REG (use))) | |
459 break; | |
460 | |
461 if (use == NULL) | |
462 break; | |
463 | |
464 for (defs = DF_REF_CHAIN (use); defs; defs = defs->next) | |
465 if (! DF_REF_IS_ARTIFICIAL (defs->ref)) | |
466 break; | |
467 | |
468 if (defs == NULL) | |
469 break; | |
470 | |
471 set = single_set (DF_REF_INSN (defs->ref)); | |
472 if (!set) | |
473 break; | |
474 | |
475 if (GET_CODE (SET_SRC (set)) != PLUS | |
476 || XEXP (SET_SRC (set), 0) != stack_pointer_rtx | |
477 || !CONST_INT_P (XEXP (SET_SRC (set), 1))) | |
478 break; | |
479 | |
480 off += INTVAL (XEXP (SET_SRC (set), 1)); | |
481 } | |
482 else | |
483 break; | |
484 } | |
485 | 490 |
486 HOST_WIDE_INT size; | 491 HOST_WIDE_INT size; |
487 if (!MEM_SIZE_KNOWN_P (mem) | 492 if (!MEM_SIZE_KNOWN_P (mem) |
488 || !MEM_SIZE (mem).is_constant (&size) | 493 || !MEM_SIZE (mem).is_constant (&size) |
489 || !check_argument_store (size, off, min_sp_off, | 494 || !check_argument_store (size, off, min_sp_off, |
577 if (NONDEBUG_INSN_P (insn)) | 582 if (NONDEBUG_INSN_P (insn)) |
578 { | 583 { |
579 rtx turn_into_use = NULL_RTX; | 584 rtx turn_into_use = NULL_RTX; |
580 | 585 |
581 /* Always delete no-op moves. */ | 586 /* Always delete no-op moves. */ |
582 if (noop_move_p (insn)) | 587 if (noop_move_p (insn) |
588 /* Unless the no-op move can throw and we are not allowed | |
589 to alter cfg. */ | |
590 && (!cfun->can_throw_non_call_exceptions | |
591 || (cfun->can_delete_dead_exceptions && can_alter_cfg) | |
592 || insn_nothrow_p (insn))) | |
583 { | 593 { |
584 if (RTX_FRAME_RELATED_P (insn)) | 594 if (RTX_FRAME_RELATED_P (insn)) |
585 turn_into_use | 595 turn_into_use |
586 = find_reg_note (insn, REG_CFA_RESTORE, NULL); | 596 = find_reg_note (insn, REG_CFA_RESTORE, NULL); |
587 if (turn_into_use && REG_P (XEXP (turn_into_use, 0))) | 597 if (turn_into_use && REG_P (XEXP (turn_into_use, 0))) |
619 fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn)); | 629 fprintf (dump_file, "DCE: Deleting insn %d\n", INSN_UID (insn)); |
620 | 630 |
621 /* Before we delete the insn we have to remove the REG_EQUAL notes | 631 /* Before we delete the insn we have to remove the REG_EQUAL notes |
622 for the destination regs in order to avoid dangling notes. */ | 632 for the destination regs in order to avoid dangling notes. */ |
623 remove_reg_equal_equiv_notes_for_defs (insn); | 633 remove_reg_equal_equiv_notes_for_defs (insn); |
624 | |
625 /* If a pure or const call is deleted, this may make the cfg | |
626 have unreachable blocks. We rememeber this and call | |
627 delete_unreachable_blocks at the end. */ | |
628 if (CALL_P (insn)) | |
629 must_clean = true; | |
630 | 634 |
631 if (turn_into_use) | 635 if (turn_into_use) |
632 { | 636 { |
633 /* Don't remove frame related noop moves if they cary | 637 /* Don't remove frame related noop moves if they cary |
634 REG_CFA_RESTORE note, while we don't need to emit any code, | 638 REG_CFA_RESTORE note, while we don't need to emit any code, |
638 INSN_CODE (insn) = -1; | 642 INSN_CODE (insn) = -1; |
639 df_insn_rescan (insn); | 643 df_insn_rescan (insn); |
640 } | 644 } |
641 else | 645 else |
642 /* Now delete the insn. */ | 646 /* Now delete the insn. */ |
643 delete_insn_and_edges (insn); | 647 must_clean |= delete_insn_and_edges (insn); |
644 } | 648 } |
645 | 649 |
646 /* Deleted a pure or const call. */ | 650 /* Deleted a pure or const call. */ |
647 if (must_clean) | 651 if (must_clean) |
648 delete_unreachable_blocks (); | 652 { |
653 gcc_assert (can_alter_cfg); | |
654 delete_unreachable_blocks (); | |
655 free_dominance_info (CDI_DOMINATORS); | |
656 } | |
649 } | 657 } |
650 | 658 |
651 | 659 |
652 /* Go through the instructions and mark those whose necessity is not | 660 /* Go through the instructions and mark those whose necessity is not |
653 dependent on inter-instruction information. Make sure all other | 661 dependent on inter-instruction information. Make sure all other |
674 /* Don't mark argument stores now. They will be marked | 682 /* Don't mark argument stores now. They will be marked |
675 if needed when the associated CALL is marked. */ | 683 if needed when the associated CALL is marked. */ |
676 if (arg_stores && bitmap_bit_p (arg_stores, INSN_UID (insn))) | 684 if (arg_stores && bitmap_bit_p (arg_stores, INSN_UID (insn))) |
677 continue; | 685 continue; |
678 if (deletable_insn_p (insn, fast, arg_stores)) | 686 if (deletable_insn_p (insn, fast, arg_stores)) |
679 mark_nonreg_stores (PATTERN (insn), insn, fast); | 687 mark_nonreg_stores (insn, fast); |
680 else | 688 else |
681 mark_insn (insn, fast); | 689 mark_insn (insn, fast); |
682 } | 690 } |
683 /* find_call_stack_args only looks at argument stores in the | 691 /* find_call_stack_args only looks at argument stores in the |
684 same bb. */ | 692 same bb. */ |