comparison gcc/cfgcleanup.c @ 67:f6334be47118

update gcc from gcc-4.6-20100522 to gcc-4.6-20110318
author nobuyasu <dimolto@cr.ie.u-ryukyu.ac.jp>
date Tue, 22 Mar 2011 17:18:12 +0900
parents b7f97abdc517
children 04ced10e8804
--- gcc/cfgcleanup.c    (65:65488c3d617d)
+++ gcc/cfgcleanup.c    (67:f6334be47118)
@@ -41,11 +41,11 @@
 #include "timevar.h"
 #include "output.h"
 #include "insn-config.h"
 #include "flags.h"
 #include "recog.h"
-#include "toplev.h"
+#include "diagnostic-core.h"
 #include "cselib.h"
 #include "params.h"
 #include "tm_p.h"
 #include "target.h"
 #include "cfglayout.h"
@@ -62,10 +62,14 @@
 /* Set to true when we are running first pass of try_optimize_cfg loop. */
 static bool first_pass;
 
 /* Set to true if crossjumps occured in the latest run of try_optimize_cfg. */
 static bool crossjumps_occured;
+
+/* Set to true if we couldn't run an optimization due to stale liveness
+   information; we should run df_analyze to enable more opportunities. */
+static bool block_was_dirty;
 
 static bool try_crossjump_to_edge (int, edge, edge);
 static bool try_crossjump_bb (int, basic_block);
 static bool outgoing_edges_match (int, basic_block, basic_block);
 static bool old_insns_match_p (int, rtx, rtx);
@@ -429,11 +433,11 @@
 {
   basic_block target, first;
   int counter, goto_locus;
   bool threaded = false;
   int nthreaded_edges = 0;
-  bool may_thread = first_pass | df_get_bb_dirty (b);
+  bool may_thread = first_pass || (b->flags & BB_MODIFIED) != 0;
 
   /* Skip complex edges because we don't know how to update them.
 
      Still handle fallthru edges, as we can succeed to forward fallthru
     edge to the same place as the branch edge of conditional branch
@@ -464,11 +468,11 @@
 
       while (counter < n_basic_blocks)
         {
           basic_block new_target = NULL;
           bool new_target_threaded = false;
-          may_thread |= df_get_bb_dirty (target);
+          may_thread |= (target->flags & BB_MODIFIED) != 0;
 
           if (FORWARDER_BLOCK_P (target)
               && !(single_succ_edge (target)->flags & EDGE_CROSSING)
               && single_succ (target) != EXIT_BLOCK_PTR)
             {
@@ -478,26 +482,38 @@
                 counter = n_basic_blocks;
               else if (!optimize)
                 {
                   /* When not optimizing, ensure that edges or forwarder
                      blocks with different locus are not optimized out. */
-                  int locus = single_succ_edge (target)->goto_locus;
-
-                  if (locus && goto_locus && !locator_eq (locus, goto_locus))
-                    counter = n_basic_blocks;
-                  else if (locus)
-                    goto_locus = locus;
-
-                  if (INSN_P (BB_END (target)))
-                    {
-                      locus = INSN_LOCATOR (BB_END (target));
-
-                      if (locus && goto_locus
-                          && !locator_eq (locus, goto_locus))
-                        counter = n_basic_blocks;
-                      else if (locus)
-                        goto_locus = locus;
+                  int new_locus = single_succ_edge (target)->goto_locus;
+                  int locus = goto_locus;
+
+                  if (new_locus && locus && !locator_eq (new_locus, locus))
+                    new_target = NULL;
+                  else
+                    {
+                      rtx last;
+
+                      if (new_locus)
+                        locus = new_locus;
+
+                      last = BB_END (target);
+                      if (DEBUG_INSN_P (last))
+                        last = prev_nondebug_insn (last);
+
+                      new_locus = last && INSN_P (last)
+                                  ? INSN_LOCATOR (last) : 0;
+
+                      if (new_locus && locus && !locator_eq (new_locus, locus))
+                        new_target = NULL;
+                      else
+                        {
+                          if (new_locus)
+                            locus = new_locus;
+
+                          goto_locus = locus;
+                        }
                     }
                 }
             }
 
       /* Allow to thread only over one edge at time to simplify updating
@@ -786,11 +802,10 @@
   else if (mode & CLEANUP_EXPENSIVE)
     {
       edge tmp_edge, b_fallthru_edge;
       bool c_has_outgoing_fallthru;
       bool b_has_incoming_fallthru;
-      edge_iterator ei;
 
       /* Avoid overactive code motion, as the forwarder blocks should be
          eliminated by edge redirection instead. One exception might have
         been if B is a forwarder block and C has no fallthru edge, but
         that should be cleaned up by bb-reorder instead. */
@@ -799,20 +814,14 @@
 
       /* We must make sure to not munge nesting of lexical blocks,
          and loop notes. This is done by squeezing out all the notes
         and leaving them there to lie. Not ideal, but functional. */
 
-      FOR_EACH_EDGE (tmp_edge, ei, c->succs)
-        if (tmp_edge->flags & EDGE_FALLTHRU)
-          break;
-
+      tmp_edge = find_fallthru_edge (c->succs);
       c_has_outgoing_fallthru = (tmp_edge != NULL);
 
-      FOR_EACH_EDGE (tmp_edge, ei, b->preds)
-        if (tmp_edge->flags & EDGE_FALLTHRU)
-          break;
-
+      tmp_edge = find_fallthru_edge (b->preds);
       b_has_incoming_fallthru = (tmp_edge != NULL);
       b_fallthru_edge = tmp_edge;
       next = b->prev_bb;
       if (next == c)
         next = next->prev_bb;
@@ -1177,17 +1186,28 @@
   i2 = BB_HEAD (bb2);
   last1 = beforelast1 = last2 = beforelast2 = NULL_RTX;
 
   while (true)
     {
-
-      /* Ignore notes. */
+      /* Ignore notes, except NOTE_INSN_EPILOGUE_BEG. */
       while (!NONDEBUG_INSN_P (i1) && i1 != BB_END (bb1))
-        i1 = NEXT_INSN (i1);
+        {
+          if (NOTE_P (i1) && NOTE_KIND (i1) == NOTE_INSN_EPILOGUE_BEG)
+            break;
+          i1 = NEXT_INSN (i1);
+        }
 
       while (!NONDEBUG_INSN_P (i2) && i2 != BB_END (bb2))
-        i2 = NEXT_INSN (i2);
+        {
+          if (NOTE_P (i2) && NOTE_KIND (i2) == NOTE_INSN_EPILOGUE_BEG)
+            break;
+          i2 = NEXT_INSN (i2);
+        }
+
+      if ((i1 == BB_END (bb1) && !NONDEBUG_INSN_P (i1))
+          || (i2 == BB_END (bb2) && !NONDEBUG_INSN_P (i2)))
+        break;
 
       if (NOTE_P (i1) || NOTE_P (i2)
           || JUMP_P (i1) || JUMP_P (i2))
         break;
 
@@ -1791,11 +1811,10 @@
 {
   edge e, e2, fallthru;
   bool changed;
   unsigned max, ix, ix2;
   basic_block ev, ev2;
-  edge_iterator ei;
 
   /* Nothing to do if there is not at least two incoming edges. */
   if (EDGE_COUNT (bb->preds) < 2)
     return false;
 
@@ -1828,18 +1847,11 @@
   max = PARAM_VALUE (PARAM_MAX_CROSSJUMP_EDGES);
 
   if (EDGE_COUNT (bb->preds) > max)
     return false;
 
-  FOR_EACH_EDGE (e, ei, bb->preds)
-    {
-      if (e->flags & EDGE_FALLTHRU)
-        {
-          fallthru = e;
-          break;
-        }
-    }
+  fallthru = find_fallthru_edge (bb->preds);
 
   changed = false;
   for (ix = 0, ev = bb; ix < EDGE_COUNT (ev->preds); )
     {
       e = EDGE_PRED (ev, ix);
@@ -1854,12 +1866,12 @@
       if (e == fallthru)
         continue;
       /* If nothing changed since the last attempt, there is nothing
         we can do. */
       if (!first_pass
-          && (!(df_get_bb_dirty (e->src))
-              && !(df_get_bb_dirty (fallthru->src))))
+          && !((e->src->flags & BB_MODIFIED)
+               || (fallthru->src->flags & BB_MODIFIED)))
         continue;
 
       if (try_crossjump_to_edge (mode, e, fallthru))
         {
           changed = true;
@@ -1904,12 +1916,12 @@
           continue;
 
           /* If nothing changed since the last attempt, there is nothing
             we can do. */
           if (!first_pass
-              && (!(df_get_bb_dirty (e->src))
-                  && !(df_get_bb_dirty (e2->src))))
+              && !((e->src->flags & BB_MODIFIED)
+                   || (e2->src->flags & BB_MODIFIED)))
             continue;
 
           if (try_crossjump_to_edge (mode, e, e2))
             {
               changed = true;
@@ -1924,10 +1936,303 @@
     crossjumps_occured = true;
 
   return changed;
 }
 
+/* Search the successors of BB for common insn sequences.  When found,
+   share code between them by moving it across the basic block
+   boundary.  Return true if any changes made. */
+
+static bool
+try_head_merge_bb (basic_block bb)
+{
+  basic_block final_dest_bb = NULL;
+  int max_match = INT_MAX;
+  edge e0;
+  rtx *headptr, *currptr, *nextptr;
+  bool changed, moveall;
+  unsigned ix;
+  rtx e0_last_head, cond, move_before;
+  unsigned nedges = EDGE_COUNT (bb->succs);
+  rtx jump = BB_END (bb);
+  regset live, live_union;
+
+  /* Nothing to do if there is not at least two outgoing edges. */
+  if (nedges < 2)
+    return false;
+
+  /* Don't crossjump if this block ends in a computed jump,
+     unless we are optimizing for size. */
+  if (optimize_bb_for_size_p (bb)
+      && bb != EXIT_BLOCK_PTR
+      && computed_jump_p (BB_END (bb)))
+    return false;
+
+  cond = get_condition (jump, &move_before, true, false);
+  if (cond == NULL_RTX)
+    move_before = jump;
+
+  for (ix = 0; ix < nedges; ix++)
+    if (EDGE_SUCC (bb, ix)->dest == EXIT_BLOCK_PTR)
+      return false;
+
+  for (ix = 0; ix < nedges; ix++)
+    {
+      edge e = EDGE_SUCC (bb, ix);
+      basic_block other_bb = e->dest;
+
+      if (df_get_bb_dirty (other_bb))
+        {
+          block_was_dirty = true;
+          return false;
+        }
+
+      if (e->flags & EDGE_ABNORMAL)
+        return false;
+
+      /* Normally, all destination blocks must only be reachable from this
+         block, i.e. they must have one incoming edge.
+
+         There is one special case we can handle, that of multiple consecutive
+         jumps where the first jumps to one of the targets of the second jump.
+         This happens frequently in switch statements for default labels.
+         The structure is as follows:
+         FINAL_DEST_BB
+         ....
+         if (cond) jump A;
+         fall through
+         BB
+         jump with targets A, B, C, D...
+         A
+         has two incoming edges, from FINAL_DEST_BB and BB
+
+         In this case, we can try to move the insns through BB and into
+         FINAL_DEST_BB. */
+      if (EDGE_COUNT (other_bb->preds) != 1)
+        {
+          edge incoming_edge, incoming_bb_other_edge;
+          edge_iterator ei;
+
+          if (final_dest_bb != NULL
+              || EDGE_COUNT (other_bb->preds) != 2)
+            return false;
+
+          /* We must be able to move the insns across the whole block. */
+          move_before = BB_HEAD (bb);
+          while (!NONDEBUG_INSN_P (move_before))
+            move_before = NEXT_INSN (move_before);
+
+          if (EDGE_COUNT (bb->preds) != 1)
+            return false;
+          incoming_edge = EDGE_PRED (bb, 0);
+          final_dest_bb = incoming_edge->src;
+          if (EDGE_COUNT (final_dest_bb->succs) != 2)
+            return false;
+          FOR_EACH_EDGE (incoming_bb_other_edge, ei, final_dest_bb->succs)
+            if (incoming_bb_other_edge != incoming_edge)
+              break;
+          if (incoming_bb_other_edge->dest != other_bb)
+            return false;
+        }
+    }
+
+  e0 = EDGE_SUCC (bb, 0);
+  e0_last_head = NULL_RTX;
+  changed = false;
+
+  for (ix = 1; ix < nedges; ix++)
+    {
+      edge e = EDGE_SUCC (bb, ix);
+      rtx e0_last, e_last;
+      int nmatch;
+
+      nmatch = flow_find_head_matching_sequence (e0->dest, e->dest,
+                                                 &e0_last, &e_last, 0);
+      if (nmatch == 0)
+        return false;
+
+      if (nmatch < max_match)
+        {
+          max_match = nmatch;
+          e0_last_head = e0_last;
+        }
+    }
+
+  /* If we matched an entire block, we probably have to avoid moving the
+     last insn. */
+  if (max_match > 0
+      && e0_last_head == BB_END (e0->dest)
+      && (find_reg_note (e0_last_head, REG_EH_REGION, 0)
+          || control_flow_insn_p (e0_last_head)))
+    {
+      max_match--;
+      if (max_match == 0)
+        return false;
+      do
+        e0_last_head = prev_real_insn (e0_last_head);
+      while (DEBUG_INSN_P (e0_last_head));
+    }
+
+  if (max_match == 0)
+    return false;
+
+  /* We must find a union of the live registers at each of the end points. */
+  live = BITMAP_ALLOC (NULL);
+  live_union = BITMAP_ALLOC (NULL);
+
+  currptr = XNEWVEC (rtx, nedges);
+  headptr = XNEWVEC (rtx, nedges);
+  nextptr = XNEWVEC (rtx, nedges);
+
+  for (ix = 0; ix < nedges; ix++)
+    {
+      int j;
+      basic_block merge_bb = EDGE_SUCC (bb, ix)->dest;
+      rtx head = BB_HEAD (merge_bb);
+
+      while (!NONDEBUG_INSN_P (head))
+        head = NEXT_INSN (head);
+      headptr[ix] = head;
+      currptr[ix] = head;
+
+      /* Compute the end point and live information */
+      for (j = 1; j < max_match; j++)
+        do
+          head = NEXT_INSN (head);
+        while (!NONDEBUG_INSN_P (head));
+      simulate_backwards_to_point (merge_bb, live, head);
+      IOR_REG_SET (live_union, live);
+    }
+
+  /* If we're moving across two blocks, verify the validity of the
+     first move, then adjust the target and let the loop below deal
+     with the final move. */
+  if (final_dest_bb != NULL)
+    {
+      rtx move_upto;
+
+      moveall = can_move_insns_across (currptr[0], e0_last_head, move_before,
+                                       jump, e0->dest, live_union,
+                                       NULL, &move_upto);
+      if (!moveall)
+        {
+          if (move_upto == NULL_RTX)
+            goto out;
+
+          while (e0_last_head != move_upto)
+            {
+              df_simulate_one_insn_backwards (e0->dest, e0_last_head,
+                                              live_union);
+              e0_last_head = PREV_INSN (e0_last_head);
+            }
+        }
+      if (e0_last_head == NULL_RTX)
+        goto out;
+
+      jump = BB_END (final_dest_bb);
+      cond = get_condition (jump, &move_before, true, false);
+      if (cond == NULL_RTX)
+        move_before = jump;
+    }
+
+  do
+    {
+      rtx move_upto;
+      moveall = can_move_insns_across (currptr[0], e0_last_head,
+                                       move_before, jump, e0->dest, live_union,
+                                       NULL, &move_upto);
+      if (!moveall && move_upto == NULL_RTX)
+        {
+          if (jump == move_before)
+            break;
+
+          /* Try again, using a different insertion point. */
+          move_before = jump;
+
+#ifdef HAVE_cc0
+          /* Don't try moving before a cc0 user, as that may invalidate
+             the cc0. */
+          if (reg_mentioned_p (cc0_rtx, jump))
+            break;
+#endif
+
+          continue;
+        }
+
+      if (final_dest_bb && !moveall)
+        /* We haven't checked whether a partial move would be OK for the first
+           move, so we have to fail this case. */
+        break;
+
+      changed = true;
+      for (;;)
+        {
+          if (currptr[0] == move_upto)
+            break;
+          for (ix = 0; ix < nedges; ix++)
+            {
+              rtx curr = currptr[ix];
+              do
+                curr = NEXT_INSN (curr);
+              while (!NONDEBUG_INSN_P (curr));
+              currptr[ix] = curr;
+            }
+        }
+
+      /* If we can't currently move all of the identical insns, remember
+         each insn after the range that we'll merge. */
+      if (!moveall)
+        for (ix = 0; ix < nedges; ix++)
+          {
+            rtx curr = currptr[ix];
+            do
+              curr = NEXT_INSN (curr);
+            while (!NONDEBUG_INSN_P (curr));
+            nextptr[ix] = curr;
+          }
+
+      reorder_insns (headptr[0], currptr[0], PREV_INSN (move_before));
+      df_set_bb_dirty (EDGE_SUCC (bb, 0)->dest);
+      if (final_dest_bb != NULL)
+        df_set_bb_dirty (final_dest_bb);
+      df_set_bb_dirty (bb);
+      for (ix = 1; ix < nedges; ix++)
+        {
+          df_set_bb_dirty (EDGE_SUCC (bb, ix)->dest);
+          delete_insn_chain (headptr[ix], currptr[ix], false);
+        }
+      if (!moveall)
+        {
+          if (jump == move_before)
+            break;
+
+          /* For the unmerged insns, try a different insertion point. */
+          move_before = jump;
+
+#ifdef HAVE_cc0
+          /* Don't try moving before a cc0 user, as that may invalidate
+             the cc0. */
+          if (reg_mentioned_p (cc0_rtx, jump))
+            break;
+#endif
+
+          for (ix = 0; ix < nedges; ix++)
+            currptr[ix] = headptr[ix] = nextptr[ix];
+        }
+    }
+  while (!moveall);
+
+ out:
+  free (currptr);
+  free (headptr);
+  free (nextptr);
+
+  crossjumps_occured |= changed;
+
+  return changed;
+}
+
 /* Return true if BB contains just bb note, or bb note followed
    by only DEBUG_INSNs. */
 
 static bool
 trivially_empty_bb_p (basic_block bb)
@@ -1969,10 +2274,11 @@
   /* Attempt to merge blocks as made possible by edge removal. If
      a block has only one successor, and the successor has only
      one predecessor, they may be combined. */
   do
     {
+      block_was_dirty = false;
       changed = false;
       iterations++;
 
       if (dump_file)
         fprintf (dump_file,
@@ -2033,12 +2339,11 @@
                     if ((e->flags & EDGE_FALLTHRU))
                       emit_barrier_after (BB_END (e->src));
                 }
             }
           delete_basic_block (b);
-          if (!(mode & CLEANUP_CFGLAYOUT))
-            changed = true;
+          changed = true;
           /* Avoid trying to remove ENTRY_BLOCK_PTR. */
           b = (c == ENTRY_BLOCK_PTR ? c->next_bb : c);
           continue;
         }
 
@@ -2100,10 +2405,11 @@
               changed = true;
               b = c;
               continue;
             }
 
+          /* Merge B with its single successor, if any. */
           if (single_succ_p (b)
               && (s = single_succ_edge (b))
               && !(s->flags & EDGE_COMPLEX)
               && (c = s->dest) != EXIT_BLOCK_PTR
               && single_pred_p (c)
@@ -2167,10 +2473,17 @@
           /* Look for shared code between blocks. */
           if ((mode & CLEANUP_CROSSJUMP)
               && try_crossjump_bb (mode, b))
             changed_here = true;
 
+          if ((mode & CLEANUP_CROSSJUMP)
+              /* This can lengthen register lifetimes. Do it only after
+                 reload. */
+              && reload_completed
+              && try_head_merge_bb (b))
+            changed_here = true;
+
           /* Don't get confused by the index shift caused by
              deleting blocks. */
           if (!changed_here)
             b = b->next_bb;
           else
@@ -2178,10 +2491,17 @@
         }
 
       if ((mode & CLEANUP_CROSSJUMP)
           && try_crossjump_bb (mode, EXIT_BLOCK_PTR))
         changed = true;
+
+      if (block_was_dirty)
+        {
+          /* This should only be set by head-merging. */
+          gcc_assert (mode & CLEANUP_CROSSJUMP);
+          df_analyze ();
+        }
 
 #ifdef ENABLE_CHECKING
       if (changed)
         verify_flow_info ();
 #endif
@@ -2363,12 +2683,11 @@
              code, so delete_trivially_dead_insns or even doing nothing at all
              is good enough. */
           if ((mode & CLEANUP_EXPENSIVE) && !reload_completed
               && !delete_trivially_dead_insns (get_insns (), max_reg_num ()))
             break;
-          else if ((mode & CLEANUP_CROSSJUMP)
-                   && crossjumps_occured)
+          if ((mode & CLEANUP_CROSSJUMP) && crossjumps_occured)
             run_fast_dce ();
         }
       else
         break;
     }