gcc/cfgbuild.c @ 0:a06113de4d67

first commit

author    kent <kent@cr.ie.u-ryukyu.ac.jp>
date      Fri, 17 Jul 2009 14:47:48 +0900
parents   (none)
children  77e2b8dfacca
/* Control flow graph building code for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* find_basic_blocks divides the current function's rtl into basic
   blocks and constructs the CFG.  The blocks are recorded in the
   basic_block_info array; the CFG exists in the edge structures
   referenced by the blocks.

   find_basic_blocks also finds any unreachable loops and deletes them.

   Available functionality:
     - CFG construction
         find_basic_blocks  */
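
/* Illustrative usage sketch.  A pass that wants a fresh CFG for the current
   function would typically do something like the following (assuming the
   insn stream is obtained with get_insns ()):

       find_basic_blocks (get_insns ());
       FOR_EACH_BB (bb)
         ... inspect bb, bb->succs and bb->preds ...

   The exact driver code varies between callers; this only illustrates how
   the entry point below relates to basic_block_info and the edge lists
   described above.  */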

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "function.h"
#include "except.h"
#include "toplev.h"
#include "timevar.h"

static int count_basic_blocks (const_rtx);
static void find_basic_blocks_1 (rtx);
static void make_edges (basic_block, basic_block, int);
static void make_label_edge (sbitmap, basic_block, rtx, int);
static void find_bb_boundaries (basic_block);
static void compute_outgoing_frequencies (basic_block);

/* Return true if insn is something that should be contained inside basic
   block.  */

bool
inside_basic_block_p (const_rtx insn)
{
  switch (GET_CODE (insn))
    {
    case CODE_LABEL:
      /* Avoid creating a basic block for jump tables.  */
      return (NEXT_INSN (insn) == 0
              || !JUMP_P (NEXT_INSN (insn))
              || (GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_VEC
                  && GET_CODE (PATTERN (NEXT_INSN (insn))) != ADDR_DIFF_VEC));

    case JUMP_INSN:
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
              && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
    case INSN:
      return true;

    case BARRIER:
    case NOTE:
      return false;

    default:
      gcc_unreachable ();
    }
}

/* Return true if INSN may cause control flow transfer, so it should be last in
   the basic block.  */

bool
control_flow_insn_p (const_rtx insn)
{
  rtx note;

  switch (GET_CODE (insn))
    {
    case NOTE:
    case CODE_LABEL:
      return false;

    case JUMP_INSN:
      /* A jump insn always causes a control transfer, except for tablejumps.  */
      return (GET_CODE (PATTERN (insn)) != ADDR_VEC
              && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC);

    case CALL_INSN:
      /* Noreturn and sibling call instructions terminate the basic blocks
         (but only if they happen unconditionally).  */
      if ((SIBLING_CALL_P (insn)
           || find_reg_note (insn, REG_NORETURN, 0))
          && GET_CODE (PATTERN (insn)) != COND_EXEC)
        return true;
      /* Call insn may return to the nonlocal goto handler.  */
      return ((nonlocal_goto_handler_labels
               && (0 == (note = find_reg_note (insn, REG_EH_REGION,
                                               NULL_RTX))
                   || INTVAL (XEXP (note, 0)) >= 0))
              /* Or may trap.  */
              || can_throw_internal (insn));

    case INSN:
      /* Treat trap instructions like noreturn calls (same provision).  */
      if (GET_CODE (PATTERN (insn)) == TRAP_IF
          && XEXP (PATTERN (insn), 0) == const1_rtx)
        return true;

      return (flag_non_call_exceptions && can_throw_internal (insn));

    case BARRIER:
      /* It is nonsense to reach a barrier when looking for the end of a
         basic block, but before dead code is eliminated this may happen.  */
      return false;

    default:
      gcc_unreachable ();
    }
}

/* Count the basic blocks of the function.  */

static int
count_basic_blocks (const_rtx f)
{
  int count = NUM_FIXED_BLOCKS;
  bool saw_insn = false;
  const_rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      /* Code labels and barriers cause the current basic block to be
         terminated at the previous real insn.  */
      if ((LABEL_P (insn) || BARRIER_P (insn))
          && saw_insn)
        count++, saw_insn = false;

      /* Start basic block if needed.  */
      if (!saw_insn && inside_basic_block_p (insn))
        saw_insn = true;

      /* A control flow insn causes the current basic block to be terminated.  */
      if (saw_insn && control_flow_insn_p (insn))
        count++, saw_insn = false;
    }

  if (saw_insn)
    count++;

  /* The rest of the compiler works a bit smoother when we don't have to
     check for the edge case of do-nothing functions with no basic blocks.  */
  if (count == NUM_FIXED_BLOCKS)
    {
      emit_use (const0_rtx);
      count = NUM_FIXED_BLOCKS + 1;
    }

  return count;
}

/* Create an edge between two basic blocks.  FLAGS are auxiliary information
   about the edge that is accumulated between calls.  */

/* Create an edge from a basic block to a label.  */

static void
make_label_edge (sbitmap edge_cache, basic_block src, rtx label, int flags)
{
  gcc_assert (LABEL_P (label));

  /* If the label was never emitted, this insn is junk, but avoid a
     crash trying to refer to BLOCK_FOR_INSN (label).  This can happen
     as a result of a syntax error and a diagnostic has already been
     printed.  */

  if (INSN_UID (label) == 0)
    return;

  cached_make_edge (edge_cache, src, BLOCK_FOR_INSN (label), flags);
}

/* Create the edges generated by INSN in REGION.  */

void
rtl_make_eh_edge (sbitmap edge_cache, basic_block src, rtx insn)
{
  int is_call = CALL_P (insn) ? EDGE_ABNORMAL_CALL : 0;
  rtx handlers, i;

  handlers = reachable_handlers (insn);

  for (i = handlers; i; i = XEXP (i, 1))
    make_label_edge (edge_cache, src, XEXP (i, 0),
                     EDGE_ABNORMAL | EDGE_EH | is_call);

  free_INSN_LIST_list (&handlers);
}

/* States of basic block as seen by find_many_sub_basic_blocks.  */
enum state {
  /* Basic blocks created via split_block belong to this state.
     make_edges will examine these basic blocks to see if we need to
     create edges going out of them.  */
  BLOCK_NEW = 0,

  /* Basic blocks that do not need examining belong to this state.
     These blocks will be left intact.  In particular, make_edges will
     not create edges going out of these basic blocks.  */
  BLOCK_ORIGINAL,

  /* Basic blocks that may need splitting (due to a label appearing in
     the middle, etc) belong to this state.  After splitting them,
     make_edges will create edges going out of them as needed.  */
  BLOCK_TO_SPLIT
};

#define STATE(BB) (enum state) ((size_t) (BB)->aux)
#define SET_STATE(BB, STATE) ((BB)->aux = (void *) (size_t) (STATE))

/* Used internally by purge_dead_tablejump_edges, ORed into state.  */
#define BLOCK_USED_BY_TABLEJUMP 32
#define FULL_STATE(BB) ((size_t) (BB)->aux)
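
/* Illustrative sketch of the encoding above.  bb->aux is reused as a small
   integer rather than a pointer: the low bits hold one of the enum state
   values (0, 1 or 2), and BLOCK_USED_BY_TABLEJUMP (32) is chosen above them
   so that purge_dead_tablejump_edges can OR it in temporarily:

       SET_STATE (bb, BLOCK_NEW);
         -> FULL_STATE (bb) == BLOCK_NEW, STATE (bb) == BLOCK_NEW

       SET_STATE (bb, FULL_STATE (bb) | BLOCK_USED_BY_TABLEJUMP);
         -> FULL_STATE (bb) == (BLOCK_NEW | BLOCK_USED_BY_TABLEJUMP)

   The flag must be cleared again (as purge_dead_tablejump_edges does with
   FULL_STATE (bb) & ~(size_t) BLOCK_USED_BY_TABLEJUMP) before STATE (bb)
   is compared against the plain enum values.  */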

/* Identify the edges going out of basic blocks between MIN and MAX,
   inclusive, that have their states set to BLOCK_NEW or
   BLOCK_TO_SPLIT.

   UPDATE_P should be nonzero if we are updating CFG and zero if we
   are building CFG from scratch.  */

static void
make_edges (basic_block min, basic_block max, int update_p)
{
  basic_block bb;
  sbitmap edge_cache = NULL;

  /* Heavy use of computed goto in machine-generated code can lead to
     nearly fully-connected CFGs.  In that case we spend a significant
     amount of time searching the edge lists for duplicates.  */
  if (forced_labels || cfun->cfg->max_jumptable_ents > 100)
    edge_cache = sbitmap_alloc (last_basic_block);

  /* By nature of the way these get numbered, ENTRY_BLOCK_PTR->next_bb block
     is always the entry.  */
  if (min == ENTRY_BLOCK_PTR->next_bb)
    make_edge (ENTRY_BLOCK_PTR, min, EDGE_FALLTHRU);

  FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
    {
      rtx insn, x;
      enum rtx_code code;
      edge e;
      edge_iterator ei;

      if (STATE (bb) == BLOCK_ORIGINAL)
        continue;

      /* If we have an edge cache, cache edges going out of BB.  */
      if (edge_cache)
        {
          sbitmap_zero (edge_cache);
          if (update_p)
            {
              FOR_EACH_EDGE (e, ei, bb->succs)
                if (e->dest != EXIT_BLOCK_PTR)
                  SET_BIT (edge_cache, e->dest->index);
            }
        }

      if (LABEL_P (BB_HEAD (bb))
          && LABEL_ALT_ENTRY_P (BB_HEAD (bb)))
        cached_make_edge (NULL, ENTRY_BLOCK_PTR, bb, 0);

      /* Examine the last instruction of the block, and discover the
         ways we can leave the block.  */

      insn = BB_END (bb);
      code = GET_CODE (insn);

      /* A branch.  */
      if (code == JUMP_INSN)
        {
          rtx tmp;

          /* Recognize exception handling placeholders.  */
          if (GET_CODE (PATTERN (insn)) == RESX)
            rtl_make_eh_edge (edge_cache, bb, insn);

          /* Recognize a non-local goto as a branch outside the
             current function.  */
          else if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
            ;

          /* Recognize a tablejump and do the right thing.  */
          else if (tablejump_p (insn, NULL, &tmp))
            {
              rtvec vec;
              int j;

              if (GET_CODE (PATTERN (tmp)) == ADDR_VEC)
                vec = XVEC (PATTERN (tmp), 0);
              else
                vec = XVEC (PATTERN (tmp), 1);

              for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
                make_label_edge (edge_cache, bb,
                                 XEXP (RTVEC_ELT (vec, j), 0), 0);

              /* Some targets (eg, ARM) emit a conditional jump that also
                 contains the out-of-range target.  Scan for these and
                 add an edge if necessary.  */
              if ((tmp = single_set (insn)) != NULL
                  && SET_DEST (tmp) == pc_rtx
                  && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
                  && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
                make_label_edge (edge_cache, bb,
                                 XEXP (XEXP (SET_SRC (tmp), 2), 0), 0);
            }

          /* If this is a computed jump, then mark it as reaching
             everything on the forced_labels list.  */
          else if (computed_jump_p (insn))
            {
              for (x = forced_labels; x; x = XEXP (x, 1))
                make_label_edge (edge_cache, bb, XEXP (x, 0), EDGE_ABNORMAL);
            }

          /* Return insns create an edge to the exit block.  */
          else if (returnjump_p (insn))
            cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, 0);

          /* Otherwise, we have a plain conditional or unconditional jump.  */
          else
            {
              gcc_assert (JUMP_LABEL (insn));
              make_label_edge (edge_cache, bb, JUMP_LABEL (insn), 0);
            }
        }

      /* If this is a sibling call insn, then this is in effect a combined call
         and return, and so we need an edge to the exit block.  No need to
         worry about EH edges, since we wouldn't have created the sibling call
         in the first place.  */
      if (code == CALL_INSN && SIBLING_CALL_P (insn))
        cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR,
                          EDGE_SIBCALL | EDGE_ABNORMAL);

      /* If this is a CALL_INSN, then mark it as reaching the active EH
         handler for this CALL_INSN.  If we're handling non-call
         exceptions then any insn can reach any of the active handlers.
         Also mark the CALL_INSN as reaching any nonlocal goto handler.  */
      else if (code == CALL_INSN || flag_non_call_exceptions)
        {
          /* Add any appropriate EH edges.  */
          rtl_make_eh_edge (edge_cache, bb, insn);

          if (code == CALL_INSN && nonlocal_goto_handler_labels)
            {
              /* ??? This could be made smarter: in some cases it's possible
                 to tell that certain calls will not do a nonlocal goto.
                 For example, if the nested functions that do the nonlocal
                 gotos do not have their addresses taken, then only calls to
                 those functions or to other nested functions that use them
                 could possibly do nonlocal gotos.  */

              /* We do know that a REG_EH_REGION note with a value less
                 than 0 is guaranteed not to perform a non-local goto.  */
              rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);

              if (!note || INTVAL (XEXP (note, 0)) >= 0)
                for (x = nonlocal_goto_handler_labels; x; x = XEXP (x, 1))
                  make_label_edge (edge_cache, bb, XEXP (x, 0),
                                   EDGE_ABNORMAL | EDGE_ABNORMAL_CALL);
            }
        }

      /* Find out if we can drop through to the next block.  */
      insn = NEXT_INSN (insn);
      e = find_edge (bb, EXIT_BLOCK_PTR);
      if (e && e->flags & EDGE_FALLTHRU)
        insn = NULL;

      while (insn
             && NOTE_P (insn)
             && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK)
        insn = NEXT_INSN (insn);

      if (!insn)
        cached_make_edge (edge_cache, bb, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
      else if (bb->next_bb != EXIT_BLOCK_PTR)
        {
          if (insn == BB_HEAD (bb->next_bb))
            cached_make_edge (edge_cache, bb, bb->next_bb, EDGE_FALLTHRU);
        }
    }

  if (edge_cache)
    sbitmap_vector_free (edge_cache);
}

/* Find all basic blocks of the function whose first insn is F.

   Collect and return a list of labels whose addresses are taken.  This
   will be used in make_edges for use with computed gotos.  */

static void
find_basic_blocks_1 (rtx f)
{
  rtx insn, next;
  rtx bb_note = NULL_RTX;
  rtx head = NULL_RTX;
  rtx end = NULL_RTX;
  basic_block prev = ENTRY_BLOCK_PTR;

  /* We process the instructions in a slightly different way than we did
     previously.  This is so that we see a NOTE_BASIC_BLOCK after we have
     closed out the previous block, so that it gets attached at the proper
     place.  Since this form should be equivalent to the previous,
     count_basic_blocks continues to use the old form as a check.  */

  for (insn = f; insn; insn = next)
    {
      enum rtx_code code = GET_CODE (insn);

      next = NEXT_INSN (insn);

      if ((LABEL_P (insn) || BARRIER_P (insn))
          && head)
        {
          prev = create_basic_block_structure (head, end, bb_note, prev);
          head = end = NULL_RTX;
          bb_note = NULL_RTX;
        }

      if (inside_basic_block_p (insn))
        {
          if (head == NULL_RTX)
            head = insn;
          end = insn;
        }

      if (head && control_flow_insn_p (insn))
        {
          prev = create_basic_block_structure (head, end, bb_note, prev);
          head = end = NULL_RTX;
          bb_note = NULL_RTX;
        }

      switch (code)
        {
        case NOTE:
          /* Look for basic block notes with which to keep the
             basic_block_info pointers stable.  Unthread the note now;
             we'll put it back at the right place in create_basic_block.
             Or not at all if we've already found a note in this block.  */
          if (NOTE_INSN_BASIC_BLOCK_P (insn))
            {
              if (bb_note == NULL_RTX)
                bb_note = insn;
              else
                next = delete_insn (insn);
            }
          break;

        case CODE_LABEL:
        case JUMP_INSN:
        case CALL_INSN:
        case INSN:
        case BARRIER:
          break;

        default:
          gcc_unreachable ();
        }
    }

  if (head != NULL_RTX)
    create_basic_block_structure (head, end, bb_note, prev);
  else if (bb_note)
    delete_insn (bb_note);

  gcc_assert (last_basic_block == n_basic_blocks);

  clear_aux_for_blocks ();
}


/* Find basic blocks of the current function.
   F is the first insn of the function.  */

void
find_basic_blocks (rtx f)
{
  basic_block bb;

  timevar_push (TV_CFG);

  /* Flush out existing data.  */
  if (basic_block_info != NULL)
    {
      clear_edges ();

      /* Clear bb->aux on all extant basic blocks.  We'll use this as a
         tag for reuse during create_basic_block, just in case some pass
         copies around basic block notes improperly.  */
      FOR_EACH_BB (bb)
        bb->aux = NULL;

      basic_block_info = NULL;
    }

  n_basic_blocks = count_basic_blocks (f);
  last_basic_block = NUM_FIXED_BLOCKS;
  ENTRY_BLOCK_PTR->next_bb = EXIT_BLOCK_PTR;
  EXIT_BLOCK_PTR->prev_bb = ENTRY_BLOCK_PTR;


  /* Size the basic block table.  The actual structures will be allocated
     by find_basic_blocks_1, since we want to keep the structure pointers
     stable across calls to find_basic_blocks.  */
  /* ??? This whole issue would be much simpler if we called find_basic_blocks
     exactly once, and thereafter we don't have a single long chain of
     instructions at all until close to the end of compilation when we
     actually lay them out.  */

  basic_block_info = VEC_alloc (basic_block, gc, n_basic_blocks);
  VEC_safe_grow_cleared (basic_block, gc, basic_block_info, n_basic_blocks);
  SET_BASIC_BLOCK (ENTRY_BLOCK, ENTRY_BLOCK_PTR);
  SET_BASIC_BLOCK (EXIT_BLOCK, EXIT_BLOCK_PTR);

  find_basic_blocks_1 (f);

  profile_status = PROFILE_ABSENT;

  /* Tell make_edges to examine every block for out-going edges.  */
  FOR_EACH_BB (bb)
    SET_STATE (bb, BLOCK_NEW);

  /* Discover the edges of our cfg.  */
  make_edges (ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR->prev_bb, 0);

  /* Do very simple cleanup now, for the benefit of code that runs between
     here and cleanup_cfg, e.g. thread_prologue_and_epilogue_insns.  */
  tidy_fallthru_edges ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  timevar_pop (TV_CFG);
}

static void
mark_tablejump_edge (rtx label)
{
  basic_block bb;

  gcc_assert (LABEL_P (label));
  /* See comment in make_label_edge.  */
  if (INSN_UID (label) == 0)
    return;
  bb = BLOCK_FOR_INSN (label);
  SET_STATE (bb, FULL_STATE (bb) | BLOCK_USED_BY_TABLEJUMP);
}

static void
purge_dead_tablejump_edges (basic_block bb, rtx table)
{
  rtx insn = BB_END (bb), tmp;
  rtvec vec;
  int j;
  edge_iterator ei;
  edge e;

  if (GET_CODE (PATTERN (table)) == ADDR_VEC)
    vec = XVEC (PATTERN (table), 0);
  else
    vec = XVEC (PATTERN (table), 1);

  for (j = GET_NUM_ELEM (vec) - 1; j >= 0; --j)
    mark_tablejump_edge (XEXP (RTVEC_ELT (vec, j), 0));

  /* Some targets (eg, ARM) emit a conditional jump that also
     contains the out-of-range target.  Scan for these and
     add an edge if necessary.  */
  if ((tmp = single_set (insn)) != NULL
      && SET_DEST (tmp) == pc_rtx
      && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
      && GET_CODE (XEXP (SET_SRC (tmp), 2)) == LABEL_REF)
    mark_tablejump_edge (XEXP (XEXP (SET_SRC (tmp), 2), 0));

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (FULL_STATE (e->dest) & BLOCK_USED_BY_TABLEJUMP)
        SET_STATE (e->dest, FULL_STATE (e->dest)
                            & ~(size_t) BLOCK_USED_BY_TABLEJUMP);
      else if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
        {
          remove_edge (e);
          continue;
        }
      ei_next (&ei);
    }
}

/* Scan basic block BB for possible BB boundaries inside the block
   and create new basic blocks in the process.  */

static void
find_bb_boundaries (basic_block bb)
{
  basic_block orig_bb = bb;
  rtx insn = BB_HEAD (bb);
  rtx end = BB_END (bb), x;
  rtx table;
  rtx flow_transfer_insn = NULL_RTX;
  edge fallthru = NULL;

  if (insn == BB_END (bb))
    return;

  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  /* Scan insn chain and try to find new basic block boundaries.  */
  while (1)
    {
      enum rtx_code code = GET_CODE (insn);

      /* On code label, split current basic block.  */
      if (code == CODE_LABEL)
        {
          fallthru = split_block (bb, PREV_INSN (insn));
          if (flow_transfer_insn)
            {
              BB_END (bb) = flow_transfer_insn;

              /* Clean up the bb field for the insns between the blocks.  */
              for (x = NEXT_INSN (flow_transfer_insn);
                   x != BB_HEAD (fallthru->dest);
                   x = NEXT_INSN (x))
                if (!BARRIER_P (x))
                  set_block_for_insn (x, NULL);
            }

          bb = fallthru->dest;
          remove_edge (fallthru);
          flow_transfer_insn = NULL_RTX;
          if (LABEL_ALT_ENTRY_P (insn))
            make_edge (ENTRY_BLOCK_PTR, bb, 0);
        }

      /* In case we've previously seen an insn that effects a control
         flow transfer, split the block.  */
      if (flow_transfer_insn && inside_basic_block_p (insn))
        {
          fallthru = split_block (bb, PREV_INSN (insn));
          BB_END (bb) = flow_transfer_insn;

          /* Clean up the bb field for the insns between the blocks.  */
          for (x = NEXT_INSN (flow_transfer_insn);
               x != BB_HEAD (fallthru->dest);
               x = NEXT_INSN (x))
            if (!BARRIER_P (x))
              set_block_for_insn (x, NULL);

          bb = fallthru->dest;
          remove_edge (fallthru);
          flow_transfer_insn = NULL_RTX;
        }

      if (control_flow_insn_p (insn))
        flow_transfer_insn = insn;
      if (insn == end)
        break;
      insn = NEXT_INSN (insn);
    }

  /* In case the expander replaced a normal insn by a sequence terminating
     with a return and a barrier, or possibly some other sequence not
     behaving like an ordinary jump, we need to take care and move the
     basic block boundary.  */
  if (flow_transfer_insn)
    {
      BB_END (bb) = flow_transfer_insn;

      /* Clean up the bb field for the insns that do not belong to BB.  */
      x = flow_transfer_insn;
      while (x != end)
        {
          x = NEXT_INSN (x);
          if (!BARRIER_P (x))
            set_block_for_insn (x, NULL);
        }
    }

  /* We've possibly replaced the conditional jump by a conditional jump
     followed by a cleanup at the fallthru edge, so the outgoing edges may
     be dead.  */
  purge_dead_edges (bb);

  /* purge_dead_edges doesn't handle tablejumps, but if we have split the
     basic block, we might need to kill some edges.  */
  if (bb != orig_bb && tablejump_p (BB_END (bb), NULL, &table))
    purge_dead_tablejump_edges (bb, table);
}

/* Assume that frequency of basic block B is known.  Compute frequencies
   and probabilities of outgoing edges.  */

static void
compute_outgoing_frequencies (basic_block b)
{
  edge e, f;
  edge_iterator ei;

  if (EDGE_COUNT (b->succs) == 2)
    {
      rtx note = find_reg_note (BB_END (b), REG_BR_PROB, NULL);
      int probability;

      if (note)
        {
          probability = INTVAL (XEXP (note, 0));
          e = BRANCH_EDGE (b);
          e->probability = probability;
          e->count = ((b->count * probability + REG_BR_PROB_BASE / 2)
                      / REG_BR_PROB_BASE);
          f = FALLTHRU_EDGE (b);
          f->probability = REG_BR_PROB_BASE - probability;
          f->count = b->count - e->count;
          return;
        }
    }

  if (single_succ_p (b))
    {
      e = single_succ_edge (b);
      e->probability = REG_BR_PROB_BASE;
      e->count = b->count;
      return;
    }
  guess_outgoing_edge_probabilities (b);
  if (b->count)
    FOR_EACH_EDGE (e, ei, b->succs)
      e->count = ((b->count * e->probability + REG_BR_PROB_BASE / 2)
                  / REG_BR_PROB_BASE);
}
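
/* Worked example for the REG_BR_PROB path above (assuming REG_BR_PROB_BASE
   is 10000, its usual value): if block B ends in a conditional branch,
   B->count is 300 and the note value is 9000, the branch edge gets
   probability 9000 (i.e. 90%) and count (300 * 9000 + 5000) / 10000 = 270,
   while the fallthru edge gets probability 10000 - 9000 = 1000 and count
   300 - 270 = 30.  */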

/* Assume that some pass has inserted labels or control flow
   instructions within a basic block.  Split basic blocks as needed
   and create edges.  */

void
find_many_sub_basic_blocks (sbitmap blocks)
{
  basic_block bb, min, max;

  FOR_EACH_BB (bb)
    SET_STATE (bb,
               TEST_BIT (blocks, bb->index) ? BLOCK_TO_SPLIT : BLOCK_ORIGINAL);

  FOR_EACH_BB (bb)
    if (STATE (bb) == BLOCK_TO_SPLIT)
      find_bb_boundaries (bb);

  FOR_EACH_BB (bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      break;

  min = max = bb;
  for (; bb != EXIT_BLOCK_PTR; bb = bb->next_bb)
    if (STATE (bb) != BLOCK_ORIGINAL)
      max = bb;

  /* Now re-scan and wire in all edges.  This expects simple (conditional)
     jumps at the end of each new basic block.  */
  make_edges (min, max, 1);

  /* Update branch probabilities.  Expect only (un)conditional jumps
     to be created with only the forward edges.  */
  if (profile_status != PROFILE_ABSENT)
    FOR_BB_BETWEEN (bb, min, max->next_bb, next_bb)
      {
        edge e;
        edge_iterator ei;

        if (STATE (bb) == BLOCK_ORIGINAL)
          continue;
        if (STATE (bb) == BLOCK_NEW)
          {
            bb->count = 0;
            bb->frequency = 0;
            FOR_EACH_EDGE (e, ei, bb->preds)
              {
                bb->count += e->count;
                bb->frequency += EDGE_FREQUENCY (e);
              }
          }

        compute_outgoing_frequencies (bb);
      }

  FOR_EACH_BB (bb)
    SET_STATE (bb, 0);
}