Mercurial > hg > CbC > CbC_gcc
comparison gcc/cfganal.c @ 55:77e2b8dfacca gcc-4.4.5
update it from 4.4.3 to 4.5.0
author | ryoma <e075725@ie.u-ryukyu.ac.jp> |
---|---|
date | Fri, 12 Feb 2010 23:39:51 +0900 |
parents | a06113de4d67 |
children | b7f97abdc517 |
comparison
equal
deleted
inserted
replaced
52:c156f1bd5cd9 | 55:77e2b8dfacca |
---|---|
651 numbering of the graph. If INCLUDE_ENTRY_EXIT is true, then | 651 numbering of the graph. If INCLUDE_ENTRY_EXIT is true, then |
652 ENTRY_BLOCK and EXIT_BLOCK are included. If DELETE_UNREACHABLE is | 652 ENTRY_BLOCK and EXIT_BLOCK are included. If DELETE_UNREACHABLE is |
653 true, unreachable blocks are deleted. */ | 653 true, unreachable blocks are deleted. */ |
654 | 654 |
655 int | 655 int |
656 post_order_compute (int *post_order, bool include_entry_exit, | 656 post_order_compute (int *post_order, bool include_entry_exit, |
657 bool delete_unreachable) | 657 bool delete_unreachable) |
658 { | 658 { |
659 edge_iterator *stack; | 659 edge_iterator *stack; |
660 int sp; | 660 int sp; |
661 int post_order_num = 0; | 661 int post_order_num = 0; |
717 if (include_entry_exit) | 717 if (include_entry_exit) |
718 { | 718 { |
719 post_order[post_order_num++] = ENTRY_BLOCK; | 719 post_order[post_order_num++] = ENTRY_BLOCK; |
720 count = post_order_num; | 720 count = post_order_num; |
721 } | 721 } |
722 else | 722 else |
723 count = post_order_num + 2; | 723 count = post_order_num + 2; |
724 | 724 |
725 /* Delete the unreachable blocks if some were found and we are | 725 /* Delete the unreachable blocks if some were found and we are |
726 supposed to do it. */ | 726 supposed to do it. */ |
727 if (delete_unreachable && (count != n_basic_blocks)) | 727 if (delete_unreachable && (count != n_basic_blocks)) |
728 { | 728 { |
729 basic_block b; | 729 basic_block b; |
730 basic_block next_bb; | 730 basic_block next_bb; |
731 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb) | 731 for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb) |
732 { | 732 { |
733 next_bb = b->next_bb; | 733 next_bb = b->next_bb; |
734 | 734 |
735 if (!(TEST_BIT (visited, b->index))) | 735 if (!(TEST_BIT (visited, b->index))) |
736 delete_basic_block (b); | 736 delete_basic_block (b); |
737 } | 737 } |
738 | 738 |
739 tidy_fallthru_edges (); | 739 tidy_fallthru_edges (); |
740 } | 740 } |
741 | 741 |
742 free (stack); | 742 free (stack); |
743 sbitmap_free (visited); | 743 sbitmap_free (visited); |
744 return post_order_num; | 744 return post_order_num; |
745 } | 745 } |
746 | 746 |
747 | 747 |
748 /* Helper routine for inverted_post_order_compute. | 748 /* Helper routine for inverted_post_order_compute. |
749 BB has to belong to a region of CFG | 749 BB has to belong to a region of CFG |
750 unreachable by inverted traversal from the exit. | 750 unreachable by inverted traversal from the exit. |
751 i.e. there's no control flow path from ENTRY to EXIT | 751 i.e. there's no control flow path from ENTRY to EXIT |
752 that contains this BB. | 752 that contains this BB. |
753 This can happen in two cases - if there's an infinite loop | 753 This can happen in two cases - if there's an infinite loop |
754 or if there's a block that has no successor | 754 or if there's a block that has no successor |
755 (call to a function with no return). | 755 (call to a function with no return). |
756 Some RTL passes deal with this condition by | 756 Some RTL passes deal with this condition by |
757 calling connect_infinite_loops_to_exit () and/or | 757 calling connect_infinite_loops_to_exit () and/or |
758 add_noreturn_fake_exit_edges (). | 758 add_noreturn_fake_exit_edges (). |
759 However, those methods involve modifying the CFG itself | 759 However, those methods involve modifying the CFG itself |
760 which may not be desirable. | 760 which may not be desirable. |
761 Hence, we deal with the infinite loop/no return cases | 761 Hence, we deal with the infinite loop/no return cases |
762 by identifying a unique basic block that can reach all blocks | 762 by identifying a unique basic block that can reach all blocks |
799 If there's an infinite loop, | 799 If there's an infinite loop, |
800 a simple inverted traversal starting from the blocks | 800 a simple inverted traversal starting from the blocks |
801 with no successors can't visit all blocks. | 801 with no successors can't visit all blocks. |
802 To solve this problem, we first do inverted traversal | 802 To solve this problem, we first do inverted traversal |
803 starting from the blocks with no successor. | 803 starting from the blocks with no successor. |
804 And if there's any block left that's not visited by the regular | 804 And if there's any block left that's not visited by the regular |
805 inverted traversal from EXIT, | 805 inverted traversal from EXIT, |
806 those blocks are in such problematic region. | 806 those blocks are in such problematic region. |
807 Among those, we find one block that has | 807 Among those, we find one block that has |
808 any visited predecessor (which is an entry into such a region), | 808 any visited predecessor (which is an entry into such a region), |
809 and start looking for a "dead end" from that block | 809 and start looking for a "dead end" from that block |
810 and do another inverted traversal from that block. */ | 810 and do another inverted traversal from that block. */ |
811 | 811 |
812 int | 812 int |
813 inverted_post_order_compute (int *post_order) | 813 inverted_post_order_compute (int *post_order) |
814 { | 814 { |
831 /* Put all blocks that have no successor into the initial work list. */ | 831 /* Put all blocks that have no successor into the initial work list. */ |
832 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb) | 832 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb) |
833 if (EDGE_COUNT (bb->succs) == 0) | 833 if (EDGE_COUNT (bb->succs) == 0) |
834 { | 834 { |
835 /* Push the initial edge on to the stack. */ | 835 /* Push the initial edge on to the stack. */ |
836 if (EDGE_COUNT (bb->preds) > 0) | 836 if (EDGE_COUNT (bb->preds) > 0) |
837 { | 837 { |
838 stack[sp++] = ei_start (bb->preds); | 838 stack[sp++] = ei_start (bb->preds); |
839 SET_BIT (visited, bb->index); | 839 SET_BIT (visited, bb->index); |
840 } | 840 } |
841 } | 841 } |
842 | 842 |
843 do | 843 do |
844 { | 844 { |
845 bool has_unvisited_bb = false; | 845 bool has_unvisited_bb = false; |
846 | 846 |
847 /* The inverted traversal loop. */ | 847 /* The inverted traversal loop. */ |
848 while (sp) | 848 while (sp) |
878 else | 878 else |
879 sp--; | 879 sp--; |
880 } | 880 } |
881 } | 881 } |
882 | 882 |
883 /* Detect any infinite loop and activate the kludge. | 883 /* Detect any infinite loop and activate the kludge. |
884 Note that this doesn't check EXIT_BLOCK itself | 884 Note that this doesn't check EXIT_BLOCK itself |
885 since EXIT_BLOCK is always added after the outer do-while loop. */ | 885 since EXIT_BLOCK is always added after the outer do-while loop. */ |
886 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb) | 886 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb) |
887 if (!TEST_BIT (visited, bb->index)) | 887 if (!TEST_BIT (visited, bb->index)) |
888 { | 888 { |
912 } | 912 } |
913 } | 913 } |
914 | 914 |
915 if (has_unvisited_bb && sp == 0) | 915 if (has_unvisited_bb && sp == 0) |
916 { | 916 { |
917 /* No blocks are reachable from EXIT at all. | 917 /* No blocks are reachable from EXIT at all. |
918 Find a dead-end from the ENTRY, and restart the iteration. */ | 918 Find a dead-end from the ENTRY, and restart the iteration. */ |
919 basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR); | 919 basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR); |
920 gcc_assert (be != NULL); | 920 gcc_assert (be != NULL); |
921 SET_BIT (visited, be->index); | 921 SET_BIT (visited, be->index); |
922 stack[sp++] = ei_start (be->preds); | 922 stack[sp++] = ei_start (be->preds); |
923 } | 923 } |
924 | 924 |
925 /* The only case the below while fires is | 925 /* The only case the below while fires is |
926 when there's an infinite loop. */ | 926 when there's an infinite loop. */ |
927 } | 927 } |
928 while (sp); | 928 while (sp); |
929 | 929 |
930 /* EXIT_BLOCK is always included. */ | 930 /* EXIT_BLOCK is always included. */ |
938 /* Compute the depth first search order and store in the array | 938 /* Compute the depth first search order and store in the array |
939 PRE_ORDER if nonzero, marking the nodes visited in VISITED. If | 939 PRE_ORDER if nonzero, marking the nodes visited in VISITED. If |
940 REV_POST_ORDER is nonzero, return the reverse completion number for each | 940 REV_POST_ORDER is nonzero, return the reverse completion number for each |
941 node. Returns the number of nodes visited. A depth first search | 941 node. Returns the number of nodes visited. A depth first search |
942 tries to get as far away from the starting point as quickly as | 942 tries to get as far away from the starting point as quickly as |
943 possible. | 943 possible. |
944 | 944 |
945 pre_order is really a preorder numbering of the graph. | 945 pre_order is really a preorder numbering of the graph. |
946 rev_post_order is really a reverse postorder numbering of the graph. | 946 rev_post_order is really a reverse postorder numbering of the graph. |
947 */ | 947 */ |
948 | 948 |
949 int | 949 int |
950 pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order, | 950 pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order, |
951 bool include_entry_exit) | 951 bool include_entry_exit) |
952 { | 952 { |
953 edge_iterator *stack; | 953 edge_iterator *stack; |
954 int sp; | 954 int sp; |
955 int pre_order_num = 0; | 955 int pre_order_num = 0; |
966 pre_order[pre_order_num] = ENTRY_BLOCK; | 966 pre_order[pre_order_num] = ENTRY_BLOCK; |
967 pre_order_num++; | 967 pre_order_num++; |
968 if (rev_post_order) | 968 if (rev_post_order) |
969 rev_post_order[rev_post_order_num--] = ENTRY_BLOCK; | 969 rev_post_order[rev_post_order_num--] = ENTRY_BLOCK; |
970 } | 970 } |
971 else | 971 else |
972 rev_post_order_num -= NUM_FIXED_BLOCKS; | 972 rev_post_order_num -= NUM_FIXED_BLOCKS; |
973 | 973 |
974 /* Allocate bitmap to track nodes that have been visited. */ | 974 /* Allocate bitmap to track nodes that have been visited. */ |
975 visited = sbitmap_alloc (last_basic_block); | 975 visited = sbitmap_alloc (last_basic_block); |
976 | 976 |
1163 loops), and allocating a large sbitmap would lead to quadratic | 1163 loops), and allocating a large sbitmap would lead to quadratic |
1164 behavior. */ | 1164 behavior. */ |
1165 static sbitmap visited; | 1165 static sbitmap visited; |
1166 static unsigned v_size; | 1166 static unsigned v_size; |
1167 | 1167 |
1168 #define MARK_VISITED(BB) (SET_BIT (visited, (BB)->index)) | 1168 #define MARK_VISITED(BB) (SET_BIT (visited, (BB)->index)) |
1169 #define UNMARK_VISITED(BB) (RESET_BIT (visited, (BB)->index)) | 1169 #define UNMARK_VISITED(BB) (RESET_BIT (visited, (BB)->index)) |
1170 #define VISITED_P(BB) (TEST_BIT (visited, (BB)->index)) | 1170 #define VISITED_P(BB) (TEST_BIT (visited, (BB)->index)) |
1171 | 1171 |
1172 /* Resize the VISITED sbitmap if necessary. */ | 1172 /* Resize the VISITED sbitmap if necessary. */ |
1173 size = last_basic_block; | 1173 size = last_basic_block; |
1174 if (size < 10) | 1174 if (size < 10) |
1175 size = 10; | 1175 size = 10; |
1176 | 1176 |
1177 if (!visited) | 1177 if (!visited) |
1178 { | 1178 { |