Mercurial > hg > CbC > CbC_gcc
comparison gcc/tree-eh.c @ 63:b7f97abdc517 gcc-4.6-20100522
update gcc from gcc-4.5.0 to gcc-4.6
author | ryoma <e075725@ie.u-ryukyu.ac.jp> |
---|---|
date | Mon, 24 May 2010 12:47:05 +0900 |
parents | 77e2b8dfacca |
children | f6334be47118 |
comparison
equal
deleted
inserted
replaced
56:3c8a44c06a95 | 63:b7f97abdc517 |
---|---|
1 /* Exception handling semantics and decomposition for trees. | 1 /* Exception handling semantics and decomposition for trees. |
2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 | 2 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 |
3 Free Software Foundation, Inc. | 3 Free Software Foundation, Inc. |
4 | 4 |
5 This file is part of GCC. | 5 This file is part of GCC. |
6 | 6 |
7 GCC is free software; you can redistribute it and/or modify | 7 GCC is free software; you can redistribute it and/or modify |
21 #include "config.h" | 21 #include "config.h" |
22 #include "system.h" | 22 #include "system.h" |
23 #include "coretypes.h" | 23 #include "coretypes.h" |
24 #include "tm.h" | 24 #include "tm.h" |
25 #include "tree.h" | 25 #include "tree.h" |
26 #include "rtl.h" | |
27 #include "tm_p.h" | |
28 #include "flags.h" | 26 #include "flags.h" |
29 #include "function.h" | 27 #include "function.h" |
30 #include "except.h" | 28 #include "except.h" |
29 #include "pointer-set.h" | |
31 #include "tree-flow.h" | 30 #include "tree-flow.h" |
32 #include "tree-dump.h" | 31 #include "tree-dump.h" |
33 #include "tree-inline.h" | 32 #include "tree-inline.h" |
34 #include "tree-iterator.h" | 33 #include "tree-iterator.h" |
35 #include "tree-pass.h" | 34 #include "tree-pass.h" |
561 replace_goto_queue (struct leh_tf_state *tf) | 560 replace_goto_queue (struct leh_tf_state *tf) |
562 { | 561 { |
563 if (tf->goto_queue_active == 0) | 562 if (tf->goto_queue_active == 0) |
564 return; | 563 return; |
565 replace_goto_queue_stmt_list (tf->top_p_seq, tf); | 564 replace_goto_queue_stmt_list (tf->top_p_seq, tf); |
565 replace_goto_queue_stmt_list (eh_seq, tf); | |
566 } | 566 } |
567 | 567 |
568 /* Add a new record to the goto queue contained in TF. NEW_STMT is the | 568 /* Add a new record to the goto queue contained in TF. NEW_STMT is the |
569 data to be added, IS_LABEL indicates whether NEW_STMT is a label or | 569 data to be added, IS_LABEL indicates whether NEW_STMT is a label or |
570 a gimple return. */ | 570 a gimple return. */ |
641 /* In the case of a GOTO we want to record the destination label, | 641 /* In the case of a GOTO we want to record the destination label, |
642 since with a GIMPLE_COND we have an easy access to the then/else | 642 since with a GIMPLE_COND we have an easy access to the then/else |
643 labels. */ | 643 labels. */ |
644 new_stmt = stmt; | 644 new_stmt = stmt; |
645 record_in_goto_queue (tf, new_stmt, index, true); | 645 record_in_goto_queue (tf, new_stmt, index, true); |
646 | |
647 } | 646 } |
648 | 647 |
649 /* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally | 648 /* For any GIMPLE_GOTO or GIMPLE_RETURN, decide whether it leaves a try_finally |
650 node, and if so record that fact in the goto queue associated with that | 649 node, and if so record that fact in the goto queue associated with that |
651 try_finally node. */ | 650 try_finally node. */ |
1516 return f_estimate < 100 || f_estimate < sw_estimate * 2; | 1515 return f_estimate < 100 || f_estimate < sw_estimate * 2; |
1517 else | 1516 else |
1518 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3; | 1517 return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3; |
1519 } | 1518 } |
1520 | 1519 |
1520 /* REG is the enclosing region for a possible cleanup region, or the region | |
1521 itself. Returns TRUE if such a region would be unreachable. | |
1522 | |
1523 Cleanup regions within a must-not-throw region aren't actually reachable | |
1524 even if there are throwing stmts within them, because the personality | |
1525 routine will call terminate before unwinding. */ | |
1526 | |
1527 static bool | |
1528 cleanup_is_dead_in (eh_region reg) | |
1529 { | |
1530 while (reg && reg->type == ERT_CLEANUP) | |
1531 reg = reg->outer; | |
1532 return (reg && reg->type == ERT_MUST_NOT_THROW); | |
1533 } | |
1521 | 1534 |
1522 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_FINALLY nodes | 1535 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_FINALLY nodes |
1523 to a sequence of labels and blocks, plus the exception region trees | 1536 to a sequence of labels and blocks, plus the exception region trees |
1524 that record all the magic. This is complicated by the need to | 1537 that record all the magic. This is complicated by the need to |
1525 arrange for the FINALLY block to be executed on all exits. */ | 1538 arrange for the FINALLY block to be executed on all exits. */ |
1528 lower_try_finally (struct leh_state *state, gimple tp) | 1541 lower_try_finally (struct leh_state *state, gimple tp) |
1529 { | 1542 { |
1530 struct leh_tf_state this_tf; | 1543 struct leh_tf_state this_tf; |
1531 struct leh_state this_state; | 1544 struct leh_state this_state; |
1532 int ndests; | 1545 int ndests; |
1546 gimple_seq old_eh_seq; | |
1533 | 1547 |
1534 /* Process the try block. */ | 1548 /* Process the try block. */ |
1535 | 1549 |
1536 memset (&this_tf, 0, sizeof (this_tf)); | 1550 memset (&this_tf, 0, sizeof (this_tf)); |
1537 this_tf.try_finally_expr = tp; | 1551 this_tf.try_finally_expr = tp; |
1538 this_tf.top_p = tp; | 1552 this_tf.top_p = tp; |
1539 this_tf.outer = state; | 1553 this_tf.outer = state; |
1540 if (using_eh_for_cleanups_p) | 1554 if (using_eh_for_cleanups_p && !cleanup_is_dead_in (state->cur_region)) |
1541 this_tf.region = gen_eh_region_cleanup (state->cur_region); | 1555 { |
1556 this_tf.region = gen_eh_region_cleanup (state->cur_region); | |
1557 this_state.cur_region = this_tf.region; | |
1558 } | |
1542 else | 1559 else |
1543 this_tf.region = NULL; | 1560 { |
1544 | 1561 this_tf.region = NULL; |
1545 this_state.cur_region = this_tf.region; | 1562 this_state.cur_region = state->cur_region; |
1563 } | |
1564 | |
1546 this_state.ehp_region = state->ehp_region; | 1565 this_state.ehp_region = state->ehp_region; |
1547 this_state.tf = &this_tf; | 1566 this_state.tf = &this_tf; |
1548 | 1567 |
1568 old_eh_seq = eh_seq; | |
1569 eh_seq = NULL; | |
1570 | |
1549 lower_eh_constructs_1 (&this_state, gimple_try_eval(tp)); | 1571 lower_eh_constructs_1 (&this_state, gimple_try_eval(tp)); |
1550 | 1572 |
1551 /* Determine if the try block is escaped through the bottom. */ | 1573 /* Determine if the try block is escaped through the bottom. */ |
1552 this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp)); | 1574 this_tf.may_fallthru = gimple_seq_may_fallthru (gimple_try_eval (tp)); |
1553 | 1575 |
1554 /* Determine if any exceptions are possible within the try block. */ | 1576 /* Determine if any exceptions are possible within the try block. */ |
1555 if (using_eh_for_cleanups_p) | 1577 if (this_tf.region) |
1556 this_tf.may_throw = eh_region_may_contain_throw (this_tf.region); | 1578 this_tf.may_throw = eh_region_may_contain_throw (this_tf.region); |
1557 if (this_tf.may_throw) | 1579 if (this_tf.may_throw) |
1558 honor_protect_cleanup_actions (state, &this_state, &this_tf); | 1580 honor_protect_cleanup_actions (state, &this_state, &this_tf); |
1559 | 1581 |
1560 /* Determine how many edges (still) reach the finally block. Or rather, | 1582 /* Determine how many edges (still) reach the finally block. Or rather, |
1599 if (this_tf.goto_queue) | 1621 if (this_tf.goto_queue) |
1600 free (this_tf.goto_queue); | 1622 free (this_tf.goto_queue); |
1601 if (this_tf.goto_queue_map) | 1623 if (this_tf.goto_queue_map) |
1602 pointer_map_destroy (this_tf.goto_queue_map); | 1624 pointer_map_destroy (this_tf.goto_queue_map); |
1603 | 1625 |
1626 /* If there was an old (aka outer) eh_seq, append the current eh_seq. | |
1627 If there was no old eh_seq, then the append is trivially already done. */ | |
1628 if (old_eh_seq) | |
1629 { | |
1630 if (eh_seq == NULL) | |
1631 eh_seq = old_eh_seq; | |
1632 else | |
1633 { | |
1634 gimple_seq new_eh_seq = eh_seq; | |
1635 eh_seq = old_eh_seq; | |
1636 gimple_seq_add_seq(&eh_seq, new_eh_seq); | |
1637 } | |
1638 } | |
1639 | |
1604 return this_tf.top_p_seq; | 1640 return this_tf.top_p_seq; |
1605 } | 1641 } |
1606 | 1642 |
1607 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_CATCH with a | 1643 /* A subroutine of lower_eh_constructs_1. Lower a GIMPLE_TRY_CATCH with a |
1608 list of GIMPLE_CATCH to a sequence of labels and blocks, plus the | 1644 list of GIMPLE_CATCH to a sequence of labels and blocks, plus the |
1760 { | 1796 { |
1761 struct leh_state this_state = *state; | 1797 struct leh_state this_state = *state; |
1762 eh_region this_region = NULL; | 1798 eh_region this_region = NULL; |
1763 struct leh_tf_state fake_tf; | 1799 struct leh_tf_state fake_tf; |
1764 gimple_seq result; | 1800 gimple_seq result; |
1765 | 1801 bool cleanup_dead = cleanup_is_dead_in (state->cur_region); |
1766 if (flag_exceptions) | 1802 |
1803 if (flag_exceptions && !cleanup_dead) | |
1767 { | 1804 { |
1768 this_region = gen_eh_region_cleanup (state->cur_region); | 1805 this_region = gen_eh_region_cleanup (state->cur_region); |
1769 this_state.cur_region = this_region; | 1806 this_state.cur_region = this_region; |
1770 } | 1807 } |
1771 | 1808 |
1772 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp)); | 1809 lower_eh_constructs_1 (&this_state, gimple_try_eval (tp)); |
1773 | 1810 |
1774 if (!eh_region_may_contain_throw (this_region)) | 1811 if (cleanup_dead || !eh_region_may_contain_throw (this_region)) |
1775 return gimple_try_eval (tp); | 1812 return gimple_try_eval (tp); |
1776 | 1813 |
1777 /* Build enough of a try-finally state so that we can reuse | 1814 /* Build enough of a try-finally state so that we can reuse |
1778 honor_protect_cleanup_actions. */ | 1815 honor_protect_cleanup_actions. */ |
1779 memset (&fake_tf, 0, sizeof (fake_tf)); | 1816 memset (&fake_tf, 0, sizeof (fake_tf)); |
3036 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */ | 3073 TODO_dump_func | TODO_verify_flow /* todo_flags_finish */ |
3037 } | 3074 } |
3038 }; | 3075 }; |
3039 | 3076 |
3040 | 3077 |
3041 /* At the end of inlining, we can lower EH_DISPATCH. */ | 3078 /* At the end of inlining, we can lower EH_DISPATCH. Return true when |
3042 | 3079 we have found some duplicate labels and removed some edges. */ |
3043 static void | 3080 |
3081 static bool | |
3044 lower_eh_dispatch (basic_block src, gimple stmt) | 3082 lower_eh_dispatch (basic_block src, gimple stmt) |
3045 { | 3083 { |
3046 gimple_stmt_iterator gsi; | 3084 gimple_stmt_iterator gsi; |
3047 int region_nr; | 3085 int region_nr; |
3048 eh_region r; | 3086 eh_region r; |
3049 tree filter, fn; | 3087 tree filter, fn; |
3050 gimple x; | 3088 gimple x; |
3089 bool redirected = false; | |
3051 | 3090 |
3052 region_nr = gimple_eh_dispatch_region (stmt); | 3091 region_nr = gimple_eh_dispatch_region (stmt); |
3053 r = get_eh_region_from_number (region_nr); | 3092 r = get_eh_region_from_number (region_nr); |
3054 | 3093 |
3055 gsi = gsi_last_bb (src); | 3094 gsi = gsi_last_bb (src); |
3061 VEC (tree, heap) *labels = NULL; | 3100 VEC (tree, heap) *labels = NULL; |
3062 tree default_label = NULL; | 3101 tree default_label = NULL; |
3063 eh_catch c; | 3102 eh_catch c; |
3064 edge_iterator ei; | 3103 edge_iterator ei; |
3065 edge e; | 3104 edge e; |
3105 struct pointer_set_t *seen_values = pointer_set_create (); | |
3066 | 3106 |
3067 /* Collect the labels for a switch. Zero the post_landing_pad | 3107 /* Collect the labels for a switch. Zero the post_landing_pad |
3068 field because we'll no longer have anything keeping these labels | 3108 field because we'll no longer have anything keeping these labels |
3069 in existence and the optimizer will be free to merge these | 3109 in existence and the optimizer will be free to merge these |
3070 blocks at will. */ | 3110 blocks at will. */ |
3071 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch) | 3111 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch) |
3072 { | 3112 { |
3073 tree tp_node, flt_node, lab = c->label; | 3113 tree tp_node, flt_node, lab = c->label; |
3114 bool have_label = false; | |
3074 | 3115 |
3075 c->label = NULL; | 3116 c->label = NULL; |
3076 tp_node = c->type_list; | 3117 tp_node = c->type_list; |
3077 flt_node = c->filter_list; | 3118 flt_node = c->filter_list; |
3078 | 3119 |
3081 default_label = lab; | 3122 default_label = lab; |
3082 break; | 3123 break; |
3083 } | 3124 } |
3084 do | 3125 do |
3085 { | 3126 { |
3086 tree t = build3 (CASE_LABEL_EXPR, void_type_node, | 3127 /* Filter out duplicate labels that arise when this handler |
3087 TREE_VALUE (flt_node), NULL, lab); | 3128 is shadowed by an earlier one. When no labels are |
3088 VEC_safe_push (tree, heap, labels, t); | 3129 attached to the handler anymore, we remove |
3130 the corresponding edge and then we delete unreachable | |
3131 blocks at the end of this pass. */ | |
3132 if (! pointer_set_contains (seen_values, TREE_VALUE (flt_node))) | |
3133 { | |
3134 tree t = build3 (CASE_LABEL_EXPR, void_type_node, | |
3135 TREE_VALUE (flt_node), NULL, lab); | |
3136 VEC_safe_push (tree, heap, labels, t); | |
3137 pointer_set_insert (seen_values, TREE_VALUE (flt_node)); | |
3138 have_label = true; | |
3139 } | |
3089 | 3140 |
3090 tp_node = TREE_CHAIN (tp_node); | 3141 tp_node = TREE_CHAIN (tp_node); |
3091 flt_node = TREE_CHAIN (flt_node); | 3142 flt_node = TREE_CHAIN (flt_node); |
3092 } | 3143 } |
3093 while (tp_node); | 3144 while (tp_node); |
3145 if (! have_label) | |
3146 { | |
3147 remove_edge (find_edge (src, label_to_block (lab))); | |
3148 redirected = true; | |
3149 } | |
3094 } | 3150 } |
3095 | 3151 |
3096 /* Clean up the edge flags. */ | 3152 /* Clean up the edge flags. */ |
3097 FOR_EACH_EDGE (e, ei, src->succs) | 3153 FOR_EACH_EDGE (e, ei, src->succs) |
3098 { | 3154 { |
3130 x = gimple_build_switch_vec (filter, default_label, labels); | 3186 x = gimple_build_switch_vec (filter, default_label, labels); |
3131 gsi_insert_before (&gsi, x, GSI_SAME_STMT); | 3187 gsi_insert_before (&gsi, x, GSI_SAME_STMT); |
3132 | 3188 |
3133 VEC_free (tree, heap, labels); | 3189 VEC_free (tree, heap, labels); |
3134 } | 3190 } |
3191 pointer_set_destroy (seen_values); | |
3135 } | 3192 } |
3136 break; | 3193 break; |
3137 | 3194 |
3138 case ERT_ALLOWED_EXCEPTIONS: | 3195 case ERT_ALLOWED_EXCEPTIONS: |
3139 { | 3196 { |
3163 gcc_unreachable (); | 3220 gcc_unreachable (); |
3164 } | 3221 } |
3165 | 3222 |
3166 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */ | 3223 /* Replace the EH_DISPATCH with the SWITCH or COND generated above. */ |
3167 gsi_remove (&gsi, true); | 3224 gsi_remove (&gsi, true); |
3225 return redirected; | |
3168 } | 3226 } |
3169 | 3227 |
3170 static unsigned | 3228 static unsigned |
3171 execute_lower_eh_dispatch (void) | 3229 execute_lower_eh_dispatch (void) |
3172 { | 3230 { |
3173 basic_block bb; | 3231 basic_block bb; |
3174 bool any_rewritten = false; | 3232 bool any_rewritten = false; |
3233 bool redirected = false; | |
3175 | 3234 |
3176 assign_filter_values (); | 3235 assign_filter_values (); |
3177 | 3236 |
3178 FOR_EACH_BB (bb) | 3237 FOR_EACH_BB (bb) |
3179 { | 3238 { |
3180 gimple last = last_stmt (bb); | 3239 gimple last = last_stmt (bb); |
3181 if (last && gimple_code (last) == GIMPLE_EH_DISPATCH) | 3240 if (last && gimple_code (last) == GIMPLE_EH_DISPATCH) |
3182 { | 3241 { |
3183 lower_eh_dispatch (bb, last); | 3242 redirected |= lower_eh_dispatch (bb, last); |
3184 any_rewritten = true; | 3243 any_rewritten = true; |
3185 } | 3244 } |
3186 } | 3245 } |
3187 | 3246 |
3247 if (redirected) | |
3248 delete_unreachable_blocks (); | |
3188 return any_rewritten ? TODO_update_ssa_only_virtuals : 0; | 3249 return any_rewritten ? TODO_update_ssa_only_virtuals : 0; |
3189 } | 3250 } |
3190 | 3251 |
3191 static bool | 3252 static bool |
3192 gate_lower_eh_dispatch (void) | 3253 gate_lower_eh_dispatch (void) |
3348 | 3409 |
3349 /* Input edge must be EH and output edge must be normal. */ | 3410 /* Input edge must be EH and output edge must be normal. */ |
3350 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0) | 3411 if ((e_in->flags & EDGE_EH) == 0 || (e_out->flags & EDGE_EH) != 0) |
3351 return false; | 3412 return false; |
3352 | 3413 |
3353 /* The block must be empty except for the labels. */ | 3414 /* The block must be empty except for the labels and debug insns. */ |
3354 if (!gsi_end_p (gsi_after_labels (bb))) | 3415 gsi = gsi_after_labels (bb); |
3416 if (!gsi_end_p (gsi) && is_gimple_debug (gsi_stmt (gsi))) | |
3417 gsi_next_nondebug (&gsi); | |
3418 if (!gsi_end_p (gsi)) | |
3355 return false; | 3419 return false; |
3356 | 3420 |
3357 /* The destination block must not already have a landing pad | 3421 /* The destination block must not already have a landing pad |
3358 for a different region. */ | 3422 for a different region. */ |
3359 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi)) | 3423 for (gsi = gsi_start_bb (e_out->dest); !gsi_end_p (gsi); gsi_next (&gsi)) |