comparison gcc/tree-eh.c @ 145:1830386684a0

gcc-9.2.0
author anatofuz
date Thu, 13 Feb 2020 11:34:05 +0900
parents 84e7813d76e9
children
comparison 131:84e7813d76e9 vs. 145:1830386684a0

--- gcc/tree-eh.c   131:84e7813d76e9
+++ gcc/tree-eh.c   145:1830386684a0
@@ -1,7 +1,7 @@
 /* Exception handling semantics and decomposition for trees.
-   Copyright (C) 2003-2018 Free Software Foundation, Inc.
+   Copyright (C) 2003-2020 Free Software Foundation, Inc.
 
    This file is part of GCC.
 
    GCC is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
@@ -137,23 +137,23 @@
    Positive numbers indicate a landing pad index; negative numbers
    indicate a MUST_NOT_THROW region index; zero indicates that the
    statement is not recorded in the region table.  */
 
 int
-lookup_stmt_eh_lp_fn (struct function *ifun, gimple *t)
+lookup_stmt_eh_lp_fn (struct function *ifun, const gimple *t)
 {
   if (ifun->eh->throw_stmt_table == NULL)
     return 0;
 
-  int *lp_nr = ifun->eh->throw_stmt_table->get (t);
+  int *lp_nr = ifun->eh->throw_stmt_table->get (const_cast <gimple *> (t));
   return lp_nr ? *lp_nr : 0;
 }
 
 /* Likewise, but always use the current function.  */
 
 int
-lookup_stmt_eh_lp (gimple *t)
+lookup_stmt_eh_lp (const gimple *t)
 {
   /* We can get called from initialized data when -fnon-call-exceptions
      is on; prevent crash.  */
   if (!cfun)
     return 0;
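
The hunk above makes the lookup path const-correct without touching the table itself: throw_stmt_table stays keyed on non-const gimple *, so constness is cast away at the single point where the key enters the table. A minimal standalone sketch of the same pattern, with illustrative names rather than GCC's types:

#include <unordered_map>

struct stmt;  /* opaque statement type, standing in for gimple */

static std::unordered_map<stmt *, int> throw_stmt_table;

/* Const-correct lookup: the table never modifies the key, so casting
   away constness of the key pointer (not the pointee) is safe.  */
int lookup_lp (const stmt *s)
{
  auto it = throw_stmt_table.find (const_cast<stmt *> (s));
  return it == throw_stmt_table.end () ? 0 : it->second;
}
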
@@ -354,10 +354,13 @@
 
   /* Processing of TRY_FINALLY requires a bit more state.  This is
      split out into a separate structure so that we don't have to
      copy so much when processing other nodes.  */
   struct leh_tf_state *tf;
+
+  /* Outer non-cleanup region.  */
+  eh_region outer_non_cleanup;
 };
 
 struct leh_tf_state
 {
   /* Pointer to the GIMPLE_TRY_FINALLY node under discussion.  The
@@ -501,11 +504,15 @@
     case GIMPLE_RETURN:
       temp.g = stmt;
       seq = find_goto_replacement (tf, temp);
       if (seq)
         {
-          gsi_insert_seq_before (gsi, gimple_seq_copy (seq), GSI_SAME_STMT);
+          gimple_stmt_iterator i;
+          seq = gimple_seq_copy (seq);
+          for (i = gsi_start (seq); !gsi_end_p (i); gsi_next (&i))
+            gimple_set_location (gsi_stmt (i), gimple_location (stmt));
+          gsi_insert_seq_before (gsi, seq, GSI_SAME_STMT);
           gsi_remove (gsi, false);
           return;
         }
       break;
 
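
The replacement loop stamps every statement of the copied FINALLY sequence with the location of the goto or return it replaces, so the duplicated cleanup code is attributed to the jump site rather than to wherever the finally block was written. The shape of the fix, as a standalone sketch with hypothetical types:

#include <vector>

struct stmt { int location; };

/* Copy SEQ and give every copied statement the location USE_LOC of
   the statement being replaced (cf. gimple_set_location above).  */
static std::vector<stmt> copy_seq_at (const std::vector<stmt> &seq,
                                      int use_loc)
{
  std::vector<stmt> copy = seq;
  for (stmt &s : copy)
    s.location = use_loc;
  return copy;
}
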
@@ -809,19 +816,10 @@
   gimple_seq_add_stmt (seq, x);
   if (region->outer)
     record_stmt_eh_region (region->outer, x);
 }
 
-/* Emit an EH_DISPATCH statement into SEQ for REGION.  */
-
-static void
-emit_eh_dispatch (gimple_seq *seq, eh_region region)
-{
-  geh_dispatch *x = gimple_build_eh_dispatch (region->index);
-  gimple_seq_add_stmt (seq, x);
-}
-
 /* Note that the current EH region may contain a throw, or a
    call to a function which itself may contain a throw.  */
 
 static void
 note_eh_region_may_contain_throw (eh_region region)
@@ -999,15 +997,18 @@
   if (geh_else *eh_else = get_eh_else (finally))
     {
       gimple_try_set_cleanup (tf->top_p, gimple_eh_else_n_body (eh_else));
       finally = gimple_eh_else_e_body (eh_else);
 
-      /* Let the ELSE see the exception that's being processed.  */
-      eh_region save_ehp = this_state->ehp_region;
-      this_state->ehp_region = this_state->cur_region;
-      lower_eh_constructs_1 (this_state, &finally);
-      this_state->ehp_region = save_ehp;
+      /* Let the ELSE see the exception that's being processed, but
+         since the cleanup is outside the try block, process it with
+         outer_state, otherwise it may be used as a cleanup for
+         itself, and Bad Things (TM) ensue.  */
+      eh_region save_ehp = outer_state->ehp_region;
+      outer_state->ehp_region = this_state->cur_region;
+      lower_eh_constructs_1 (outer_state, &finally);
+      outer_state->ehp_region = save_ehp;
     }
   else
     {
       /* First check for nothing to do.  */
       if (lang_hooks.eh_protect_cleanup_actions == NULL)
@@ -1624,22 +1625,31 @@
     return f_estimate < 100 || f_estimate < sw_estimate * 2;
   else
     return f_estimate < 40 || f_estimate * 2 < sw_estimate * 3;
 }
 
-/* REG is the enclosing region for a possible cleanup region, or the region
-   itself.  Returns TRUE if such a region would be unreachable.
+/* STATE's current region is the enclosing region for a possible cleanup
+   region, or the region itself.  Returns TRUE if such a region would be
+   unreachable.
 
    Cleanup regions within a must-not-throw region aren't actually reachable
    even if there are throwing stmts within them, because the personality
    routine will call terminate before unwinding.  */
 
 static bool
-cleanup_is_dead_in (eh_region reg)
+cleanup_is_dead_in (leh_state *state)
 {
-  while (reg && reg->type == ERT_CLEANUP)
-    reg = reg->outer;
+  if (flag_checking)
+    {
+      eh_region reg = state->cur_region;
+      while (reg && reg->type == ERT_CLEANUP)
+        reg = reg->outer;
+
+      gcc_assert (reg == state->outer_non_cleanup);
+    }
+
+  eh_region reg = state->outer_non_cleanup;
   return (reg && reg->type == ERT_MUST_NOT_THROW);
 }
 
 /* A subroutine of lower_eh_constructs_1.  Lower a GIMPLE_TRY_FINALLY node
    to a sequence of labels and blocks, plus the exception region trees
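
Taken together with the new leh_state::outer_non_cleanup field, this hunk replaces a per-query walk over enclosing ERT_CLEANUP regions with a cached answer; the old linear walk survives only under flag_checking, as a consistency check on the cache. The cache-with-checked-invariant pattern in isolation (names are illustrative):

#include <cassert>

struct region { region *outer; bool is_cleanup; };

struct leh_state_sketch
{
  region *cur_region;
  region *outer_non_cleanup;  /* cached first non-cleanup enclosing region */

  region *get_outer_non_cleanup () const
  {
#ifndef NDEBUG
    /* Recompute the old, linear way and verify the cache.  */
    region *r = cur_region;
    while (r && r->is_cleanup)
      r = r->outer;
    assert (r == outer_non_cleanup);
#endif
    return outer_non_cleanup;
  }
};
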
@@ -1658,21 +1668,22 @@
 
   memset (&this_tf, 0, sizeof (this_tf));
   this_tf.try_finally_expr = tp;
   this_tf.top_p = tp;
   this_tf.outer = state;
-  if (using_eh_for_cleanups_p () && !cleanup_is_dead_in (state->cur_region))
+  if (using_eh_for_cleanups_p () && !cleanup_is_dead_in (state))
     {
       this_tf.region = gen_eh_region_cleanup (state->cur_region);
       this_state.cur_region = this_tf.region;
     }
   else
     {
       this_tf.region = NULL;
       this_state.cur_region = state->cur_region;
     }
 
+  this_state.outer_non_cleanup = state->outer_non_cleanup;
   this_state.ehp_region = state->ehp_region;
   this_state.tf = &this_tf;
 
   old_eh_seq = eh_seq;
   eh_seq = NULL;
@@ -1760,28 +1771,33 @@
   struct leh_state this_state = *state;
   gimple_stmt_iterator gsi;
   tree out_label;
   gimple_seq new_seq, cleanup;
   gimple *x;
+  geh_dispatch *eh_dispatch;
   location_t try_catch_loc = gimple_location (tp);
+  location_t catch_loc = UNKNOWN_LOCATION;
 
   if (flag_exceptions)
     {
       try_region = gen_eh_region_try (state->cur_region);
       this_state.cur_region = try_region;
+      this_state.outer_non_cleanup = this_state.cur_region;
     }
 
   lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
 
   if (!eh_region_may_contain_throw (try_region))
     return gimple_try_eval (tp);
 
   new_seq = NULL;
-  emit_eh_dispatch (&new_seq, try_region);
+  eh_dispatch = gimple_build_eh_dispatch (try_region->index);
+  gimple_seq_add_stmt (&new_seq, eh_dispatch);
   emit_resx (&new_seq, try_region);
 
   this_state.cur_region = state->cur_region;
+  this_state.outer_non_cleanup = state->outer_non_cleanup;
   this_state.ehp_region = try_region;
 
   /* Add eh_seq from lowering EH in the cleanup sequence after the cleanup
      itself, so that e.g. for coverage purposes the nested cleanups don't
      appear before the cleanup body.  See PR64634 for details.  */
@@ -1797,10 +1813,12 @@
       eh_catch c;
       gcatch *catch_stmt;
       gimple_seq handler;
 
       catch_stmt = as_a <gcatch *> (gsi_stmt (gsi));
+      if (catch_loc == UNKNOWN_LOCATION)
+        catch_loc = gimple_location (catch_stmt);
       c = gen_eh_region_catch (try_region, gimple_catch_types (catch_stmt));
 
       handler = gimple_catch_handler (catch_stmt);
       lower_eh_constructs_1 (&this_state, &handler);
 
@@ -1819,10 +1837,14 @@
           gimple_seq_add_stmt (&new_seq, x);
         }
       if (!c->type_list)
         break;
     }
+
+  /* Try to set a location on the dispatching construct to avoid inheriting
+     the location of the previous statement.  */
+  gimple_set_location (eh_dispatch, catch_loc);
 
   gimple_try_set_cleanup (tp, new_seq);
 
   gimple_seq new_eh_seq = eh_seq;
   eh_seq = old_eh_seq;
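
Previously the GIMPLE_EH_DISPATCH built for a try/catch carried no explicit location and would inherit whatever statement came before it; the hunks above remember the location of the first catch and stamp the dispatch with it. The "first known location wins" shape, reduced to a sketch with hypothetical types (the real loop also lowers each handler):

#include <vector>

constexpr int UNKNOWN_LOC_SKETCH = -1;

struct catch_clause { int location; };

/* While iterating the handlers, capture the first catch's location
   for later use on the dispatch statement.  */
static int dispatch_location (const std::vector<catch_clause> &catches)
{
  int catch_loc = UNKNOWN_LOC_SKETCH;
  for (const catch_clause &c : catches)
    if (catch_loc == UNKNOWN_LOC_SKETCH)
      catch_loc = c.location;
  return catch_loc;
}
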
@@ -1848,22 +1870,25 @@
   if (flag_exceptions)
     {
       this_region = gen_eh_region_allowed (state->cur_region,
                                           gimple_eh_filter_types (inner));
       this_state.cur_region = this_region;
+      this_state.outer_non_cleanup = this_state.cur_region;
     }
 
   lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
 
   if (!eh_region_may_contain_throw (this_region))
     return gimple_try_eval (tp);
 
-  new_seq = NULL;
   this_state.cur_region = state->cur_region;
   this_state.ehp_region = this_region;
 
-  emit_eh_dispatch (&new_seq, this_region);
+  new_seq = NULL;
+  x = gimple_build_eh_dispatch (this_region->index);
+  gimple_set_location (x, gimple_location (tp));
+  gimple_seq_add_stmt (&new_seq, x);
   emit_resx (&new_seq, this_region);
 
   this_region->u.allowed.label = create_artificial_label (UNKNOWN_LOCATION);
   x = gimple_build_label (this_region->u.allowed.label);
   gimple_seq_add_stmt (&new_seq, x);
@@ -1901,10 +1926,11 @@
          used now.  Otherwise, pass_ipa_free_lang_data won't think it
          needs to happen.  */
       TREE_USED (this_region->u.must_not_throw.failure_decl) = 1;
 
       this_state.cur_region = this_region;
+      this_state.outer_non_cleanup = this_state.cur_region;
     }
 
   lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
 
   return gimple_try_eval (tp);
@@ -1918,16 +1944,17 @@
 {
   struct leh_state this_state = *state;
   eh_region this_region = NULL;
   struct leh_tf_state fake_tf;
   gimple_seq result;
-  bool cleanup_dead = cleanup_is_dead_in (state->cur_region);
+  bool cleanup_dead = cleanup_is_dead_in (state);
 
   if (flag_exceptions && !cleanup_dead)
     {
       this_region = gen_eh_region_cleanup (state->cur_region);
       this_state.cur_region = this_region;
+      this_state.outer_non_cleanup = state->outer_non_cleanup;
     }
 
   lower_eh_constructs_1 (&this_state, gimple_try_eval_ptr (tp));
 
   if (cleanup_dead || !eh_region_may_contain_throw (this_region))
@@ -2299,11 +2326,11 @@
   old_lp_nr = EH_LANDING_PAD_NR (old_label);
   gcc_assert (old_lp_nr > 0);
   old_lp = get_eh_landing_pad_from_number (old_lp_nr);
 
   throw_stmt = last_stmt (edge_in->src);
-  gcc_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);
+  gcc_checking_assert (lookup_stmt_eh_lp (throw_stmt) == old_lp_nr);
 
   new_label = gimple_block_label (new_bb);
 
   /* Look for an existing region that might be using NEW_BB already.  */
   new_lp_nr = EH_LANDING_PAD_NR (new_label);
@@ -2488,10 +2515,17 @@
     case COMPLEX_EXPR:
     case CONSTRUCTOR:
       /* Constructing an object cannot trap.  */
       return false;
 
+    case COND_EXPR:
+    case VEC_COND_EXPR:
+      /* Whether *COND_EXPR can trap depends on whether the
+         first argument can trap, so signal it as not handled.
+         Whether lhs is floating or not doesn't matter.  */
+      *handled = false;
+      return false;
+
     default:
       /* Any floating arithmetic may trap.  */
       if (fp_operation && flag_trapping_math)
         return true;
 
@@ -2511,10 +2546,14 @@
 {
   bool honor_nans = (fp_operation && flag_trapping_math
                      && !flag_finite_math_only);
   bool honor_snans = fp_operation && flag_signaling_nans != 0;
   bool handled;
+
+  /* This function cannot tell whether or not COND_EXPR and VEC_COND_EXPR
+     could trap, because that depends on the respective condition op.  */
+  gcc_assert (op != COND_EXPR && op != VEC_COND_EXPR);
 
   if (TREE_CODE_CLASS (op) != tcc_comparison
       && TREE_CODE_CLASS (op) != tcc_unary
       && TREE_CODE_CLASS (op) != tcc_binary)
     return false;
@@ -2597,10 +2636,17 @@
   bool fp_operation = false;
   bool honor_trapv = false;
   tree t, base, div = NULL_TREE;
 
   if (!expr)
+    return false;
+
+  /* In COND_EXPR and VEC_COND_EXPR only the condition may trap, but
+     they won't appear as operands in GIMPLE form, so this is just for the
+     GENERIC uses where it needs to recurse on the operands and so
+     *COND_EXPR itself doesn't trap.  */
+  if (TREE_CODE (expr) == COND_EXPR || TREE_CODE (expr) == VEC_COND_EXPR)
     return false;
 
   code = TREE_CODE (expr);
   t = TREE_TYPE (expr);
 
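
The three hunks above encode one fact: in a COND_EXPR or VEC_COND_EXPR only the condition can trap, never the value selection, and in GIMPLE the condition is a separate statement anyway, so the helper must refuse to answer for these codes. At the source level the situation looks like this (illustration only):

/* With -ftrapping-math, comparing NaNs may raise FE_INVALID, so the
   trap risk below lives entirely in "a < b"; picking x or y
   afterwards cannot trap, whatever the operand types are.  */
double select_sketch (double a, double b, double x, double y)
{
  return a < b ? x : y;
}
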
@@ -2757,31 +2803,13 @@
         walk_tree (&TREE_OPERAND (*tp, i), replace_trapping_overflow,
                    data, (hash_set<tree> *) data);
 
       if (TREE_CODE (*tp) == ABS_EXPR)
         {
-          tree op = TREE_OPERAND (*tp, 0);
-          op = save_expr (op);
-          /* save_expr skips simple arithmetics, which is undesirable
-             here, if it might trap due to flag_trapv.  We need to
-             force a SAVE_EXPR in the COND_EXPR condition, to evaluate
-             it before the comparison.  */
-          if (EXPR_P (op)
-              && TREE_CODE (op) != SAVE_EXPR
-              && walk_tree (&op, find_trapping_overflow, NULL, NULL))
-            {
-              op = build1_loc (EXPR_LOCATION (op), SAVE_EXPR, type, op);
-              TREE_SIDE_EFFECTS (op) = 1;
-            }
-          /* Change abs (op) to op < 0 ? -op : op and handle the NEGATE_EXPR
-             like other signed integer trapping operations.  */
-          tree cond = fold_build2 (LT_EXPR, boolean_type_node,
-                                   op, build_int_cst (type, 0));
-          tree neg = fold_build1 (NEGATE_EXPR, utype,
-                                  fold_convert (utype, op));
-          *tp = fold_build3 (COND_EXPR, type, cond,
-                             fold_convert (type, neg), op);
+          TREE_SET_CODE (*tp, ABSU_EXPR);
+          TREE_TYPE (*tp) = utype;
+          *tp = fold_convert (type, *tp);
         }
       else
         {
           TREE_TYPE (*tp) = utype;
           len = TREE_OPERAND_LENGTH (*tp);
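
The old lowering expanded abs (op) into op < 0 ? -op : op, forcing a SAVE_EXPR into the condition so a potentially trapping operand was evaluated only once. The new code simply retypes the node to ABSU_EXPR: the absolute value is computed in the unsigned counterpart type, where negation cannot overflow, then converted back. The equivalent source-level rewrite:

/* |x| computed without signed-overflow UB: negate in unsigned
   arithmetic (well defined even for INT_MIN), then convert back;
   the final conversion is implementation-defined for INT_MIN on
   pre-C++20 sign representations but cannot trap.  */
int abs_via_unsigned (int x)
{
  unsigned ux = (unsigned) x;
  unsigned au = x < 0 ? 0u - ux : ux;
  return (int) au;
}
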
@@ -3538,14 +3566,19 @@
       release_defs (stmt);
     }
 }
 
 /* Try to sink var = {v} {CLOBBER} stmts followed just by
-   internal throw to successor BB.  */
+   internal throw to successor BB.
+   SUNK, if not NULL, is an array of sequences indexed by basic-block
+   index to sink to and to pick up sinking opportunities from.
+   If FOUND_OPPORTUNITY is not NULL then do not perform the optimization
+   but set *FOUND_OPPORTUNITY to true.  */
 
 static int
-sink_clobbers (basic_block bb)
+sink_clobbers (basic_block bb,
+               gimple_seq *sunk = NULL, bool *found_opportunity = NULL)
 {
   edge e;
   edge_iterator ei;
   gimple_stmt_iterator gsi, dgsi;
   basic_block succbb;
@@ -3576,33 +3609,43 @@
         break;
       if (!gimple_clobber_p (stmt))
         return 0;
       any_clobbers = true;
     }
-  if (!any_clobbers)
+  if (!any_clobbers && (!sunk || gimple_seq_empty_p (sunk[bb->index])))
     return 0;
+
+  /* If this was a dry run, tell it we found clobbers to sink.  */
+  if (found_opportunity)
+    {
+      *found_opportunity = true;
+      return 0;
+    }
 
   edge succe = single_succ_edge (bb);
   succbb = succe->dest;
 
   /* See if there is a virtual PHI node to take an updated virtual
      operand from.  */
   gphi *vphi = NULL;
-  tree vuse = NULL_TREE;
   for (gphi_iterator gpi = gsi_start_phis (succbb);
        !gsi_end_p (gpi); gsi_next (&gpi))
     {
       tree res = gimple_phi_result (gpi.phi ());
       if (virtual_operand_p (res))
         {
           vphi = gpi.phi ();
-          vuse = res;
           break;
         }
     }
 
-  dgsi = gsi_after_labels (succbb);
+  gimple *first_sunk = NULL;
+  gimple *last_sunk = NULL;
+  if (sunk && !(succbb->flags & BB_VISITED))
+    dgsi = gsi_start (sunk[succbb->index]);
+  else
+    dgsi = gsi_after_labels (succbb);
   gsi = gsi_last_bb (bb);
   for (gsi_prev (&gsi); !gsi_end_p (gsi); gsi_prev (&gsi))
     {
       gimple *stmt = gsi_stmt (gsi);
       tree lhs;
@@ -3629,40 +3672,50 @@
 
       /* As we do not change stmt order when sinking across a
         forwarder edge we can keep virtual operands in place.  */
       gsi_remove (&gsi, false);
       gsi_insert_before (&dgsi, stmt, GSI_NEW_STMT);
-
-      /* But adjust virtual operands if we sunk across a PHI node.  */
-      if (vuse)
+      if (!first_sunk)
+        first_sunk = stmt;
+      last_sunk = stmt;
+    }
+  if (sunk && !gimple_seq_empty_p (sunk[bb->index]))
+    {
+      if (!first_sunk)
+        first_sunk = gsi_stmt (gsi_last (sunk[bb->index]));
+      last_sunk = gsi_stmt (gsi_start (sunk[bb->index]));
+      gsi_insert_seq_before_without_update (&dgsi,
+                                            sunk[bb->index], GSI_NEW_STMT);
+      sunk[bb->index] = NULL;
+    }
+  if (first_sunk)
+    {
+      /* Adjust virtual operands if we sunk across a virtual PHI.  */
+      if (vphi)
         {
-          gimple *use_stmt;
           imm_use_iterator iter;
           use_operand_p use_p;
-          FOR_EACH_IMM_USE_STMT (use_stmt, iter, vuse)
+          gimple *use_stmt;
+          tree phi_def = gimple_phi_result (vphi);
+          FOR_EACH_IMM_USE_STMT (use_stmt, iter, phi_def)
             FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
-              SET_USE (use_p, gimple_vdef (stmt));
-          if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse))
+              SET_USE (use_p, gimple_vdef (first_sunk));
+          if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (phi_def))
            {
-              SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (stmt)) = 1;
-              SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 0;
+              SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_vdef (first_sunk)) = 1;
+              SSA_NAME_OCCURS_IN_ABNORMAL_PHI (phi_def) = 0;
            }
-          /* Adjust the incoming virtual operand.  */
-          SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (vphi, succe), gimple_vuse (stmt));
-          SET_USE (gimple_vuse_op (stmt), vuse);
+          SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (vphi, succe),
+                   gimple_vuse (last_sunk));
+          SET_USE (gimple_vuse_op (last_sunk), phi_def);
         }
       /* If there isn't a single predecessor but no virtual PHI node
         arrange for virtual operands to be renamed.  */
-      else if (gimple_vuse_op (stmt) != NULL_USE_OPERAND_P
-               && !single_pred_p (succbb))
+      else if (!single_pred_p (succbb)
+               && TREE_CODE (gimple_vuse (last_sunk)) == SSA_NAME)
        {
-          /* In this case there will be no use of the VDEF of this stmt.
-             ??? Unless this is a secondary opportunity and we have not
-             removed unreachable blocks yet, so we cannot assert this.
-             Which also means we will end up renaming too many times.  */
-          SET_USE (gimple_vuse_op (stmt), gimple_vop (cfun));
-          mark_virtual_operands_for_renaming (cfun);
+          mark_virtual_operand_for_renaming (gimple_vuse (last_sunk));
           todo |= TODO_update_ssa_only_virtuals;
        }
     }
 
   return todo;
@@ -3768,17 +3821,19 @@
            x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
                                                         region_nr));
            filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
            filter = make_ssa_name (filter, x);
            gimple_call_set_lhs (x, filter);
+           gimple_set_location (x, gimple_location (stmt));
            gsi_insert_before (&gsi, x, GSI_SAME_STMT);
 
            /* Turn the default label into a default case.  */
            default_label = build_case_label (NULL, NULL, default_label);
            sort_case_labels (labels);
 
            x = gimple_build_switch (filter, default_label, labels);
+           gimple_set_location (x, gimple_location (stmt));
            gsi_insert_before (&gsi, x, GSI_SAME_STMT);
          }
       }
       break;
 
@@ -3791,10 +3846,11 @@
        x = gimple_build_call (fn, 1, build_int_cst (integer_type_node,
                                                     region_nr));
        filter = create_tmp_var (TREE_TYPE (TREE_TYPE (fn)));
        filter = make_ssa_name (filter, x);
        gimple_call_set_lhs (x, filter);
+       gimple_set_location (x, gimple_location (stmt));
        gsi_insert_before (&gsi, x, GSI_SAME_STMT);
 
        r->u.allowed.label = NULL;
        x = gimple_build_cond (EQ_EXPR, filter,
                               build_int_cst (TREE_TYPE (filter),
@@ -3848,10 +3904,11 @@
 pass_lower_eh_dispatch::execute (function *fun)
 {
   basic_block bb;
   int flags = 0;
   bool redirected = false;
+  bool any_resx_to_process = false;
 
   assign_filter_values ();
 
   FOR_EACH_BB_FN (bb, fun)
     {
@@ -3864,22 +3921,52 @@
                                    as_a <geh_dispatch *> (last));
          flags |= TODO_update_ssa_only_virtuals;
        }
       else if (gimple_code (last) == GIMPLE_RESX)
        {
-         if (stmt_can_throw_external (cfun, last))
+         if (stmt_can_throw_external (fun, last))
            optimize_clobbers (bb);
-         else
-           flags |= sink_clobbers (bb);
+         else if (!any_resx_to_process)
+           sink_clobbers (bb, NULL, &any_resx_to_process);
        }
-    }
-
+      bb->flags &= ~BB_VISITED;
+    }
   if (redirected)
     {
       free_dominance_info (CDI_DOMINATORS);
       delete_unreachable_blocks ();
     }
+
+  if (any_resx_to_process)
+    {
+      /* Make sure to catch all secondary sinking opportunities by processing
+         blocks in RPO order and after all CFG modifications from lowering
+         and unreachable block removal.  */
+      int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fun));
+      int rpo_n = pre_and_rev_post_order_compute_fn (fun, NULL, rpo, false);
+      gimple_seq *sunk = XCNEWVEC (gimple_seq, last_basic_block_for_fn (fun));
+      for (int i = 0; i < rpo_n; ++i)
+        {
+          bb = BASIC_BLOCK_FOR_FN (fun, rpo[i]);
+          gimple *last = last_stmt (bb);
+          if (last
+              && gimple_code (last) == GIMPLE_RESX
+              && !stmt_can_throw_external (fun, last))
+            flags |= sink_clobbers (bb, sunk);
+          /* If there were any clobbers sunk into this BB, insert them now.  */
+          if (!gimple_seq_empty_p (sunk[bb->index]))
+            {
+              gimple_stmt_iterator gsi = gsi_after_labels (bb);
+              gsi_insert_seq_before (&gsi, sunk[bb->index], GSI_NEW_STMT);
+              sunk[bb->index] = NULL;
+            }
+          bb->flags |= BB_VISITED;
+        }
+      free (rpo);
+      free (sunk);
+    }
+
   return flags;
 }
 
 } // anon namespace
 
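
sink_clobbers now has a dry-run mode: during the main dispatch-lowering walk it only records, via *found_opportunity, that some internal-throw RESX block has clobbers worth sinking. The real sinking happens in a second pass over blocks in reverse post order, after unreachable blocks are deleted, with the per-block sunk[] sequences carrying clobbers forward and BB_VISITED marking destinations already flushed. The dry-run/commit skeleton, with stand-in names:

#include <vector>

/* Stand-in for sink_clobbers: with SUNK == nullptr it is a dry run
   that only reports whether work exists.  */
static void process_block (int bb, std::vector<std::vector<int>> *sunk,
                           bool *found_opportunity)
{
  bool has_work = (bb % 2 == 0);   /* placeholder predicate */
  if (!has_work)
    return;
  if (!sunk)
    {
      *found_opportunity = true;   /* phase 1: detect only */
      return;
    }
  (*sunk)[bb].push_back (bb);      /* phase 2: do the real work */
}

static void run_two_phase (int nblocks, const std::vector<int> &rpo)
{
  /* Phase 1: cheap scan that only asks whether any work exists.  */
  bool any = false;
  for (int bb = 0; bb < nblocks && !any; ++bb)
    process_block (bb, nullptr, &any);
  if (!any)
    return;

  /* Phase 2: real work, in reverse post order so material sunk into
     a block is picked up again when that block itself is reached.  */
  std::vector<std::vector<int>> sunk (nblocks);
  for (int bb : rpo)
    process_block (bb, &sunk, nullptr);
}
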
@@ -4044,19 +4131,18 @@
   eh_landing_pad lp;
   unsigned i;
 
   if (cfun->eh == NULL)
     return;
 
   FOR_EACH_VEC_SAFE_ELT (cfun->eh->lp_array, i, lp)
-    if (lp && lp->post_landing_pad)
+    if (lp
+       && (lp->post_landing_pad == NULL_TREE
+           || label_to_block (cfun, lp->post_landing_pad) == NULL))
       {
-       if (label_to_block (cfun, lp->post_landing_pad) == NULL)
-         {
-           remove_unreachable_handlers ();
-           return;
-         }
+       remove_unreachable_handlers ();
+       return;
       }
 }
 
 /* Remove regions that do not have landing pads.  This assumes
    that remove_unreachable_handlers has already been run, and
@@ -4213,10 +4299,31 @@
   for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
     if (lp)
       changed |= unsplit_eh (lp);
 
   return changed;
+}
+
+/* Wrapper around unsplit_all_eh that makes it usable everywhere.  */
+
+void
+unsplit_eh_edges (void)
+{
+  bool changed;
+
+  /* unsplit_all_eh can die looking up unreachable landing pads.  */
+  maybe_remove_unreachable_handlers ();
+
+  changed = unsplit_all_eh ();
+
+  /* If EH edges have been unsplit, delete unreachable forwarder blocks.  */
+  if (changed)
+    {
+      free_dominance_info (CDI_DOMINATORS);
+      free_dominance_info (CDI_POST_DOMINATORS);
+      delete_unreachable_blocks ();
+    }
 }
 
 /* A subroutine of cleanup_empty_eh.  Redirect all EH edges incoming
    to OLD_BB to NEW_BB; return true on success, false on failure.
 
@@ -4243,13 +4350,14 @@
      into
      <..>
      | | EH
      <..>
      which CFG verification would choke on.  See PR45172 and PR51089.  */
-  FOR_EACH_EDGE (e, ei, old_bb->preds)
-    if (find_edge (e->src, new_bb))
-      return false;
+  if (!single_pred_p (new_bb))
+    FOR_EACH_EDGE (e, ei, old_bb->preds)
+      if (find_edge (e->src, new_bb))
+        return false;
 
   FOR_EACH_EDGE (e, ei, old_bb->preds)
     redirect_edge_var_map_clear (e);
 
   ophi_handled = BITMAP_ALLOC (NULL);
@@ -4634,13 +4742,19 @@
 {
   bool changed = false;
   eh_landing_pad lp;
   int i;
 
-  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
-    if (lp)
-      changed |= cleanup_empty_eh (lp);
+  /* Ideally we'd walk the region tree and process LPs inner to outer
+     to avoid quadraticness in EH redirection.  Walking the LP array
+     in reverse seems to be an approximation of that.  */
+  for (i = vec_safe_length (cfun->eh->lp_array) - 1; i >= 1; --i)
+    {
+      lp = (*cfun->eh->lp_array)[i];
+      if (lp)
+        changed |= cleanup_empty_eh (lp);
+    }
 
   return changed;
 }
 
 /* Perform cleanups and lowering of exception handling
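
As the new comment says, walking the landing-pad array from the highest index down approximates an inner-to-outer walk of the region tree, since inner pads are allocated after their enclosing ones; cleaning inner pads first keeps cleanup_empty_eh from repeatedly re-redirecting the same EH edges. The reverse, 1-based iteration in isolation (stub names):

#include <vector>

struct landing_pad { int index; };

static bool cleanup_empty_eh_sketch (landing_pad *) { return false; }

/* Slot 0 of the array is unused, matching cfun->eh->lp_array.  */
static bool cleanup_all (std::vector<landing_pad *> &lp_array)
{
  bool changed = false;
  for (int i = (int) lp_array.size () - 1; i >= 1; --i)
    if (lp_array[i])
      changed |= cleanup_empty_eh_sketch (lp_array[i]);
  return changed;
}
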
@@ -4744,10 +4858,18 @@
 make_pass_cleanup_eh (gcc::context *ctxt)
 {
   return new pass_cleanup_eh (ctxt);
 }
 
+/* Disable warnings about missing quoting in GCC diagnostics for
+   the verification errors.  Their format strings don't follow GCC
+   diagnostic conventions but are only used for debugging.  */
+#if __GNUC__ >= 10
+# pragma GCC diagnostic push
+# pragma GCC diagnostic ignored "-Wformat-diag"
+#endif
+
 /* Verify that BB containing STMT as the last statement, has precisely the
    edge that make_eh_edges would create.  */
 
 DEBUG_FUNCTION bool
 verify_eh_edges (gimple *stmt)
@@ -4779,11 +4901,11 @@
 
   if (lp == NULL)
     {
       if (eh_edge)
        {
-         error ("BB %i can not throw but has an EH edge", bb->index);
+         error ("BB %i cannot throw but has an EH edge", bb->index);
          return true;
        }
       return false;
     }
 
@@ -4890,5 +5012,9 @@
       return true;
     }
 
   return false;
 }
+
+#if __GNUC__ >= 10
+# pragma GCC diagnostic pop
+#endif
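
The version guard matters because -Wformat-diag only exists from GCC 10 onward, and older GCCs warn about unknown options named in diagnostic pragmas; the push/pop pair keeps the suppression scoped to the debug-only verifiers. The same guarded-suppression pattern in standalone form:

#if __GNUC__ >= 10
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wformat-diag"
#endif

/* Code whose diagnostic strings intentionally break the conventions
   that -Wformat-diag enforces would go here.  */

#if __GNUC__ >= 10
# pragma GCC diagnostic pop
#endif
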