comparison gcc/tree-complex.c @ 131:84e7813d76e9

description	gcc-8.2
author mir3636
date Thu, 25 Oct 2018 07:37:49 +0900
parents 04ced10e8804
children 1830386684a0
@@ -1,7 +1,7 @@
 /* Lower complex number operations to scalar operations.
-   Copyright (C) 2004-2017 Free Software Foundation, Inc.
+   Copyright (C) 2004-2018 Free Software Foundation, Inc.

 This file is part of GCC.

 GCC is free software; you can redistribute it and/or modify it
 under the terms of the GNU General Public License as published by the
@@ -58,10 +58,15 @@
    constants.  */
 typedef int complex_lattice_t;

 #define PAIR(a, b)  ((a) << 2 | (b))

+class complex_propagate : public ssa_propagation_engine
+{
+  enum ssa_prop_result visit_stmt (gimple *, edge *, tree *) FINAL OVERRIDE;
+  enum ssa_prop_result visit_phi (gphi *) FINAL OVERRIDE;
+};

 static vec<complex_lattice_t> complex_lattice_values;

 /* For each complex variable, a pair of variables for the components exists in
    the hashtable.  */
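
A note on the lattice encoding used by PAIR above: each SSA name is classified by a small lattice value, and PAIR packs two such values into one integer so the expanders can switch over operand combinations in a single dispatch. A minimal, standalone sketch of the idea (the enumerator names mirror the ones this file uses; their exact numeric values here are an assumption, not a quotation of the source):

/* Illustration only -- not part of the change above.  */
#include <stdio.h>

enum lattice { UNINITIALIZED = 0, ONLY_REAL = 1, ONLY_IMAG = 2, VARYING = 3 };

#define PAIR(a, b)  ((a) << 2 | (b))

static const char *
describe (enum lattice al, enum lattice bl)
{
  switch (PAIR (al, bl))
    {
    case PAIR (ONLY_REAL, ONLY_REAL):
      return "both operands purely real: scalar arithmetic suffices";
    case PAIR (VARYING, VARYING):
      return "nothing known: full complex expansion (or libcall)";
    default:
      return "mixed knowledge: specialised expansion";
    }
}

int main (void)
{
  printf ("%s\n", describe (ONLY_REAL, ONLY_REAL));
  printf ("%s\n", describe (VARYING, VARYING));
  return 0;
}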
@@ -298,13 +303,13 @@
 }


 /* Evaluate statement STMT against the complex lattice defined above.  */

-static enum ssa_prop_result
-complex_visit_stmt (gimple *stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
+enum ssa_prop_result
+complex_propagate::visit_stmt (gimple *stmt, edge *taken_edge_p ATTRIBUTE_UNUSED,
                     tree *result_p)
 {
   complex_lattice_t new_l, old_l, op1_l, op2_l;
   unsigned int ver;
   tree lhs;

@@ -393,12 +398,12 @@
   return new_l == VARYING ? SSA_PROP_VARYING : SSA_PROP_INTERESTING;
 }

 /* Evaluate a PHI node against the complex lattice defined above.  */

-static enum ssa_prop_result
-complex_visit_phi (gphi *phi)
+enum ssa_prop_result
+complex_propagate::visit_phi (gphi *phi)
 {
   complex_lattice_t new_l, old_l;
   unsigned int ver;
   tree lhs;
   int i;
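
The two hunks above are the mechanical part of moving this pass onto the class-based propagation engine: the former free functions complex_visit_stmt and complex_visit_phi become the visit_stmt / visit_phi overrides of the complex_propagate class declared earlier, and a later hunk swaps the ssa_propagate (callback, callback) call for an object. A self-contained model of that pattern, with stand-in types invented for this sketch (the real base class is ssa_propagation_engine and the real driver is its ssa_propagate member), illustration only:

// Illustration only; `engine`, `stmt_t` and `phi_t` are stand-ins, not GCC types.
#include <cstdio>
#include <vector>

struct stmt_t { int id; };
struct phi_t  { int id; };

class engine
{
public:
  virtual ~engine () {}
  virtual int visit_stmt (stmt_t *) = 0;   // was: a function-pointer argument
  virtual int visit_phi (phi_t *) = 0;     // was: a second function pointer

  // The driver owns the worklist; subclasses only classify what they visit.
  void propagate (std::vector<stmt_t> &stmts, std::vector<phi_t> &phis)
  {
    for (stmt_t &s : stmts)
      std::printf ("stmt %d -> %d\n", s.id, visit_stmt (&s));
    for (phi_t &p : phis)
      std::printf ("phi  %d -> %d\n", p.id, visit_phi (&p));
  }
};

class complex_propagate_model : public engine
{
  int visit_stmt (stmt_t *) override { return 0; }
  int visit_phi (phi_t *) override { return 1; }
};

int main ()
{
  std::vector<stmt_t> stmts = { {1}, {2} };
  std::vector<phi_t> phis = { {3} };
  complex_propagate_model p;
  p.propagate (stmts, phis);
  return 0;
}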
@@ -696,12 +701,11 @@
   stmt = gsi_stmt (*gsi);
   update_stmt (stmt);
   if (maybe_clean_eh_stmt (stmt))
     gimple_purge_dead_eh_edges (gimple_bb (stmt));

-  if (gimple_in_ssa_p (cfun))
-    update_complex_components (gsi, gsi_stmt (*gsi), r, i);
+  update_complex_components (gsi, gsi_stmt (*gsi), r, i);
 }


 /* Generate code at the entry point of the function to initialize the
    component variables for a complex parameter.  */
@@ -971,25 +975,25 @@

   update_complex_assignment (gsi, rr, ri);
 }

 /* Expand a complex multiplication or division to a libcall to the c99
-   compliant routines.  */
-
-static void
-expand_complex_libcall (gimple_stmt_iterator *gsi, tree ar, tree ai,
-                        tree br, tree bi, enum tree_code code)
+   compliant routines.  TYPE is the complex type of the operation.
+   If INPLACE_P replace the statement at GSI with
+   the libcall and return NULL_TREE.  Else insert the call, assign its
+   result to an output variable and return that variable.  If INPLACE_P
+   is true then the statement being replaced should be an assignment
+   statement.  */
+
+static tree
+expand_complex_libcall (gimple_stmt_iterator *gsi, tree type, tree ar, tree ai,
+                        tree br, tree bi, enum tree_code code, bool inplace_p)
 {
   machine_mode mode;
   enum built_in_function bcode;
-  tree fn, type, lhs;
-  gimple *old_stmt;
+  tree fn, lhs;
   gcall *stmt;
-
-  old_stmt = gsi_stmt (*gsi);
-  lhs = gimple_assign_lhs (old_stmt);
-  type = TREE_TYPE (lhs);

   mode = TYPE_MODE (type);
   gcc_assert (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT);

   if (code == MULT_EXPR)
@@ -999,39 +1003,91 @@
     bcode = ((enum built_in_function)
              (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
   else
     gcc_unreachable ();
   fn = builtin_decl_explicit (bcode);
-
   stmt = gimple_build_call (fn, 4, ar, ai, br, bi);
+
+  if (inplace_p)
+    {
+      gimple *old_stmt = gsi_stmt (*gsi);
+      gimple_call_set_nothrow (stmt, !stmt_could_throw_p (cfun, old_stmt));
+      lhs = gimple_assign_lhs (old_stmt);
+      gimple_call_set_lhs (stmt, lhs);
+      gsi_replace (gsi, stmt, true);
+
+      type = TREE_TYPE (type);
+      if (stmt_can_throw_internal (cfun, stmt))
+        {
+          edge_iterator ei;
+          edge e;
+          FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->succs)
+            if (!(e->flags & EDGE_EH))
+              break;
+          basic_block bb = split_edge (e);
+          gimple_stmt_iterator gsi2 = gsi_start_bb (bb);
+          update_complex_components (&gsi2, stmt,
+                                     build1 (REALPART_EXPR, type, lhs),
+                                     build1 (IMAGPART_EXPR, type, lhs));
+          return NULL_TREE;
+        }
+      else
+        update_complex_components (gsi, stmt,
+                                   build1 (REALPART_EXPR, type, lhs),
+                                   build1 (IMAGPART_EXPR, type, lhs));
+      SSA_NAME_DEF_STMT (lhs) = stmt;
+      return NULL_TREE;
+    }
+
+  gimple_call_set_nothrow (stmt, true);
+  lhs = make_ssa_name (type);
   gimple_call_set_lhs (stmt, lhs);
-  update_stmt (stmt);
-  gsi_replace (gsi, stmt, false);
-
-  if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
-    gimple_purge_dead_eh_edges (gsi_bb (*gsi));
-
-  if (gimple_in_ssa_p (cfun))
-    {
-      type = TREE_TYPE (type);
-      update_complex_components (gsi, stmt,
-                                 build1 (REALPART_EXPR, type, lhs),
-                                 build1 (IMAGPART_EXPR, type, lhs));
-      SSA_NAME_DEF_STMT (lhs) = stmt;
-    }
+  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
+
+  return lhs;
+}
+
+/* Perform a complex multiplication on two complex constants A, B represented
+   by AR, AI, BR, BI of type TYPE.
+   The operation we want is: a * b = (ar*br - ai*bi) + i(ar*bi + br*ai).
+   Insert the GIMPLE statements into GSI.  Store the real and imaginary
+   components of the result into RR and RI.  */
+
+static void
+expand_complex_multiplication_components (gimple_stmt_iterator *gsi,
+                                          tree type, tree ar, tree ai,
+                                          tree br, tree bi,
+                                          tree *rr, tree *ri)
+{
+  tree t1, t2, t3, t4;
+
+  t1 = gimplify_build2 (gsi, MULT_EXPR, type, ar, br);
+  t2 = gimplify_build2 (gsi, MULT_EXPR, type, ai, bi);
+  t3 = gimplify_build2 (gsi, MULT_EXPR, type, ar, bi);
+
+  /* Avoid expanding redundant multiplication for the common
+     case of squaring a complex number.  */
+  if (ar == br && ai == bi)
+    t4 = t3;
+  else
+    t4 = gimplify_build2 (gsi, MULT_EXPR, type, ai, br);
+
+  *rr = gimplify_build2 (gsi, MINUS_EXPR, type, t1, t2);
+  *ri = gimplify_build2 (gsi, PLUS_EXPR, type, t3, t4);
 }

 /* Expand complex multiplication to scalars:
        a * b = (ar*br - ai*bi) + i(ar*bi + br*ai)
 */

 static void
-expand_complex_multiplication (gimple_stmt_iterator *gsi, tree inner_type,
+expand_complex_multiplication (gimple_stmt_iterator *gsi, tree type,
                                tree ar, tree ai, tree br, tree bi,
                                complex_lattice_t al, complex_lattice_t bl)
 {
   tree rr, ri;
+  tree inner_type = TREE_TYPE (type);

   if (al < bl)
     {
       complex_lattice_t tl;
       rr = ar, ar = br, br = rr;
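
The helper added above factors out the textbook product formula; its only wrinkle is the squaring shortcut, where the two cross products ar*bi and ai*br are the same value and one multiplication can be reused. A quick arithmetic check of that identity in plain C (illustration only; it exercises the formula, not the GIMPLE helper):

#include <complex.h>
#include <stdio.h>

int main (void)
{
  double x = 3.0, y = -2.0;
  /* Squaring needs only three multiplies: (x + iy)^2 = (x*x - y*y) + i(2*x*y).  */
  double t1 = x * x, t2 = y * y, t3 = x * y;
  double rr = t1 - t2;
  double ri = t3 + t3;          /* t4 reused as t3, as in the helper */
  double _Complex ref = (x + y * I) * (x + y * I);
  printf ("scalar: %g%+gi\n", rr, ri);
  printf ("ref:    %g%+gi\n", creal (ref), cimag (ref));
  return 0;
}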
@@ -1073,31 +1129,81 @@
       break;

     case PAIR (VARYING, VARYING):
       if (flag_complex_method == 2 && SCALAR_FLOAT_TYPE_P (inner_type))
         {
-          expand_complex_libcall (gsi, ar, ai, br, bi, MULT_EXPR);
-          return;
+          /* If optimizing for size or not at all just do a libcall.
+             Same if there are exception-handling edges or signaling NaNs.  */
+          if (optimize == 0 || optimize_bb_for_size_p (gsi_bb (*gsi))
+              || stmt_can_throw_internal (cfun, gsi_stmt (*gsi))
+              || flag_signaling_nans)
+            {
+              expand_complex_libcall (gsi, type, ar, ai, br, bi,
+                                      MULT_EXPR, true);
+              return;
+            }
+
+          /* Else, expand x = a * b into
+             x = (ar*br - ai*bi) + i(ar*bi + br*ai);
+             if (isunordered (__real__ x, __imag__ x))
+               x = __muldc3 (a, b);  */
+
+          tree tmpr, tmpi;
+          expand_complex_multiplication_components (gsi, inner_type, ar, ai,
+                                                    br, bi, &tmpr, &tmpi);
+
+          gimple *check
+            = gimple_build_cond (UNORDERED_EXPR, tmpr, tmpi,
+                                 NULL_TREE, NULL_TREE);
+
+          basic_block orig_bb = gsi_bb (*gsi);
+          /* We want to keep track of the original complex multiplication
+             statement as we're going to modify it later in
+             update_complex_assignment.  Make sure that insert_cond_bb leaves
+             that statement in the join block.  */
+          gsi_prev (gsi);
+          basic_block cond_bb
+            = insert_cond_bb (gsi_bb (*gsi), gsi_stmt (*gsi), check,
+                              profile_probability::very_unlikely ());
+
+
+          gimple_stmt_iterator cond_bb_gsi = gsi_last_bb (cond_bb);
+          gsi_insert_after (&cond_bb_gsi, gimple_build_nop (), GSI_NEW_STMT);
+
+          tree libcall_res
+            = expand_complex_libcall (&cond_bb_gsi, type, ar, ai, br,
+                                      bi, MULT_EXPR, false);
+          tree cond_real = gimplify_build1 (&cond_bb_gsi, REALPART_EXPR,
+                                            inner_type, libcall_res);
+          tree cond_imag = gimplify_build1 (&cond_bb_gsi, IMAGPART_EXPR,
+                                            inner_type, libcall_res);
+
+          basic_block join_bb = single_succ_edge (cond_bb)->dest;
+          *gsi = gsi_start_nondebug_after_labels_bb (join_bb);
+
+          /* We have a conditional block with some assignments in cond_bb.
+             Wire up the PHIs to wrap up.  */
+          rr = make_ssa_name (inner_type);
+          ri = make_ssa_name (inner_type);
+          edge cond_to_join = single_succ_edge (cond_bb);
+          edge orig_to_join = find_edge (orig_bb, join_bb);
+
+          gphi *real_phi = create_phi_node (rr, gsi_bb (*gsi));
+          add_phi_arg (real_phi, cond_real, cond_to_join,
+                       UNKNOWN_LOCATION);
+          add_phi_arg (real_phi, tmpr, orig_to_join, UNKNOWN_LOCATION);
+
+          gphi *imag_phi = create_phi_node (ri, gsi_bb (*gsi));
+          add_phi_arg (imag_phi, cond_imag, cond_to_join,
+                       UNKNOWN_LOCATION);
+          add_phi_arg (imag_phi, tmpi, orig_to_join, UNKNOWN_LOCATION);
         }
       else
-        {
-          tree t1, t2, t3, t4;
-
-          t1 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, br);
-          t2 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, bi);
-          t3 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ar, bi);
-
-          /* Avoid expanding redundant multiplication for the common
-             case of squaring a complex number.  */
-          if (ar == br && ai == bi)
-            t4 = t3;
-          else
-            t4 = gimplify_build2 (gsi, MULT_EXPR, inner_type, ai, br);
-
-          rr = gimplify_build2 (gsi, MINUS_EXPR, inner_type, t1, t2);
-          ri = gimplify_build2 (gsi, PLUS_EXPR, inner_type, t3, t4);
-        }
+        /* If we are not worrying about NaNs expand to
+           (ar*br - ai*bi) + i(ar*bi + br*ai) directly.  */
+        expand_complex_multiplication_components (gsi, inner_type, ar, ai,
+                                                  br, bi, &rr, &ri);
       break;

     default:
       gcc_unreachable ();
     }
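
What the new PAIR (VARYING, VARYING) path above buys: when optimizing with quiet NaNs, the multiplication is done inline and the Annex G routine is only reached through the unlikely branch when the inline result contains a NaN (possibly a false alarm caused by infinite operands). A runnable restatement of the shape the "Else, expand x = a * b into ..." comment describes, with the compiler's own complex multiply standing in for __muldc3 (illustration, not the pass's code):

#include <complex.h>
#include <math.h>
#include <stdio.h>

static double _Complex
mul_with_recovery (double ar, double ai, double br, double bi)
{
  double rr = ar * br - ai * bi;
  double ri = ar * bi + br * ai;
  if (isunordered (rr, ri))                  /* some component is NaN */
    return (ar + ai * I) * (br + bi * I);    /* stand-in for __muldc3 */
  return rr + ri * I;
}

int main (void)
{
  double _Complex z = mul_with_recovery (1.0, 2.0, 3.0, 4.0);
  printf ("%g%+gi\n", creal (z), cimag (z));   /* -5+10i */
  return 0;
}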
@@ -1163,18 +1269,12 @@
 {
   edge e;
   gimple *stmt;
   tree cond, tmp;

-  tmp = create_tmp_var (boolean_type_node);
+  tmp = make_ssa_name (boolean_type_node);
   stmt = gimple_build_assign (tmp, compare);
-  if (gimple_in_ssa_p (cfun))
-    {
-      tmp = make_ssa_name (tmp, stmt);
-      gimple_assign_set_lhs (stmt, tmp);
-    }
-
   gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

   cond = fold_build2_loc (gimple_location (stmt),
                           EQ_EXPR, boolean_type_node, tmp, boolean_true_node);
   stmt = gimple_build_cond_from_tree (cond, NULL_TREE, NULL_TREE);
@@ -1184,11 +1284,10 @@
   e = split_block (gsi_bb (*gsi), stmt);
   bb_cond = e->src;
   bb_join = e->dest;
   bb_true = create_empty_bb (bb_cond);
   bb_false = create_empty_bb (bb_true);
-  bb_true->frequency = bb_false->frequency = bb_cond->frequency / 2;
   bb_true->count = bb_false->count
     = bb_cond->count.apply_probability (profile_probability::even ());

   /* Wire the blocks together.  */
   e->flags = EDGE_TRUE_VALUE;
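
For context, the two hunks above sit inside the wide-range division expander (expand_complex_div_wide in this file), which branches on a comparison of |br| and |bi|, runs one of two scaled scalar sequences, and joins the results with PHIs; the change itself only swaps create_tmp_var for a direct SSA name and drops the obsolete frequency field. A source-level sketch of the diamond being wired up; the concrete Smith-style formulas are an assumption about the surrounding, unchanged code, and the control-flow shape is the point:

#include <math.h>
#include <stdio.h>

static void
div_wide_shape (double ar, double ai, double br, double bi,
                double *rr, double *ri)
{
  double t, den;
  if (fabs (br) < fabs (bi))    /* bb_cond: the generated comparison */
    {                           /* bb_true */
      t = br / bi;
      den = bi * (1.0 + t * t);
      *rr = (ar * t + ai) / den;
      *ri = (ai * t - ar) / den;
    }
  else
    {                           /* bb_false */
      t = bi / br;
      den = br * (1.0 + t * t);
      *rr = (ar + ai * t) / den;
      *ri = (ai - ar * t) / den;
    }
  /* bb_join: PHI nodes pick up *rr / *ri from whichever arm ran.  */
}

int main (void)
{
  double rr, ri;
  div_wide_shape (1.0, 2.0, 3.0, 4.0, &rr, &ri);
  printf ("%g%+gi\n", rr, ri);   /* 0.44+0.08i */
  return 0;
}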
@@ -1302,17 +1401,18 @@
 }

 /* Expand complex division to scalars.  */

 static void
-expand_complex_division (gimple_stmt_iterator *gsi, tree inner_type,
+expand_complex_division (gimple_stmt_iterator *gsi, tree type,
                          tree ar, tree ai, tree br, tree bi,
                          enum tree_code code,
                          complex_lattice_t al, complex_lattice_t bl)
 {
   tree rr, ri;

+  tree inner_type = TREE_TYPE (type);
   switch (PAIR (al, bl))
     {
     case PAIR (ONLY_REAL, ONLY_REAL):
       rr = gimplify_build2 (gsi, code, inner_type, ar, br);
       ri = ai;
@@ -1356,11 +1456,11 @@
           break;

         case 2:
           if (SCALAR_FLOAT_TYPE_P (inner_type))
             {
-              expand_complex_libcall (gsi, ar, ai, br, bi, code);
+              expand_complex_libcall (gsi, type, ar, ai, br, bi, code, true);
               break;
             }
           /* FALLTHRU */

         case 1:
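
The "case 2" above is flag_complex_method == 2, the C99 Annex G configuration: a division of two fully varying floating-point complexes is handed to the libgcc routine rather than expanded inline, now through the in-place mode of the rewritten helper. A small source-level demonstration; the routine name __divdc3 for double _Complex comes from the libgcc documentation, and whether it actually appears depends on optimization and -fcx-limited-range, so treat the comment below as a hint to check the generated assembly rather than a guarantee:

#include <complex.h>
#include <stdio.h>

double _Complex
quotient (double _Complex a, double _Complex b)
{
  /* With the defaults, this typically compiles to a single call such as
     __divdc3 (creal (a), cimag (a), creal (b), cimag (b)).  */
  return a / b;
}

int main (void)
{
  double _Complex q = quotient (1.0 + 2.0 * I, 3.0 + 4.0 * I);
  printf ("%g%+gi\n", creal (q), cimag (q));   /* 0.44+0.08i */
  return 0;
}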
@@ -1596,47 +1696,42 @@
       bi = extract_component (gsi, bc, 1, true);
     }
   else
     br = bi = NULL_TREE;

-  if (gimple_in_ssa_p (cfun))
-    {
-      al = find_lattice_value (ac);
-      if (al == UNINITIALIZED)
-        al = VARYING;
-
-      if (TREE_CODE_CLASS (code) == tcc_unary)
-        bl = UNINITIALIZED;
-      else if (ac == bc)
-        bl = al;
-      else
-        {
-          bl = find_lattice_value (bc);
-          if (bl == UNINITIALIZED)
-            bl = VARYING;
-        }
-    }
+  al = find_lattice_value (ac);
+  if (al == UNINITIALIZED)
+    al = VARYING;
+
+  if (TREE_CODE_CLASS (code) == tcc_unary)
+    bl = UNINITIALIZED;
+  else if (ac == bc)
+    bl = al;
   else
-    al = bl = VARYING;
+    {
+      bl = find_lattice_value (bc);
+      if (bl == UNINITIALIZED)
+        bl = VARYING;
+    }

   switch (code)
     {
     case PLUS_EXPR:
     case MINUS_EXPR:
       expand_complex_addition (gsi, inner_type, ar, ai, br, bi, code, al, bl);
       break;

     case MULT_EXPR:
-      expand_complex_multiplication (gsi, inner_type, ar, ai, br, bi, al, bl);
+      expand_complex_multiplication (gsi, type, ar, ai, br, bi, al, bl);
       break;

     case TRUNC_DIV_EXPR:
     case CEIL_DIV_EXPR:
     case FLOOR_DIV_EXPR:
     case ROUND_DIV_EXPR:
     case RDIV_EXPR:
-      expand_complex_division (gsi, inner_type, ar, ai, br, bi, code, al, bl);
+      expand_complex_division (gsi, type, ar, ai, br, bi, code, al, bl);
       break;

     case NEGATE_EXPR:
       expand_complex_negation (gsi, inner_type, ar, ai);
       break;
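
The lattice values fed into the switch above are what make the pass more than a straight expander: operands proven ONLY_REAL or ONLY_IMAG get the cheap PAIR cases (most of which fall in parts of the expanders elided from this comparison) instead of the full four-multiply expansion or a libcall. An illustrative source-level example; the lowering happens on GIMPLE, so the C below only suggests the effect:

#include <complex.h>
#include <stdio.h>

static double _Complex
scale (double r, double _Complex w)
{
  double _Complex a = r;   /* imaginary part is a known zero: ONLY_REAL */
  return a * w;            /* expands to r*creal(w) + i*(r*cimag(w)), no libcall */
}

int main (void)
{
  double _Complex z = scale (2.0, 3.0 + 4.0 * I);
  printf ("%g%+gi\n", creal (z), cimag (z));   /* 6+8i */
  return 0;
}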
@@ -1671,11 +1766,12 @@

   complex_lattice_values.create (num_ssa_names);
   complex_lattice_values.safe_grow_cleared (num_ssa_names);

   init_parameter_lattice_values ();
-  ssa_propagate (complex_visit_stmt, complex_visit_phi);
+  class complex_propagate complex_propagate;
+  complex_propagate.ssa_propagate ();

   complex_variable_components = new int_tree_htab_type (10);

   complex_ssa_name_components.create (2 * num_ssa_names);
   complex_ssa_name_components.safe_grow_cleared (2 * num_ssa_names);
@@ -1685,10 +1781,12 @@
   rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
   n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);
   for (i = 0; i < n_bbs; i++)
     {
       bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
+      if (!bb)
+        continue;
       update_phi_components (bb);
       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
         expand_complex_operations_1 (&gsi);
     }
