Mercurial > hg > CbC > CbC_gcc
comparison gcc/tree-ssa-pre.c @ 55:77e2b8dfacca gcc-4.4.5
update it from 4.4.3 to 4.5.0
author | ryoma <e075725@ie.u-ryukyu.ac.jp> |
---|---|
date | Fri, 12 Feb 2010 23:39:51 +0900 |
parents | 3bfb6c00c1e0 |
children | b7f97abdc517 |
comparison
equal
deleted
inserted
replaced
52:c156f1bd5cd9 | 55:77e2b8dfacca |
---|---|
43 #include "flags.h" | 43 #include "flags.h" |
44 #include "bitmap.h" | 44 #include "bitmap.h" |
45 #include "langhooks.h" | 45 #include "langhooks.h" |
46 #include "cfgloop.h" | 46 #include "cfgloop.h" |
47 #include "tree-ssa-sccvn.h" | 47 #include "tree-ssa-sccvn.h" |
48 #include "tree-scalar-evolution.h" | |
48 #include "params.h" | 49 #include "params.h" |
49 #include "dbgcnt.h" | 50 #include "dbgcnt.h" |
50 | 51 |
51 /* TODO: | 52 /* TODO: |
52 | 53 |
132 It would also require an additional indirection at each point we | 133 It would also require an additional indirection at each point we |
133 use the value id. */ | 134 use the value id. */ |
134 | 135 |
135 /* Representation of expressions on value numbers: | 136 /* Representation of expressions on value numbers: |
136 | 137 |
137 Expressions consisting of value numbers are represented the same | 138 Expressions consisting of value numbers are represented the same |
138 way as our VN internally represents them, with an additional | 139 way as our VN internally represents them, with an additional |
139 "pre_expr" wrapping around them in order to facilitate storing all | 140 "pre_expr" wrapping around them in order to facilitate storing all |
140 of the expressions in the same sets. */ | 141 of the expressions in the same sets. */ |
141 | 142 |
142 /* Representation of sets: | 143 /* Representation of sets: |
375 /* The NEW_SETS set, which is used during insertion to augment the | 376 /* The NEW_SETS set, which is used during insertion to augment the |
376 AVAIL_OUT set of blocks with the new insertions performed during | 377 AVAIL_OUT set of blocks with the new insertions performed during |
377 the current iteration. */ | 378 the current iteration. */ |
378 bitmap_set_t new_sets; | 379 bitmap_set_t new_sets; |
379 | 380 |
381 /* A cache for value_dies_in_block_x. */ | |
382 bitmap expr_dies; | |
383 | |
380 /* True if we have visited this block during ANTIC calculation. */ | 384 /* True if we have visited this block during ANTIC calculation. */ |
381 unsigned int visited:1; | 385 unsigned int visited:1; |
382 | 386 |
383 /* True we have deferred processing this block during ANTIC | 387 /* True we have deferred processing this block during ANTIC |
384 calculation until its successor is processed. */ | 388 calculation until its successor is processed. */ |
390 #define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen | 394 #define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen |
391 #define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out | 395 #define AVAIL_OUT(BB) ((bb_value_sets_t) ((BB)->aux))->avail_out |
392 #define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in | 396 #define ANTIC_IN(BB) ((bb_value_sets_t) ((BB)->aux))->antic_in |
393 #define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in | 397 #define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in |
394 #define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets | 398 #define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets |
395 #define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited | 399 #define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies |
400 #define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited | |
396 #define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred | 401 #define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred |
397 | 402 |
398 | 403 |
399 /* Basic block list in postorder. */ | 404 /* Basic block list in postorder. */ |
400 static int *postorder; | 405 static int *postorder; |
897 fprintf (outfile, "{"); | 902 fprintf (outfile, "{"); |
898 for (i = 0; | 903 for (i = 0; |
899 VEC_iterate (vn_reference_op_s, ref->operands, i, vro); | 904 VEC_iterate (vn_reference_op_s, ref->operands, i, vro); |
900 i++) | 905 i++) |
901 { | 906 { |
907 bool closebrace = false; | |
902 if (vro->opcode != SSA_NAME | 908 if (vro->opcode != SSA_NAME |
903 && TREE_CODE_CLASS (vro->opcode) != tcc_declaration) | 909 && TREE_CODE_CLASS (vro->opcode) != tcc_declaration) |
904 fprintf (outfile, "%s ", tree_code_name [vro->opcode]); | 910 { |
911 fprintf (outfile, "%s", tree_code_name [vro->opcode]); | |
912 if (vro->op0) | |
913 { | |
914 fprintf (outfile, "<"); | |
915 closebrace = true; | |
916 } | |
917 } | |
905 if (vro->op0) | 918 if (vro->op0) |
906 { | 919 { |
907 if (vro->op1) | |
908 fprintf (outfile, "<"); | |
909 print_generic_expr (outfile, vro->op0, 0); | 920 print_generic_expr (outfile, vro->op0, 0); |
910 if (vro->op1) | 921 if (vro->op1) |
911 { | 922 { |
912 fprintf (outfile, ","); | 923 fprintf (outfile, ","); |
913 print_generic_expr (outfile, vro->op1, 0); | 924 print_generic_expr (outfile, vro->op1, 0); |
914 } | 925 } |
915 if (vro->op1) | 926 if (vro->op2) |
916 fprintf (outfile, ">"); | 927 { |
928 fprintf (outfile, ","); | |
929 print_generic_expr (outfile, vro->op2, 0); | |
930 } | |
917 } | 931 } |
932 if (closebrace) | |
933 fprintf (outfile, ">"); | |
918 if (i != VEC_length (vn_reference_op_s, ref->operands) - 1) | 934 if (i != VEC_length (vn_reference_op_s, ref->operands) - 1) |
919 fprintf (outfile, ","); | 935 fprintf (outfile, ","); |
920 } | 936 } |
921 fprintf (outfile, "}"); | 937 fprintf (outfile, "}"); |
938 if (ref->vuse) | |
939 { | |
940 fprintf (outfile, "@"); | |
941 print_generic_expr (outfile, ref->vuse, 0); | |
942 } | |
922 } | 943 } |
923 break; | 944 break; |
924 } | 945 } |
925 } | 946 } |
926 void debug_pre_expr (pre_expr); | 947 void debug_pre_expr (pre_expr); |
1130 not be available readily. */ | 1151 not be available readily. */ |
1131 return e; | 1152 return e; |
1132 } | 1153 } |
1133 case tcc_reference: | 1154 case tcc_reference: |
1134 if (nary->opcode != REALPART_EXPR | 1155 if (nary->opcode != REALPART_EXPR |
1135 && nary->opcode != IMAGPART_EXPR | 1156 && nary->opcode != IMAGPART_EXPR |
1136 && nary->opcode != VIEW_CONVERT_EXPR) | 1157 && nary->opcode != VIEW_CONVERT_EXPR) |
1137 return e; | 1158 return e; |
1138 /* Fallthrough. */ | 1159 /* Fallthrough. */ |
1139 case tcc_unary: | 1160 case tcc_unary: |
1140 do_unary: | 1161 do_unary: |
1216 return e; | 1237 return e; |
1217 } | 1238 } |
1218 return e; | 1239 return e; |
1219 } | 1240 } |
1220 | 1241 |
1221 /* Translate the vuses in the VUSES vector backwards through phi nodes | 1242 /* Translate the VUSE backwards through phi nodes in PHIBLOCK, so that |
1222 in PHIBLOCK, so that they have the value they would have in | 1243 it has the value it would have in BLOCK. Set *SAME_VALID to true |
1223 BLOCK. */ | 1244 in case the new vuse doesn't change the value id of the OPERANDS. */ |
1224 | 1245 |
1225 static VEC(tree, gc) * | 1246 static tree |
1226 translate_vuses_through_block (VEC (tree, gc) *vuses, | 1247 translate_vuse_through_block (VEC (vn_reference_op_s, heap) *operands, |
1227 basic_block phiblock, | 1248 alias_set_type set, tree type, tree vuse, |
1228 basic_block block) | 1249 basic_block phiblock, |
1229 { | 1250 basic_block block, bool *same_valid) |
1230 tree oldvuse; | 1251 { |
1231 VEC(tree, gc) *result = NULL; | 1252 gimple phi = SSA_NAME_DEF_STMT (vuse); |
1232 int i; | 1253 ao_ref ref; |
1233 | 1254 edge e = NULL; |
1234 for (i = 0; VEC_iterate (tree, vuses, i, oldvuse); i++) | 1255 bool use_oracle; |
1235 { | 1256 |
1236 gimple phi = SSA_NAME_DEF_STMT (oldvuse); | 1257 *same_valid = true; |
1237 if (gimple_code (phi) == GIMPLE_PHI | 1258 |
1238 && gimple_bb (phi) == phiblock) | 1259 if (gimple_bb (phi) != phiblock) |
1260 return vuse; | |
1261 | |
1262 use_oracle = ao_ref_init_from_vn_reference (&ref, set, type, operands); | |
1263 | |
1264 /* Use the alias-oracle to find either the PHI node in this block, | |
1265 the first VUSE used in this block that is equivalent to vuse or | |
1266 the first VUSE which definition in this block kills the value. */ | |
1267 if (gimple_code (phi) == GIMPLE_PHI) | |
1268 e = find_edge (block, phiblock); | |
1269 else if (use_oracle) | |
1270 while (!stmt_may_clobber_ref_p_1 (phi, &ref)) | |
1271 { | |
1272 vuse = gimple_vuse (phi); | |
1273 phi = SSA_NAME_DEF_STMT (vuse); | |
1274 if (gimple_bb (phi) != phiblock) | |
1275 return vuse; | |
1276 if (gimple_code (phi) == GIMPLE_PHI) | |
1277 { | |
1278 e = find_edge (block, phiblock); | |
1279 break; | |
1280 } | |
1281 } | |
1282 else | |
1283 return NULL_TREE; | |
1284 | |
1285 if (e) | |
1286 { | |
1287 if (use_oracle) | |
1239 { | 1288 { |
1240 edge e = find_edge (block, gimple_bb (phi)); | 1289 bitmap visited = NULL; |
1241 if (e) | 1290 /* Try to find a vuse that dominates this phi node by skipping |
1242 { | 1291 non-clobbering statements. */ |
1243 tree def = PHI_ARG_DEF (phi, e->dest_idx); | 1292 vuse = get_continuation_for_phi (phi, &ref, &visited); |
1244 if (def != oldvuse) | 1293 if (visited) |
1245 { | 1294 BITMAP_FREE (visited); |
1246 if (!result) | |
1247 result = VEC_copy (tree, gc, vuses); | |
1248 VEC_replace (tree, result, i, def); | |
1249 } | |
1250 } | |
1251 } | 1295 } |
1252 } | 1296 else |
1253 | 1297 vuse = NULL_TREE; |
1254 /* We avoid creating a new copy of the vuses unless something | 1298 if (!vuse) |
1255 actually changed, so result can be NULL. */ | 1299 { |
1256 if (result) | 1300 /* If we didn't find any, the value ID can't stay the same, |
1257 { | 1301 but return the translated vuse. */ |
1258 sort_vuses (result); | 1302 *same_valid = false; |
1259 return result; | 1303 vuse = PHI_ARG_DEF (phi, e->dest_idx); |
1260 } | 1304 } |
1261 return vuses; | 1305 /* ??? We would like to return vuse here as this is the canonical |
1262 | 1306 upmost vdef that this reference is associated with. But during |
1263 } | 1307 insertion of the references into the hash tables we only ever |
1264 | 1308 directly insert with their direct gimple_vuse, hence returning |
1265 /* Like find_leader, but checks for the value existing in SET1 *or* | 1309 something else would make us not find the other expression. */ |
1310 return PHI_ARG_DEF (phi, e->dest_idx); | |
1311 } | |
1312 | |
1313 return NULL_TREE; | |
1314 } | |
1315 | |
1316 /* Like bitmap_find_leader, but checks for the value existing in SET1 *or* | |
1266 SET2. This is used to avoid making a set consisting of the union | 1317 SET2. This is used to avoid making a set consisting of the union |
1267 of PA_IN and ANTIC_IN during insert. */ | 1318 of PA_IN and ANTIC_IN during insert. */ |
1268 | 1319 |
1269 static inline pre_expr | 1320 static inline pre_expr |
1270 find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2) | 1321 find_leader_in_sets (unsigned int val, bitmap_set_t set1, bitmap_set_t set2) |
1287 case NAME: | 1338 case NAME: |
1288 return TREE_TYPE (PRE_EXPR_NAME (e)); | 1339 return TREE_TYPE (PRE_EXPR_NAME (e)); |
1289 case CONSTANT: | 1340 case CONSTANT: |
1290 return TREE_TYPE (PRE_EXPR_CONSTANT (e)); | 1341 return TREE_TYPE (PRE_EXPR_CONSTANT (e)); |
1291 case REFERENCE: | 1342 case REFERENCE: |
1292 { | 1343 return PRE_EXPR_REFERENCE (e)->type; |
1293 vn_reference_op_t vro; | |
1294 | |
1295 gcc_assert (PRE_EXPR_REFERENCE (e)->operands); | |
1296 vro = VEC_index (vn_reference_op_s, | |
1297 PRE_EXPR_REFERENCE (e)->operands, | |
1298 0); | |
1299 /* We don't store type along with COMPONENT_REF because it is | |
1300 always the same as FIELD_DECL's type. */ | |
1301 if (!vro->type) | |
1302 { | |
1303 gcc_assert (vro->opcode == COMPONENT_REF); | |
1304 return TREE_TYPE (vro->op0); | |
1305 } | |
1306 return vro->type; | |
1307 } | |
1308 | |
1309 case NARY: | 1344 case NARY: |
1310 return PRE_EXPR_NARY (e)->type; | 1345 return PRE_EXPR_NARY (e)->type; |
1311 } | 1346 } |
1312 gcc_unreachable(); | 1347 gcc_unreachable(); |
1313 } | 1348 } |
1517 | 1552 |
1518 case REFERENCE: | 1553 case REFERENCE: |
1519 { | 1554 { |
1520 vn_reference_t ref = PRE_EXPR_REFERENCE (expr); | 1555 vn_reference_t ref = PRE_EXPR_REFERENCE (expr); |
1521 VEC (vn_reference_op_s, heap) *operands = ref->operands; | 1556 VEC (vn_reference_op_s, heap) *operands = ref->operands; |
1522 VEC (tree, gc) *vuses = ref->vuses; | 1557 tree vuse = ref->vuse; |
1523 VEC (tree, gc) *newvuses = vuses; | 1558 tree newvuse = vuse; |
1524 VEC (vn_reference_op_s, heap) *newoperands = NULL; | 1559 VEC (vn_reference_op_s, heap) *newoperands = NULL; |
1525 bool changed = false; | 1560 bool changed = false, same_valid = true; |
1526 unsigned int i; | 1561 unsigned int i, j; |
1527 vn_reference_op_t operand; | 1562 vn_reference_op_t operand; |
1528 vn_reference_t newref; | 1563 vn_reference_t newref; |
1529 | 1564 |
1530 for (i = 0; VEC_iterate (vn_reference_op_s, operands, i, operand); i++) | 1565 for (i = 0, j = 0; |
1566 VEC_iterate (vn_reference_op_s, operands, i, operand); i++, j++) | |
1531 { | 1567 { |
1532 pre_expr opresult; | 1568 pre_expr opresult; |
1533 pre_expr leader; | 1569 pre_expr leader; |
1534 tree oldop0 = operand->op0; | 1570 tree oldop0 = operand->op0; |
1535 tree oldop1 = operand->op1; | 1571 tree oldop1 = operand->op1; |
1570 op1 = name; | 1606 op1 = name; |
1571 } | 1607 } |
1572 else if (!opresult) | 1608 else if (!opresult) |
1573 break; | 1609 break; |
1574 } | 1610 } |
1611 /* We can't possibly insert these. */ | |
1612 else if (op1 && !is_gimple_min_invariant (op1)) | |
1613 break; | |
1575 changed |= op1 != oldop1; | 1614 changed |= op1 != oldop1; |
1576 if (op2 && TREE_CODE (op2) == SSA_NAME) | 1615 if (op2 && TREE_CODE (op2) == SSA_NAME) |
1577 { | 1616 { |
1578 unsigned int op_val_id = VN_INFO (op2)->value_id; | 1617 unsigned int op_val_id = VN_INFO (op2)->value_id; |
1579 leader = find_leader_in_sets (op_val_id, set1, set2); | 1618 leader = find_leader_in_sets (op_val_id, set1, set2); |
1586 op2 = name; | 1625 op2 = name; |
1587 } | 1626 } |
1588 else if (!opresult) | 1627 else if (!opresult) |
1589 break; | 1628 break; |
1590 } | 1629 } |
1630 /* We can't possibly insert these. */ | |
1631 else if (op2 && !is_gimple_min_invariant (op2)) | |
1632 break; | |
1591 changed |= op2 != oldop2; | 1633 changed |= op2 != oldop2; |
1592 | 1634 |
1593 if (!newoperands) | 1635 if (!newoperands) |
1594 newoperands = VEC_copy (vn_reference_op_s, heap, operands); | 1636 newoperands = VEC_copy (vn_reference_op_s, heap, operands); |
1595 /* We may have changed from an SSA_NAME to a constant */ | 1637 /* We may have changed from an SSA_NAME to a constant */ |
1597 newop.opcode = TREE_CODE (op0); | 1639 newop.opcode = TREE_CODE (op0); |
1598 newop.type = type; | 1640 newop.type = type; |
1599 newop.op0 = op0; | 1641 newop.op0 = op0; |
1600 newop.op1 = op1; | 1642 newop.op1 = op1; |
1601 newop.op2 = op2; | 1643 newop.op2 = op2; |
1602 VEC_replace (vn_reference_op_s, newoperands, i, &newop); | 1644 VEC_replace (vn_reference_op_s, newoperands, j, &newop); |
1645 /* If it transforms from an SSA_NAME to an address, fold with | |
1646 a preceding indirect reference. */ | |
1647 if (j > 0 && op0 && TREE_CODE (op0) == ADDR_EXPR | |
1648 && VEC_index (vn_reference_op_s, | |
1649 newoperands, j - 1)->opcode == INDIRECT_REF) | |
1650 vn_reference_fold_indirect (&newoperands, &j); | |
1603 } | 1651 } |
1604 if (i != VEC_length (vn_reference_op_s, operands)) | 1652 if (i != VEC_length (vn_reference_op_s, operands)) |
1605 { | 1653 { |
1606 if (newoperands) | 1654 if (newoperands) |
1607 VEC_free (vn_reference_op_s, heap, newoperands); | 1655 VEC_free (vn_reference_op_s, heap, newoperands); |
1608 return NULL; | 1656 return NULL; |
1609 } | 1657 } |
1610 | 1658 |
1611 newvuses = translate_vuses_through_block (vuses, phiblock, pred); | 1659 if (vuse) |
1612 changed |= newvuses != vuses; | 1660 { |
1613 | 1661 newvuse = translate_vuse_through_block (newoperands, |
1614 if (changed) | 1662 ref->set, ref->type, |
1663 vuse, phiblock, pred, | |
1664 &same_valid); | |
1665 if (newvuse == NULL_TREE) | |
1666 { | |
1667 VEC_free (vn_reference_op_s, heap, newoperands); | |
1668 return NULL; | |
1669 } | |
1670 } | |
1671 | |
1672 if (changed || newvuse != vuse) | |
1615 { | 1673 { |
1616 unsigned int new_val_id; | 1674 unsigned int new_val_id; |
1617 pre_expr constant; | 1675 pre_expr constant; |
1618 | 1676 |
1619 tree result = vn_reference_lookup_pieces (newvuses, | 1677 tree result = vn_reference_lookup_pieces (newvuse, ref->set, |
1678 ref->type, | |
1620 newoperands, | 1679 newoperands, |
1621 &newref, true); | 1680 &newref, true); |
1622 if (newref) | 1681 if (newref) |
1623 VEC_free (vn_reference_op_s, heap, newoperands); | 1682 VEC_free (vn_reference_op_s, heap, newoperands); |
1624 | 1683 |
1642 new_val_id = newref->value_id; | 1701 new_val_id = newref->value_id; |
1643 get_or_alloc_expression_id (expr); | 1702 get_or_alloc_expression_id (expr); |
1644 } | 1703 } |
1645 else | 1704 else |
1646 { | 1705 { |
1647 new_val_id = get_next_value_id (); | 1706 if (changed || !same_valid) |
1648 VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions, | 1707 { |
1649 get_max_value_id() + 1); | 1708 new_val_id = get_next_value_id (); |
1650 newref = vn_reference_insert_pieces (newvuses, | 1709 VEC_safe_grow_cleared (bitmap_set_t, heap, |
1710 value_expressions, | |
1711 get_max_value_id() + 1); | |
1712 } | |
1713 else | |
1714 new_val_id = ref->value_id; | |
1715 newref = vn_reference_insert_pieces (newvuse, ref->set, | |
1716 ref->type, | |
1651 newoperands, | 1717 newoperands, |
1652 result, new_val_id); | 1718 result, new_val_id); |
1653 newoperands = NULL; | 1719 newoperands = NULL; |
1654 PRE_EXPR_REFERENCE (expr) = newref; | 1720 PRE_EXPR_REFERENCE (expr) = newref; |
1655 constant = fully_constant_expression (expr); | 1721 constant = fully_constant_expression (expr); |
1716 { | 1782 { |
1717 VEC (pre_expr, heap) *exprs; | 1783 VEC (pre_expr, heap) *exprs; |
1718 pre_expr expr; | 1784 pre_expr expr; |
1719 int i; | 1785 int i; |
1720 | 1786 |
1721 if (gimple_seq_empty_p (phi_nodes (phiblock))) | 1787 if (!phi_nodes (phiblock)) |
1722 { | 1788 { |
1723 bitmap_set_copy (dest, set); | 1789 bitmap_set_copy (dest, set); |
1724 return; | 1790 return; |
1725 } | 1791 } |
1726 | 1792 |
1806 ANTIC_IN set already. */ | 1872 ANTIC_IN set already. */ |
1807 | 1873 |
1808 static bool | 1874 static bool |
1809 value_dies_in_block_x (pre_expr expr, basic_block block) | 1875 value_dies_in_block_x (pre_expr expr, basic_block block) |
1810 { | 1876 { |
1811 int i; | 1877 tree vuse = PRE_EXPR_REFERENCE (expr)->vuse; |
1812 tree vuse; | 1878 vn_reference_t refx = PRE_EXPR_REFERENCE (expr); |
1813 VEC (tree, gc) *vuses = PRE_EXPR_REFERENCE (expr)->vuses; | 1879 gimple def; |
1814 | 1880 gimple_stmt_iterator gsi; |
1815 /* Conservatively, a value dies if it's vuses are defined in this | 1881 unsigned id = get_expression_id (expr); |
1816 block, unless they come from phi nodes (which are merge operations, | 1882 bool res = false; |
1817 rather than stores. */ | 1883 ao_ref ref; |
1818 for (i = 0; VEC_iterate (tree, vuses, i, vuse); i++) | 1884 |
1819 { | 1885 if (!vuse) |
1820 gimple def = SSA_NAME_DEF_STMT (vuse); | 1886 return false; |
1821 | 1887 |
1822 if (gimple_bb (def) != block) | 1888 /* Lookup a previously calculated result. */ |
1889 if (EXPR_DIES (block) | |
1890 && bitmap_bit_p (EXPR_DIES (block), id * 2)) | |
1891 return bitmap_bit_p (EXPR_DIES (block), id * 2 + 1); | |
1892 | |
1893 /* A memory expression {e, VUSE} dies in the block if there is a | |
1894 statement that may clobber e. If, starting statement walk from the | |
1895 top of the basic block, a statement uses VUSE there can be no kill | |
1896 inbetween that use and the original statement that loaded {e, VUSE}, | |
1897 so we can stop walking. */ | |
1898 ref.base = NULL_TREE; | |
1899 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi)) | |
1900 { | |
1901 tree def_vuse, def_vdef; | |
1902 def = gsi_stmt (gsi); | |
1903 def_vuse = gimple_vuse (def); | |
1904 def_vdef = gimple_vdef (def); | |
1905 | |
1906 /* Not a memory statement. */ | |
1907 if (!def_vuse) | |
1823 continue; | 1908 continue; |
1824 if (gimple_code (def) == GIMPLE_PHI) | 1909 |
1825 continue; | 1910 /* Not a may-def. */ |
1826 return true; | 1911 if (!def_vdef) |
1827 } | 1912 { |
1828 return false; | 1913 /* A load with the same VUSE, we're done. */ |
1914 if (def_vuse == vuse) | |
1915 break; | |
1916 | |
1917 continue; | |
1918 } | |
1919 | |
1920 /* Init ref only if we really need it. */ | |
1921 if (ref.base == NULL_TREE | |
1922 && !ao_ref_init_from_vn_reference (&ref, refx->set, refx->type, | |
1923 refx->operands)) | |
1924 { | |
1925 res = true; | |
1926 break; | |
1927 } | |
1928 /* If the statement may clobber expr, it dies. */ | |
1929 if (stmt_may_clobber_ref_p_1 (def, &ref)) | |
1930 { | |
1931 res = true; | |
1932 break; | |
1933 } | |
1934 } | |
1935 | |
1936 /* Remember the result. */ | |
1937 if (!EXPR_DIES (block)) | |
1938 EXPR_DIES (block) = BITMAP_ALLOC (&grand_bitmap_obstack); | |
1939 bitmap_set_bit (EXPR_DIES (block), id * 2); | |
1940 if (res) | |
1941 bitmap_set_bit (EXPR_DIES (block), id * 2 + 1); | |
1942 | |
1943 return res; | |
1829 } | 1944 } |
1830 | 1945 |
1831 | 1946 |
1832 #define union_contains_value(SET1, SET2, VAL) \ | 1947 #define union_contains_value(SET1, SET2, VAL) \ |
1833 (bitmap_set_contains_value ((SET1), (VAL)) \ | 1948 (bitmap_set_contains_value ((SET1), (VAL)) \ |
1885 | 2000 |
1886 /* Determine if the expression EXPR is valid in SET1 U SET2. | 2001 /* Determine if the expression EXPR is valid in SET1 U SET2. |
1887 ONLY SET2 CAN BE NULL. | 2002 ONLY SET2 CAN BE NULL. |
1888 This means that we have a leader for each part of the expression | 2003 This means that we have a leader for each part of the expression |
1889 (if it consists of values), or the expression is an SSA_NAME. | 2004 (if it consists of values), or the expression is an SSA_NAME. |
1890 For loads/calls, we also see if the vuses are killed in this block. | 2005 For loads/calls, we also see if the vuse is killed in this block. */ |
1891 */ | |
1892 | 2006 |
1893 static bool | 2007 static bool |
1894 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr, | 2008 valid_in_sets (bitmap_set_t set1, bitmap_set_t set2, pre_expr expr, |
1895 basic_block block) | 2009 basic_block block) |
1896 { | 2010 { |
1930 for (i = 0; VEC_iterate (vn_reference_op_s, ref->operands, i, vro); i++) | 2044 for (i = 0; VEC_iterate (vn_reference_op_s, ref->operands, i, vro); i++) |
1931 { | 2045 { |
1932 if (!vro_valid_in_sets (set1, set2, vro)) | 2046 if (!vro_valid_in_sets (set1, set2, vro)) |
1933 return false; | 2047 return false; |
1934 } | 2048 } |
2049 if (ref->vuse) | |
2050 { | |
2051 gimple def_stmt = SSA_NAME_DEF_STMT (ref->vuse); | |
2052 if (!gimple_nop_p (def_stmt) | |
2053 && gimple_bb (def_stmt) != block | |
2054 && !dominated_by_p (CDI_DOMINATORS, | |
2055 block, gimple_bb (def_stmt))) | |
2056 return false; | |
2057 } | |
1935 return !value_dies_in_block_x (expr, block); | 2058 return !value_dies_in_block_x (expr, block); |
1936 } | 2059 } |
1937 default: | 2060 default: |
1938 gcc_unreachable (); | 2061 gcc_unreachable (); |
1939 } | 2062 } |
2098 changed = true; | 2221 changed = true; |
2099 VEC_free (basic_block, heap, worklist); | 2222 VEC_free (basic_block, heap, worklist); |
2100 goto maybe_dump_sets; | 2223 goto maybe_dump_sets; |
2101 } | 2224 } |
2102 | 2225 |
2103 if (!gimple_seq_empty_p (phi_nodes (first))) | 2226 if (phi_nodes (first)) |
2104 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first); | 2227 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first); |
2105 else | 2228 else |
2106 bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first)); | 2229 bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first)); |
2107 | 2230 |
2108 for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++) | 2231 for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++) |
2109 { | 2232 { |
2110 if (!gimple_seq_empty_p (phi_nodes (bprime))) | 2233 if (phi_nodes (bprime)) |
2111 { | 2234 { |
2112 bitmap_set_t tmp = bitmap_set_new (); | 2235 bitmap_set_t tmp = bitmap_set_new (); |
2113 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime); | 2236 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime); |
2114 bitmap_set_and (ANTIC_OUT, tmp); | 2237 bitmap_set_and (ANTIC_OUT, tmp); |
2115 bitmap_set_free (tmp); | 2238 bitmap_set_free (tmp); |
2255 bitmap_iterator bi; | 2378 bitmap_iterator bi; |
2256 | 2379 |
2257 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi) | 2380 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi) |
2258 bitmap_value_insert_into_set (PA_OUT, | 2381 bitmap_value_insert_into_set (PA_OUT, |
2259 expression_for_id (i)); | 2382 expression_for_id (i)); |
2260 if (!gimple_seq_empty_p (phi_nodes (bprime))) | 2383 if (phi_nodes (bprime)) |
2261 { | 2384 { |
2262 bitmap_set_t pa_in = bitmap_set_new (); | 2385 bitmap_set_t pa_in = bitmap_set_new (); |
2263 phi_translate_set (pa_in, PA_IN (bprime), block, bprime); | 2386 phi_translate_set (pa_in, PA_IN (bprime), block, bprime); |
2264 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi) | 2387 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi) |
2265 bitmap_value_insert_into_set (PA_OUT, | 2388 bitmap_value_insert_into_set (PA_OUT, |
2427 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)) | 2550 if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)) |
2428 return true; | 2551 return true; |
2429 return false; | 2552 return false; |
2430 } | 2553 } |
2431 | 2554 |
2432 /* Return true if OP is an exception handler related operation, such as | 2555 /* Return true if OP is a tree which we can perform PRE on. |
2433 FILTER_EXPR or EXC_PTR_EXPR. */ | 2556 This may not match the operations we can value number, but in |
2434 | |
2435 static bool | |
2436 is_exception_related (gimple stmt) | |
2437 { | |
2438 return (is_gimple_assign (stmt) | |
2439 && (gimple_assign_rhs_code (stmt) == FILTER_EXPR | |
2440 || gimple_assign_rhs_code (stmt) == EXC_PTR_EXPR)); | |
2441 } | |
2442 | |
2443 /* Return true if OP is a tree which we can perform PRE on | |
2444 on. This may not match the operations we can value number, but in | |
2445 a perfect world would. */ | 2557 a perfect world would. */ |
2446 | 2558 |
2447 static bool | 2559 static bool |
2448 can_PRE_operation (tree op) | 2560 can_PRE_operation (tree op) |
2449 { | 2561 { |
2460 | 2572 |
2461 /* Inserted expressions are placed onto this worklist, which is used | 2573 /* Inserted expressions are placed onto this worklist, which is used |
2462 for performing quick dead code elimination of insertions we made | 2574 for performing quick dead code elimination of insertions we made |
2463 that didn't turn out to be necessary. */ | 2575 that didn't turn out to be necessary. */ |
2464 static VEC(gimple,heap) *inserted_exprs; | 2576 static VEC(gimple,heap) *inserted_exprs; |
2577 static bitmap inserted_phi_names; | |
2465 | 2578 |
2466 /* Pool allocated fake store expressions are placed onto this | 2579 /* Pool allocated fake store expressions are placed onto this |
2467 worklist, which, after performing dead code elimination, is walked | 2580 worklist, which, after performing dead code elimination, is walked |
2468 to see which expressions need to be put into GC'able memory */ | 2581 to see which expressions need to be put into GC'able memory */ |
2469 static VEC(gimple, heap) *need_creation; | 2582 static VEC(gimple, heap) *need_creation; |
2507 if (!sc) | 2620 if (!sc) |
2508 return NULL_TREE; | 2621 return NULL_TREE; |
2509 CALL_EXPR_STATIC_CHAIN (folded) = sc; | 2622 CALL_EXPR_STATIC_CHAIN (folded) = sc; |
2510 } | 2623 } |
2511 return folded; | 2624 return folded; |
2625 } | |
2626 break; | |
2627 case TARGET_MEM_REF: | |
2628 { | |
2629 vn_reference_op_t nextop = VEC_index (vn_reference_op_s, ref->operands, | |
2630 *operand); | |
2631 pre_expr op0expr; | |
2632 tree genop0 = NULL_TREE; | |
2633 tree baseop = create_component_ref_by_pieces_1 (block, ref, operand, | |
2634 stmts, domstmt); | |
2635 if (!baseop) | |
2636 return NULL_TREE; | |
2637 if (currop->op0) | |
2638 { | |
2639 op0expr = get_or_alloc_expr_for (currop->op0); | |
2640 genop0 = find_or_generate_expression (block, op0expr, | |
2641 stmts, domstmt); | |
2642 if (!genop0) | |
2643 return NULL_TREE; | |
2644 } | |
2645 if (DECL_P (baseop)) | |
2646 return build6 (TARGET_MEM_REF, currop->type, | |
2647 baseop, NULL_TREE, | |
2648 genop0, currop->op1, currop->op2, | |
2649 unshare_expr (nextop->op1)); | |
2650 else | |
2651 return build6 (TARGET_MEM_REF, currop->type, | |
2652 NULL_TREE, baseop, | |
2653 genop0, currop->op1, currop->op2, | |
2654 unshare_expr (nextop->op1)); | |
2512 } | 2655 } |
2513 break; | 2656 break; |
2514 case ADDR_EXPR: | 2657 case ADDR_EXPR: |
2515 if (currop->op0) | 2658 if (currop->op0) |
2516 { | 2659 { |
2587 tree genop0; | 2730 tree genop0; |
2588 tree genop1 = currop->op0; | 2731 tree genop1 = currop->op0; |
2589 pre_expr op1expr; | 2732 pre_expr op1expr; |
2590 tree genop2 = currop->op1; | 2733 tree genop2 = currop->op1; |
2591 pre_expr op2expr; | 2734 pre_expr op2expr; |
2592 tree genop3; | 2735 tree genop3 = currop->op2; |
2736 pre_expr op3expr; | |
2593 genop0 = create_component_ref_by_pieces_1 (block, ref, operand, | 2737 genop0 = create_component_ref_by_pieces_1 (block, ref, operand, |
2594 stmts, domstmt); | 2738 stmts, domstmt); |
2595 if (!genop0) | 2739 if (!genop0) |
2596 return NULL_TREE; | 2740 return NULL_TREE; |
2597 op1expr = get_or_alloc_expr_for (genop1); | 2741 op1expr = get_or_alloc_expr_for (genop1); |
2604 genop2 = find_or_generate_expression (block, op2expr, stmts, | 2748 genop2 = find_or_generate_expression (block, op2expr, stmts, |
2605 domstmt); | 2749 domstmt); |
2606 if (!genop2) | 2750 if (!genop2) |
2607 return NULL_TREE; | 2751 return NULL_TREE; |
2608 } | 2752 } |
2609 | 2753 if (genop3) |
2610 genop3 = currop->op2; | 2754 { |
2755 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0)); | |
2756 genop3 = size_binop (EXACT_DIV_EXPR, genop3, | |
2757 size_int (TYPE_ALIGN_UNIT (elmt_type))); | |
2758 op3expr = get_or_alloc_expr_for (genop3); | |
2759 genop3 = find_or_generate_expression (block, op3expr, stmts, | |
2760 domstmt); | |
2761 if (!genop3) | |
2762 return NULL_TREE; | |
2763 } | |
2611 return build4 (currop->opcode, currop->type, genop0, genop1, | 2764 return build4 (currop->opcode, currop->type, genop0, genop1, |
2612 genop2, genop3); | 2765 genop2, genop3); |
2613 } | 2766 } |
2614 case COMPONENT_REF: | 2767 case COMPONENT_REF: |
2615 { | 2768 { |
2764 static tree | 2917 static tree |
2765 create_expression_by_pieces (basic_block block, pre_expr expr, | 2918 create_expression_by_pieces (basic_block block, pre_expr expr, |
2766 gimple_seq *stmts, gimple domstmt, tree type) | 2919 gimple_seq *stmts, gimple domstmt, tree type) |
2767 { | 2920 { |
2768 tree temp, name; | 2921 tree temp, name; |
2769 tree folded, newexpr; | 2922 tree folded; |
2770 gimple_seq forced_stmts; | 2923 gimple_seq forced_stmts = NULL; |
2771 unsigned int value_id; | 2924 unsigned int value_id; |
2772 gimple_stmt_iterator gsi; | 2925 gimple_stmt_iterator gsi; |
2773 tree exprtype = type ? type : get_expr_type (expr); | 2926 tree exprtype = type ? type : get_expr_type (expr); |
2774 pre_expr nameexpr; | 2927 pre_expr nameexpr; |
2775 gimple newstmt; | 2928 gimple newstmt; |
2811 may be a constant with the wrong type. */ | 2964 may be a constant with the wrong type. */ |
2812 if (nary->opcode == POINTER_PLUS_EXPR) | 2965 if (nary->opcode == POINTER_PLUS_EXPR) |
2813 genop2 = fold_convert (sizetype, genop2); | 2966 genop2 = fold_convert (sizetype, genop2); |
2814 else | 2967 else |
2815 genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2); | 2968 genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2); |
2816 | 2969 |
2817 folded = fold_build2 (nary->opcode, nary->type, | 2970 folded = fold_build2 (nary->opcode, nary->type, |
2818 genop1, genop2); | 2971 genop1, genop2); |
2819 } | 2972 } |
2820 break; | 2973 break; |
2821 case 1: | 2974 case 1: |
2837 } | 2990 } |
2838 break; | 2991 break; |
2839 default: | 2992 default: |
2840 return NULL_TREE; | 2993 return NULL_TREE; |
2841 } | 2994 } |
2842 folded = fold_convert (exprtype, folded); | 2995 |
2996 if (!useless_type_conversion_p (exprtype, TREE_TYPE (folded))) | |
2997 folded = fold_convert (exprtype, folded); | |
2998 | |
2843 /* Force the generated expression to be a sequence of GIMPLE | 2999 /* Force the generated expression to be a sequence of GIMPLE |
2844 statements. | 3000 statements. |
2845 We have to call unshare_expr because force_gimple_operand may | 3001 We have to call unshare_expr because force_gimple_operand may |
2846 modify the tree we pass to it. */ | 3002 modify the tree we pass to it. */ |
2847 newexpr = force_gimple_operand (unshare_expr (folded), &forced_stmts, | 3003 folded = force_gimple_operand (unshare_expr (folded), &forced_stmts, |
2848 false, NULL); | 3004 false, NULL); |
2849 | 3005 |
2850 /* If we have any intermediate expressions to the value sets, add them | 3006 /* If we have any intermediate expressions to the value sets, add them |
2851 to the value sets and chain them in the instruction stream. */ | 3007 to the value sets and chain them in the instruction stream. */ |
2852 if (forced_stmts) | 3008 if (forced_stmts) |
2853 { | 3009 { |
2887 | 3043 |
2888 if (TREE_CODE (exprtype) == COMPLEX_TYPE | 3044 if (TREE_CODE (exprtype) == COMPLEX_TYPE |
2889 || TREE_CODE (exprtype) == VECTOR_TYPE) | 3045 || TREE_CODE (exprtype) == VECTOR_TYPE) |
2890 DECL_GIMPLE_REG_P (temp) = 1; | 3046 DECL_GIMPLE_REG_P (temp) = 1; |
2891 | 3047 |
2892 newstmt = gimple_build_assign (temp, newexpr); | 3048 newstmt = gimple_build_assign (temp, folded); |
2893 name = make_ssa_name (temp, newstmt); | 3049 name = make_ssa_name (temp, newstmt); |
2894 gimple_assign_set_lhs (newstmt, name); | 3050 gimple_assign_set_lhs (newstmt, name); |
2895 gimple_set_plf (newstmt, NECESSARY, false); | 3051 gimple_set_plf (newstmt, NECESSARY, false); |
2896 | 3052 |
2897 gimple_seq_add_stmt (stmts, newstmt); | 3053 gimple_seq_add_stmt (stmts, newstmt); |
2924 | 3080 |
2925 return name; | 3081 return name; |
2926 } | 3082 } |
2927 | 3083 |
2928 | 3084 |
/* Returns true if we want to inhibit the insertions of PHI nodes
   for the given EXPR for basic block BB (a member of a loop).
   We want to do this, when we fear that the induction variable we
   create might inhibit vectorization.

   BB must lie inside a loop (bb->loop_father is dereferenced below);
   the caller guards this.  EXPR must be a REFERENCE expression, since
   PRE_EXPR_REFERENCE is applied unconditionally.  */

static bool
inhibit_phi_insertion (basic_block bb, pre_expr expr)
{
  vn_reference_t vr = PRE_EXPR_REFERENCE (expr);
  VEC (vn_reference_op_s, heap) *ops = vr->operands;
  vn_reference_op_t op;
  unsigned i;

  /* If we aren't going to vectorize we don't inhibit anything.  */
  if (!flag_tree_vectorize)
    return false;

  /* Otherwise we inhibit the insertion when the address of the
     memory reference is a simple induction variable.  In other
     cases the vectorizer won't do anything anyway (either it's
     loop invariant or a complicated expression).  */
  for (i = 0; VEC_iterate (vn_reference_op_s, ops, i, op); ++i)
    {
      switch (op->opcode)
	{
	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* NOTE(review): op->op0 here is presumably the array index
	     operand as recorded by SCCVN — confirm against the
	     vn_reference_op layout in tree-ssa-sccvn.c.  */
	  if (TREE_CODE (op->op0) != SSA_NAME)
	    break;
	  /* Fallthru.  */
	case SSA_NAME:
	  {
	    basic_block defbb = gimple_bb (SSA_NAME_DEF_STMT (op->op0));
	    affine_iv iv;
	    /* Default defs are loop invariant.  */
	    if (!defbb)
	      break;
	    /* Defined outside this loop, also loop invariant.  */
	    if (!flow_bb_inside_loop_p (bb->loop_father, defbb))
	      break;
	    /* If it's a simple induction variable inhibit insertion,
	       the vectorizer might be interested in this one.  */
	    if (simple_iv (bb->loop_father, bb->loop_father,
			   op->op0, &iv, true))
	      return true;
	    /* No simple IV, vectorizer can't do anything, hence no
	       reason to inhibit the transformation for this operand.  */
	    break;
	  }
	default:
	  break;
	}
    }
  return false;
}
3140 | |
2929 /* Insert the to-be-made-available values of expression EXPRNUM for each | 3141 /* Insert the to-be-made-available values of expression EXPRNUM for each |
2930 predecessor, stored in AVAIL, into the predecessors of BLOCK, and | 3142 predecessor, stored in AVAIL, into the predecessors of BLOCK, and |
2931 merge the result with a phi node, given the same value number as | 3143 merge the result with a phi node, given the same value number as |
2932 NODE. Return true if we have inserted new stuff. */ | 3144 NODE. Return true if we have inserted new stuff. */ |
2933 | 3145 |
2954 print_pre_expr (dump_file, expr); | 3166 print_pre_expr (dump_file, expr); |
2955 fprintf (dump_file, " (%04d)\n", val); | 3167 fprintf (dump_file, " (%04d)\n", val); |
2956 } | 3168 } |
2957 | 3169 |
2958 /* Make sure we aren't creating an induction variable. */ | 3170 /* Make sure we aren't creating an induction variable. */ |
2959 if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2 | 3171 if (block->loop_depth > 0 && EDGE_COUNT (block->preds) == 2) |
2960 && expr->kind != REFERENCE) | |
2961 { | 3172 { |
2962 bool firstinsideloop = false; | 3173 bool firstinsideloop = false; |
2963 bool secondinsideloop = false; | 3174 bool secondinsideloop = false; |
2964 firstinsideloop = flow_bb_inside_loop_p (block->loop_father, | 3175 firstinsideloop = flow_bb_inside_loop_p (block->loop_father, |
2965 EDGE_PRED (block, 0)->src); | 3176 EDGE_PRED (block, 0)->src); |
2966 secondinsideloop = flow_bb_inside_loop_p (block->loop_father, | 3177 secondinsideloop = flow_bb_inside_loop_p (block->loop_father, |
2967 EDGE_PRED (block, 1)->src); | 3178 EDGE_PRED (block, 1)->src); |
2968 /* Induction variables only have one edge inside the loop. */ | 3179 /* Induction variables only have one edge inside the loop. */ |
2969 if (firstinsideloop ^ secondinsideloop) | 3180 if ((firstinsideloop ^ secondinsideloop) |
3181 && (expr->kind != REFERENCE | |
3182 || inhibit_phi_insertion (block, expr))) | |
2970 { | 3183 { |
2971 if (dump_file && (dump_flags & TDF_DETAILS)) | 3184 if (dump_file && (dump_flags & TDF_DETAILS)) |
2972 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n"); | 3185 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n"); |
2973 nophi = true; | 3186 nophi = true; |
2974 } | 3187 } |
3010 */ | 3223 */ |
3011 tree constant = PRE_EXPR_CONSTANT (eprime); | 3224 tree constant = PRE_EXPR_CONSTANT (eprime); |
3012 if (!useless_type_conversion_p (type, TREE_TYPE (constant))) | 3225 if (!useless_type_conversion_p (type, TREE_TYPE (constant))) |
3013 { | 3226 { |
3014 tree builtexpr = fold_convert (type, constant); | 3227 tree builtexpr = fold_convert (type, constant); |
3015 if (!is_gimple_min_invariant (builtexpr)) | 3228 if (!is_gimple_min_invariant (builtexpr)) |
3016 { | 3229 { |
3017 tree forcedexpr = force_gimple_operand (builtexpr, | 3230 tree forcedexpr = force_gimple_operand (builtexpr, |
3018 &stmts, true, | 3231 &stmts, true, |
3019 NULL); | 3232 NULL); |
3020 if (!is_gimple_min_invariant (forcedexpr)) | 3233 if (!is_gimple_min_invariant (forcedexpr)) |
3105 | 3318 |
3106 gimple_set_plf (phi, NECESSARY, false); | 3319 gimple_set_plf (phi, NECESSARY, false); |
3107 VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi); | 3320 VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi); |
3108 VN_INFO (gimple_phi_result (phi))->value_id = val; | 3321 VN_INFO (gimple_phi_result (phi))->value_id = val; |
3109 VEC_safe_push (gimple, heap, inserted_exprs, phi); | 3322 VEC_safe_push (gimple, heap, inserted_exprs, phi); |
3323 bitmap_set_bit (inserted_phi_names, | |
3324 SSA_NAME_VERSION (gimple_phi_result (phi))); | |
3110 FOR_EACH_EDGE (pred, ei, block->preds) | 3325 FOR_EACH_EDGE (pred, ei, block->preds) |
3111 { | 3326 { |
3112 pre_expr ae = avail[pred->src->index]; | 3327 pre_expr ae = avail[pred->src->index]; |
3113 gcc_assert (get_expr_type (ae) == type | 3328 gcc_assert (get_expr_type (ae) == type |
3114 || useless_type_conversion_p (type, get_expr_type (ae))); | 3329 || useless_type_conversion_p (type, get_expr_type (ae))); |
3115 if (ae->kind == CONSTANT) | 3330 if (ae->kind == CONSTANT) |
3116 add_phi_arg (phi, PRE_EXPR_CONSTANT (ae), pred); | 3331 add_phi_arg (phi, PRE_EXPR_CONSTANT (ae), pred, UNKNOWN_LOCATION); |
3117 else | 3332 else |
3118 add_phi_arg (phi, PRE_EXPR_NAME (avail[pred->src->index]), pred); | 3333 add_phi_arg (phi, PRE_EXPR_NAME (avail[pred->src->index]), pred, |
3334 UNKNOWN_LOCATION); | |
3119 } | 3335 } |
3120 | 3336 |
3121 newphi = get_or_alloc_expr_for_name (gimple_phi_result (phi)); | 3337 newphi = get_or_alloc_expr_for_name (gimple_phi_result (phi)); |
3122 add_to_value (val, newphi); | 3338 add_to_value (val, newphi); |
3123 | 3339 |
3192 edge pred; | 3408 edge pred; |
3193 basic_block bprime; | 3409 basic_block bprime; |
3194 pre_expr eprime = NULL; | 3410 pre_expr eprime = NULL; |
3195 edge_iterator ei; | 3411 edge_iterator ei; |
3196 pre_expr edoubleprime = NULL; | 3412 pre_expr edoubleprime = NULL; |
3413 bool do_insertion = false; | |
3197 | 3414 |
3198 val = get_expr_value_id (expr); | 3415 val = get_expr_value_id (expr); |
3199 if (bitmap_set_contains_value (PHI_GEN (block), val)) | 3416 if (bitmap_set_contains_value (PHI_GEN (block), val)) |
3200 continue; | 3417 continue; |
3201 if (bitmap_set_contains_value (AVAIL_OUT (dom), val)) | 3418 if (bitmap_set_contains_value (AVAIL_OUT (dom), val)) |
3243 } | 3460 } |
3244 else | 3461 else |
3245 { | 3462 { |
3246 avail[bprime->index] = edoubleprime; | 3463 avail[bprime->index] = edoubleprime; |
3247 by_some = true; | 3464 by_some = true; |
3465 /* We want to perform insertions to remove a redundancy on | |
3466 a path in the CFG we want to optimize for speed. */ | |
3467 if (optimize_edge_for_speed_p (pred)) | |
3468 do_insertion = true; | |
3248 if (first_s == NULL) | 3469 if (first_s == NULL) |
3249 first_s = edoubleprime; | 3470 first_s = edoubleprime; |
3250 else if (!pre_expr_eq (first_s, edoubleprime)) | 3471 else if (!pre_expr_eq (first_s, edoubleprime)) |
3251 all_same = false; | 3472 all_same = false; |
3252 } | 3473 } |
3253 } | 3474 } |
3254 /* If we can insert it, it's not the same value | 3475 /* If we can insert it, it's not the same value |
3255 already existing along every predecessor, and | 3476 already existing along every predecessor, and |
3256 it's defined by some predecessor, it is | 3477 it's defined by some predecessor, it is |
3257 partially redundant. */ | 3478 partially redundant. */ |
3258 if (!cant_insert && !all_same && by_some && dbg_cnt (treepre_insert)) | 3479 if (!cant_insert && !all_same && by_some && do_insertion |
3480 && dbg_cnt (treepre_insert)) | |
3259 { | 3481 { |
3260 if (insert_into_preds_of_block (block, get_expression_id (expr), | 3482 if (insert_into_preds_of_block (block, get_expression_id (expr), |
3261 avail)) | 3483 avail)) |
3262 new_stuff = true; | 3484 new_stuff = true; |
3263 } | 3485 } |
3528 { | 3750 { |
3529 | 3751 |
3530 basic_block block, son; | 3752 basic_block block, son; |
3531 basic_block *worklist; | 3753 basic_block *worklist; |
3532 size_t sp = 0; | 3754 size_t sp = 0; |
3533 tree param; | 3755 unsigned i; |
3534 | 3756 |
3535 /* For arguments with default definitions, we pretend they are | 3757 /* We pretend that default definitions are defined in the entry block. |
3536 defined in the entry block. */ | 3758 This includes function arguments and the static chain decl. */ |
3537 for (param = DECL_ARGUMENTS (current_function_decl); | 3759 for (i = 1; i < num_ssa_names; ++i) |
3538 param; | 3760 { |
3539 param = TREE_CHAIN (param)) | 3761 tree name = ssa_name (i); |
3540 { | 3762 pre_expr e; |
3541 if (gimple_default_def (cfun, param) != NULL) | 3763 if (!name |
3542 { | 3764 || !SSA_NAME_IS_DEFAULT_DEF (name) |
3543 tree def = gimple_default_def (cfun, param); | 3765 || has_zero_uses (name) |
3544 pre_expr e = get_or_alloc_expr_for_name (def); | 3766 || !is_gimple_reg (name)) |
3545 | 3767 continue; |
3546 add_to_value (get_expr_value_id (e), e); | 3768 |
3547 if (!in_fre) | 3769 e = get_or_alloc_expr_for_name (name); |
3548 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e); | 3770 add_to_value (get_expr_value_id (e), e); |
3549 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e); | 3771 if (!in_fre) |
3550 } | 3772 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e); |
3551 } | 3773 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e); |
3552 | |
3553 /* Likewise for the static chain decl. */ | |
3554 if (cfun->static_chain_decl) | |
3555 { | |
3556 param = cfun->static_chain_decl; | |
3557 if (gimple_default_def (cfun, param) != NULL) | |
3558 { | |
3559 tree def = gimple_default_def (cfun, param); | |
3560 pre_expr e = get_or_alloc_expr_for_name (def); | |
3561 | |
3562 add_to_value (get_expr_value_id (e), e); | |
3563 if (!in_fre) | |
3564 bitmap_insert_into_set (TMP_GEN (ENTRY_BLOCK_PTR), e); | |
3565 bitmap_value_insert_into_set (AVAIL_OUT (ENTRY_BLOCK_PTR), e); | |
3566 } | |
3567 } | 3774 } |
3568 | 3775 |
3569 /* Allocate the worklist. */ | 3776 /* Allocate the worklist. */ |
3570 worklist = XNEWVEC (basic_block, n_basic_blocks); | 3777 worklist = XNEWVEC (basic_block, n_basic_blocks); |
3571 | 3778 |
3638 | 3845 |
3639 if (!can_value_number_call (stmt)) | 3846 if (!can_value_number_call (stmt)) |
3640 continue; | 3847 continue; |
3641 | 3848 |
3642 copy_reference_ops_from_call (stmt, &ops); | 3849 copy_reference_ops_from_call (stmt, &ops); |
3643 vn_reference_lookup_pieces (shared_vuses_from_stmt (stmt), | 3850 vn_reference_lookup_pieces (gimple_vuse (stmt), 0, |
3851 gimple_expr_type (stmt), | |
3644 ops, &ref, false); | 3852 ops, &ref, false); |
3645 VEC_free (vn_reference_op_s, heap, ops); | 3853 VEC_free (vn_reference_op_s, heap, ops); |
3646 if (!ref) | 3854 if (!ref) |
3647 continue; | 3855 continue; |
3648 | 3856 |
3673 { | 3881 { |
3674 pre_expr result = NULL; | 3882 pre_expr result = NULL; |
3675 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))) | 3883 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))) |
3676 { | 3884 { |
3677 case tcc_unary: | 3885 case tcc_unary: |
3678 if (is_exception_related (stmt)) | |
3679 continue; | |
3680 case tcc_binary: | 3886 case tcc_binary: |
3681 case tcc_comparison: | 3887 case tcc_comparison: |
3682 { | 3888 { |
3683 vn_nary_op_t nary; | 3889 vn_nary_op_t nary; |
3684 unsigned int i; | 3890 unsigned int i; |
3710 vn_reference_t ref; | 3916 vn_reference_t ref; |
3711 unsigned int i; | 3917 unsigned int i; |
3712 vn_reference_op_t vro; | 3918 vn_reference_op_t vro; |
3713 | 3919 |
3714 vn_reference_lookup (gimple_assign_rhs1 (stmt), | 3920 vn_reference_lookup (gimple_assign_rhs1 (stmt), |
3715 shared_vuses_from_stmt (stmt), | 3921 gimple_vuse (stmt), |
3716 false, &ref); | 3922 true, &ref); |
3717 if (!ref) | 3923 if (!ref) |
3718 continue; | 3924 continue; |
3719 | 3925 |
3720 for (i = 0; VEC_iterate (vn_reference_op_s, | 3926 for (i = 0; VEC_iterate (vn_reference_op_s, |
3721 ref->operands, i, | 3927 ref->operands, i, |
3801 /* Eliminate fully redundant computations. */ | 4007 /* Eliminate fully redundant computations. */ |
3802 | 4008 |
3803 static unsigned int | 4009 static unsigned int |
3804 eliminate (void) | 4010 eliminate (void) |
3805 { | 4011 { |
4012 VEC (gimple, heap) *to_remove = NULL; | |
3806 basic_block b; | 4013 basic_block b; |
3807 unsigned int todo = 0; | 4014 unsigned int todo = 0; |
4015 gimple_stmt_iterator gsi; | |
4016 gimple stmt; | |
4017 unsigned i; | |
3808 | 4018 |
3809 FOR_EACH_BB (b) | 4019 FOR_EACH_BB (b) |
3810 { | 4020 { |
3811 gimple_stmt_iterator i; | 4021 for (gsi = gsi_start_bb (b); !gsi_end_p (gsi); gsi_next (&gsi)) |
3812 | |
3813 for (i = gsi_start_bb (b); !gsi_end_p (i); gsi_next (&i)) | |
3814 { | 4022 { |
3815 gimple stmt = gsi_stmt (i); | 4023 stmt = gsi_stmt (gsi); |
3816 | 4024 |
3817 /* Lookup the RHS of the expression, see if we have an | 4025 /* Lookup the RHS of the expression, see if we have an |
3818 available computation for it. If so, replace the RHS with | 4026 available computation for it. If so, replace the RHS with |
3819 the available computation. */ | 4027 the available computation. */ |
3820 if (gimple_has_lhs (stmt) | 4028 if (gimple_has_lhs (stmt) |
3850 | 4058 |
3851 /* If there is no existing leader but SCCVN knows this | 4059 /* If there is no existing leader but SCCVN knows this |
3852 value is constant, use that constant. */ | 4060 value is constant, use that constant. */ |
3853 if (!sprime && is_gimple_min_invariant (VN_INFO (lhs)->valnum)) | 4061 if (!sprime && is_gimple_min_invariant (VN_INFO (lhs)->valnum)) |
3854 { | 4062 { |
3855 sprime = fold_convert (TREE_TYPE (lhs), | 4063 sprime = VN_INFO (lhs)->valnum; |
3856 VN_INFO (lhs)->valnum); | 4064 if (!useless_type_conversion_p (TREE_TYPE (lhs), |
4065 TREE_TYPE (sprime))) | |
4066 sprime = fold_convert (TREE_TYPE (lhs), sprime); | |
3857 | 4067 |
3858 if (dump_file && (dump_flags & TDF_DETAILS)) | 4068 if (dump_file && (dump_flags & TDF_DETAILS)) |
3859 { | 4069 { |
3860 fprintf (dump_file, "Replaced "); | 4070 fprintf (dump_file, "Replaced "); |
3861 print_gimple_expr (dump_file, stmt, 0, 0); | 4071 print_gimple_expr (dump_file, stmt, 0, 0); |
3863 print_generic_expr (dump_file, sprime, 0); | 4073 print_generic_expr (dump_file, sprime, 0); |
3864 fprintf (dump_file, " in "); | 4074 fprintf (dump_file, " in "); |
3865 print_gimple_stmt (dump_file, stmt, 0, 0); | 4075 print_gimple_stmt (dump_file, stmt, 0, 0); |
3866 } | 4076 } |
3867 pre_stats.eliminations++; | 4077 pre_stats.eliminations++; |
3868 propagate_tree_value_into_stmt (&i, sprime); | 4078 propagate_tree_value_into_stmt (&gsi, sprime); |
3869 stmt = gsi_stmt (i); | 4079 stmt = gsi_stmt (gsi); |
3870 update_stmt (stmt); | 4080 update_stmt (stmt); |
3871 continue; | 4081 continue; |
3872 } | 4082 } |
3873 | 4083 |
3874 /* If there is no existing usable leader but SCCVN thinks | 4084 /* If there is no existing usable leader but SCCVN thinks |
3911 && !useless_type_conversion_p (gimple_expr_type (stmt), | 4121 && !useless_type_conversion_p (gimple_expr_type (stmt), |
3912 TREE_TYPE (sprime))) | 4122 TREE_TYPE (sprime))) |
3913 sprime = fold_convert (gimple_expr_type (stmt), sprime); | 4123 sprime = fold_convert (gimple_expr_type (stmt), sprime); |
3914 | 4124 |
3915 pre_stats.eliminations++; | 4125 pre_stats.eliminations++; |
3916 propagate_tree_value_into_stmt (&i, sprime); | 4126 propagate_tree_value_into_stmt (&gsi, sprime); |
3917 stmt = gsi_stmt (i); | 4127 stmt = gsi_stmt (gsi); |
3918 update_stmt (stmt); | 4128 update_stmt (stmt); |
3919 | 4129 |
3920 /* If we removed EH side effects from the statement, clean | 4130 /* If we removed EH side effects from the statement, clean |
3921 its EH information. */ | 4131 its EH information. */ |
3922 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)) | 4132 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)) |
3924 bitmap_set_bit (need_eh_cleanup, | 4134 bitmap_set_bit (need_eh_cleanup, |
3925 gimple_bb (stmt)->index); | 4135 gimple_bb (stmt)->index); |
3926 if (dump_file && (dump_flags & TDF_DETAILS)) | 4136 if (dump_file && (dump_flags & TDF_DETAILS)) |
3927 fprintf (dump_file, " Removed EH side effects.\n"); | 4137 fprintf (dump_file, " Removed EH side effects.\n"); |
3928 } | 4138 } |
4139 } | |
4140 } | |
4141 /* If the statement is a scalar store, see if the expression | |
4142 has the same value number as its rhs. If so, the store is | |
4143 dead. */ | |
4144 else if (gimple_assign_single_p (stmt) | |
4145 && !is_gimple_reg (gimple_assign_lhs (stmt)) | |
4146 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME | |
4147 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))) | |
4148 { | |
4149 tree rhs = gimple_assign_rhs1 (stmt); | |
4150 tree val; | |
4151 val = vn_reference_lookup (gimple_assign_lhs (stmt), | |
4152 gimple_vuse (stmt), true, NULL); | |
4153 if (TREE_CODE (rhs) == SSA_NAME) | |
4154 rhs = VN_INFO (rhs)->valnum; | |
4155 if (val | |
4156 && operand_equal_p (val, rhs, 0)) | |
4157 { | |
4158 if (dump_file && (dump_flags & TDF_DETAILS)) | |
4159 { | |
4160 fprintf (dump_file, "Deleted redundant store "); | |
4161 print_gimple_stmt (dump_file, stmt, 0, 0); | |
4162 } | |
4163 | |
4164 /* Queue stmt for removal. */ | |
4165 VEC_safe_push (gimple, heap, to_remove, stmt); | |
3929 } | 4166 } |
3930 } | 4167 } |
3931 /* Visit COND_EXPRs and fold the comparison with the | 4168 /* Visit COND_EXPRs and fold the comparison with the |
3932 available value-numbers. */ | 4169 available value-numbers. */ |
3933 else if (gimple_code (stmt) == GIMPLE_COND) | 4170 else if (gimple_code (stmt) == GIMPLE_COND) |
3950 gimple_cond_make_true (stmt); | 4187 gimple_cond_make_true (stmt); |
3951 update_stmt (stmt); | 4188 update_stmt (stmt); |
3952 todo = TODO_cleanup_cfg; | 4189 todo = TODO_cleanup_cfg; |
3953 } | 4190 } |
3954 } | 4191 } |
4192 /* Visit indirect calls and turn them into direct calls if | |
4193 possible. */ | |
4194 if (gimple_code (stmt) == GIMPLE_CALL | |
4195 && TREE_CODE (gimple_call_fn (stmt)) == SSA_NAME) | |
4196 { | |
4197 tree fn = VN_INFO (gimple_call_fn (stmt))->valnum; | |
4198 if (TREE_CODE (fn) == ADDR_EXPR | |
4199 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL) | |
4200 { | |
4201 if (dump_file && (dump_flags & TDF_DETAILS)) | |
4202 { | |
4203 fprintf (dump_file, "Replacing call target with "); | |
4204 print_generic_expr (dump_file, fn, 0); | |
4205 fprintf (dump_file, " in "); | |
4206 print_gimple_stmt (dump_file, stmt, 0, 0); | |
4207 } | |
4208 | |
4209 gimple_call_set_fn (stmt, fn); | |
4210 update_stmt (stmt); | |
4211 if (maybe_clean_or_replace_eh_stmt (stmt, stmt)) | |
4212 { | |
4213 bitmap_set_bit (need_eh_cleanup, | |
4214 gimple_bb (stmt)->index); | |
4215 if (dump_file && (dump_flags & TDF_DETAILS)) | |
4216 fprintf (dump_file, " Removed EH side effects.\n"); | |
4217 } | |
4218 | |
4219 /* Changing an indirect call to a direct call may | |
4220 have exposed different semantics. This may | |
4221 require an SSA update. */ | |
4222 todo |= TODO_update_ssa_only_virtuals; | |
4223 } | |
4224 } | |
3955 } | 4225 } |
3956 } | 4226 |
4227 for (gsi = gsi_start_phis (b); !gsi_end_p (gsi);) | |
4228 { | |
4229 gimple stmt, phi = gsi_stmt (gsi); | |
4230 tree sprime = NULL_TREE, res = PHI_RESULT (phi); | |
4231 pre_expr sprimeexpr, resexpr; | |
4232 gimple_stmt_iterator gsi2; | |
4233 | |
4234 /* We want to perform redundant PHI elimination. Do so by | |
4235 replacing the PHI with a single copy if possible. | |
4236 Do not touch inserted, single-argument or virtual PHIs. */ | |
4237 if (gimple_phi_num_args (phi) == 1 | |
4238 || !is_gimple_reg (res) | |
4239 || bitmap_bit_p (inserted_phi_names, SSA_NAME_VERSION (res))) | |
4240 { | |
4241 gsi_next (&gsi); | |
4242 continue; | |
4243 } | |
4244 | |
4245 resexpr = get_or_alloc_expr_for_name (res); | |
4246 sprimeexpr = bitmap_find_leader (AVAIL_OUT (b), | |
4247 get_expr_value_id (resexpr), NULL); | |
4248 if (sprimeexpr) | |
4249 { | |
4250 if (sprimeexpr->kind == CONSTANT) | |
4251 sprime = PRE_EXPR_CONSTANT (sprimeexpr); | |
4252 else if (sprimeexpr->kind == NAME) | |
4253 sprime = PRE_EXPR_NAME (sprimeexpr); | |
4254 else | |
4255 gcc_unreachable (); | |
4256 } | |
4257 if (!sprimeexpr | |
4258 || sprime == res) | |
4259 { | |
4260 gsi_next (&gsi); | |
4261 continue; | |
4262 } | |
4263 | |
4264 if (dump_file && (dump_flags & TDF_DETAILS)) | |
4265 { | |
4266 fprintf (dump_file, "Replaced redundant PHI node defining "); | |
4267 print_generic_expr (dump_file, res, 0); | |
4268 fprintf (dump_file, " with "); | |
4269 print_generic_expr (dump_file, sprime, 0); | |
4270 fprintf (dump_file, "\n"); | |
4271 } | |
4272 | |
4273 remove_phi_node (&gsi, false); | |
4274 | |
4275 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime))) | |
4276 sprime = fold_convert (TREE_TYPE (res), sprime); | |
4277 stmt = gimple_build_assign (res, sprime); | |
4278 SSA_NAME_DEF_STMT (res) = stmt; | |
4279 if (TREE_CODE (sprime) == SSA_NAME) | |
4280 gimple_set_plf (SSA_NAME_DEF_STMT (sprime), | |
4281 NECESSARY, true); | |
4282 gsi2 = gsi_after_labels (b); | |
4283 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT); | |
4284 /* Queue the copy for eventual removal. */ | |
4285 VEC_safe_push (gimple, heap, to_remove, stmt); | |
4286 pre_stats.eliminations++; | |
4287 } | |
4288 } | |
4289 | |
4290 /* We cannot remove stmts during BB walk, especially not release SSA | |
4291 names there as this confuses the VN machinery. The stmts ending | |
4292 up in to_remove are either stores or simple copies. */ | |
4293 for (i = 0; VEC_iterate (gimple, to_remove, i, stmt); ++i) | |
4294 { | |
4295 tree lhs = gimple_assign_lhs (stmt); | |
4296 use_operand_p use_p; | |
4297 gimple use_stmt; | |
4298 | |
4299 /* If there is a single use only, propagate the equivalency | |
4300 instead of keeping the copy. */ | |
4301 if (TREE_CODE (lhs) == SSA_NAME | |
4302 && single_imm_use (lhs, &use_p, &use_stmt) | |
4303 && may_propagate_copy (USE_FROM_PTR (use_p), | |
4304 gimple_assign_rhs1 (stmt))) | |
4305 { | |
4306 SET_USE (use_p, gimple_assign_rhs1 (stmt)); | |
4307 update_stmt (use_stmt); | |
4308 } | |
4309 | |
4310 /* If this is a store or a now unused copy, remove it. */ | |
4311 if (TREE_CODE (lhs) != SSA_NAME | |
4312 || has_zero_uses (lhs)) | |
4313 { | |
4314 gsi = gsi_for_stmt (stmt); | |
4315 unlink_stmt_vdef (stmt); | |
4316 gsi_remove (&gsi, true); | |
4317 release_defs (stmt); | |
4318 } | |
4319 } | |
4320 VEC_free (gimple, heap, to_remove); | |
3957 | 4321 |
3958 return todo; | 4322 return todo; |
3959 } | 4323 } |
3960 | 4324 |
3961 /* Borrow a bit of tree-ssa-dce.c for the moment. | 4325 /* Borrow a bit of tree-ssa-dce.c for the moment. |
4064 | 4428 |
4065 gsi = gsi_for_stmt (t); | 4429 gsi = gsi_for_stmt (t); |
4066 if (gimple_code (t) == GIMPLE_PHI) | 4430 if (gimple_code (t) == GIMPLE_PHI) |
4067 remove_phi_node (&gsi, true); | 4431 remove_phi_node (&gsi, true); |
4068 else | 4432 else |
4069 gsi_remove (&gsi, true); | 4433 { |
4070 release_defs (t); | 4434 gsi_remove (&gsi, true); |
4435 release_defs (t); | |
4436 } | |
4071 } | 4437 } |
4072 } | 4438 } |
4073 VEC_free (gimple, heap, worklist); | 4439 VEC_free (gimple, heap, worklist); |
4074 } | 4440 } |
4075 | 4441 |
4107 | 4473 |
4108 calculate_dominance_info (CDI_POST_DOMINATORS); | 4474 calculate_dominance_info (CDI_POST_DOMINATORS); |
4109 calculate_dominance_info (CDI_DOMINATORS); | 4475 calculate_dominance_info (CDI_DOMINATORS); |
4110 | 4476 |
4111 bitmap_obstack_initialize (&grand_bitmap_obstack); | 4477 bitmap_obstack_initialize (&grand_bitmap_obstack); |
4478 inserted_phi_names = BITMAP_ALLOC (&grand_bitmap_obstack); | |
4112 phi_translate_table = htab_create (5110, expr_pred_trans_hash, | 4479 phi_translate_table = htab_create (5110, expr_pred_trans_hash, |
4113 expr_pred_trans_eq, free); | 4480 expr_pred_trans_eq, free); |
4114 expression_to_id = htab_create (num_ssa_names * 3, | 4481 expression_to_id = htab_create (num_ssa_names * 3, |
4115 pre_expr_hash, | 4482 pre_expr_hash, |
4116 pre_expr_eq, NULL); | 4483 pre_expr_eq, NULL); |
4169 | 4536 |
4170 /* Main entry point to the SSA-PRE pass. DO_FRE is true if the caller | 4537 /* Main entry point to the SSA-PRE pass. DO_FRE is true if the caller |
4171 only wants to do full redundancy elimination. */ | 4538 only wants to do full redundancy elimination. */ |
4172 | 4539 |
4173 static unsigned int | 4540 static unsigned int |
4174 execute_pre (bool do_fre ATTRIBUTE_UNUSED) | 4541 execute_pre (bool do_fre) |
4175 { | 4542 { |
4176 unsigned int todo = 0; | 4543 unsigned int todo = 0; |
4177 | 4544 |
4178 do_partial_partial = optimize > 2; | 4545 do_partial_partial = optimize > 2 && optimize_function_for_speed_p (cfun); |
4179 | 4546 |
4180 /* This has to happen before SCCVN runs because | 4547 /* This has to happen before SCCVN runs because |
4181 loop_optimizer_init may create new phis, etc. */ | 4548 loop_optimizer_init may create new phis, etc. */ |
4182 if (!do_fre) | 4549 if (!do_fre) |
4183 loop_optimizer_init (LOOPS_NORMAL); | 4550 loop_optimizer_init (LOOPS_NORMAL); |
4187 if (!do_fre) | 4554 if (!do_fre) |
4188 { | 4555 { |
4189 remove_dead_inserted_code (); | 4556 remove_dead_inserted_code (); |
4190 loop_optimizer_finalize (); | 4557 loop_optimizer_finalize (); |
4191 } | 4558 } |
4192 | 4559 |
4193 return 0; | 4560 return 0; |
4194 } | 4561 } |
4195 init_pre (do_fre); | 4562 init_pre (do_fre); |
4563 scev_initialize (); | |
4196 | 4564 |
4197 | 4565 |
4198 /* Collect and value number expressions computed in each basic block. */ | 4566 /* Collect and value number expressions computed in each basic block. */ |
4199 compute_avail (); | 4567 compute_avail (); |
4200 | 4568 |
4240 clear_expression_ids (); | 4608 clear_expression_ids (); |
4241 free_scc_vn (); | 4609 free_scc_vn (); |
4242 if (!do_fre) | 4610 if (!do_fre) |
4243 remove_dead_inserted_code (); | 4611 remove_dead_inserted_code (); |
4244 | 4612 |
4613 scev_finalize (); | |
4245 fini_pre (do_fre); | 4614 fini_pre (do_fre); |
4246 | 4615 |
4247 return todo; | 4616 return todo; |
4248 } | 4617 } |
4249 | 4618 |
4250 /* Gate and execute functions for PRE. */ | 4619 /* Gate and execute functions for PRE. */ |
4251 | 4620 |
4252 static unsigned int | 4621 static unsigned int |
4253 do_pre (void) | 4622 do_pre (void) |
4254 { | 4623 { |
4255 return TODO_rebuild_alias | execute_pre (false); | 4624 return execute_pre (false); |
4256 } | 4625 } |
4257 | 4626 |
4258 static bool | 4627 static bool |
4259 gate_pre (void) | 4628 gate_pre (void) |
4260 { | 4629 { |
4261 /* PRE tends to generate bigger code. */ | 4630 return flag_tree_pre != 0; |
4262 return flag_tree_pre != 0 && optimize_function_for_speed_p (cfun); | |
4263 } | 4631 } |
4264 | 4632 |
4265 struct gimple_opt_pass pass_pre = | 4633 struct gimple_opt_pass pass_pre = |
4266 { | 4634 { |
4267 { | 4635 { |
4272 NULL, /* sub */ | 4640 NULL, /* sub */ |
4273 NULL, /* next */ | 4641 NULL, /* next */ |
4274 0, /* static_pass_number */ | 4642 0, /* static_pass_number */ |
4275 TV_TREE_PRE, /* tv_id */ | 4643 TV_TREE_PRE, /* tv_id */ |
4276 PROP_no_crit_edges | PROP_cfg | 4644 PROP_no_crit_edges | PROP_cfg |
4277 | PROP_ssa | PROP_alias, /* properties_required */ | 4645 | PROP_ssa, /* properties_required */ |
4278 0, /* properties_provided */ | 4646 0, /* properties_provided */ |
4279 0, /* properties_destroyed */ | 4647 0, /* properties_destroyed */ |
4280 0, /* todo_flags_start */ | 4648 TODO_rebuild_alias, /* todo_flags_start */ |
4281 TODO_update_ssa_only_virtuals | TODO_dump_func | TODO_ggc_collect | 4649 TODO_update_ssa_only_virtuals | TODO_dump_func | TODO_ggc_collect |
4282 | TODO_verify_ssa /* todo_flags_finish */ | 4650 | TODO_verify_ssa /* todo_flags_finish */ |
4283 } | 4651 } |
4284 }; | 4652 }; |
4285 | 4653 |
4307 execute_fre, /* execute */ | 4675 execute_fre, /* execute */ |
4308 NULL, /* sub */ | 4676 NULL, /* sub */ |
4309 NULL, /* next */ | 4677 NULL, /* next */ |
4310 0, /* static_pass_number */ | 4678 0, /* static_pass_number */ |
4311 TV_TREE_FRE, /* tv_id */ | 4679 TV_TREE_FRE, /* tv_id */ |
4312 PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */ | 4680 PROP_cfg | PROP_ssa, /* properties_required */ |
4313 0, /* properties_provided */ | 4681 0, /* properties_provided */ |
4314 0, /* properties_destroyed */ | 4682 0, /* properties_destroyed */ |
4315 0, /* todo_flags_start */ | 4683 0, /* todo_flags_start */ |
4316 TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */ | 4684 TODO_dump_func | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */ |
4317 } | 4685 } |