comparison gcc/tree-ssa-dce.c @ 131:84e7813d76e9

gcc-8.2
author mir3636
date Thu, 25 Oct 2018 07:37:49 +0900
parents 04ced10e8804
children 1830386684a0
--- gcc/tree-ssa-dce.c	(111:04ced10e8804)
+++ gcc/tree-ssa-dce.c	(131:84e7813d76e9)
@@ -1,7 +1,7 @@
 /* Dead code elimination pass for the GNU compiler.
-Copyright (C) 2002-2017 Free Software Foundation, Inc.
+Copyright (C) 2002-2018 Free Software Foundation, Inc.
 Contributed by Ben Elliston <bje@redhat.com>
 and Andrew MacLeod <amacleod@redhat.com>
 Adapted to use control dependence by Steven Bosscher, SUSE Labs.
 
 This file is part of GCC.
@@ -63,11 +63,10 @@
 #include "tree-ssa-loop-niter.h"
 #include "tree-into-ssa.h"
 #include "tree-dfa.h"
 #include "cfgloop.h"
 #include "tree-scalar-evolution.h"
-#include "tree-chkp.h"
 #include "tree-ssa-propagate.h"
 #include "gimple-fold.h"
 
 static struct stmt_stats
 {
@@ -194,11 +193,11 @@
 {
 /* With non-call exceptions, we have to assume that all statements could
 throw. If a statement could throw, it can be deemed necessary. */
 if (cfun->can_throw_non_call_exceptions
 && !cfun->can_delete_dead_exceptions
-&& stmt_could_throw_p (stmt))
+&& stmt_could_throw_p (cfun, stmt))
 {
 mark_stmt_necessary (stmt, true);
 return;
 }
 
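Aside, not part of the patch: cfun->can_throw_non_call_exceptions corresponds to -fnon-call-exceptions and cfun->can_delete_dead_exceptions to -fdelete-dead-exceptions, so the hunk above only threads the function through stmt_could_throw_p; the logic is unchanged. A small illustration of the case it guards (editor's hypothetical test, assuming those flags):

/* With -fnon-call-exceptions this dead load may trap and throw, so DCE
   must keep it unless -fdelete-dead-exceptions permits removing it.  */
int
maybe_traps (int *p)
{
  int dead = *p;	/* never used, but potentially throwing */
  (void) dead;
  return 0;
}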
@@ -223,11 +222,11 @@
 
 case GIMPLE_CALL:
 {
 tree callee = gimple_call_fndecl (stmt);
 if (callee != NULL_TREE
-&& DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
+&& fndecl_built_in_p (callee, BUILT_IN_NORMAL))
 switch (DECL_FUNCTION_CODE (callee))
 {
 case BUILT_IN_MALLOC:
 case BUILT_IN_ALIGNED_ALLOC:
 case BUILT_IN_CALLOC:
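Aside, not part of the patch: fndecl_built_in_p is the newer tree.h predicate; with a built_in_class argument it is meant to be equivalent to the DECL_BUILT_IN_CLASS comparison it replaces here, and an overload taking a built_in_function code checks DECL_FUNCTION_CODE as well. A sketch under that assumption, with a hypothetical helper name:

/* Sketch: old and new spellings of the same test (GCC internals).  */
static bool
is_normal_builtin_p (tree callee)
{
  bool old_spelling = (callee != NULL_TREE
		       && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL);
  bool new_spelling = (callee != NULL_TREE
		       && fndecl_built_in_p (callee, BUILT_IN_NORMAL));
  gcc_checking_assert (old_spelling == new_spelling);
  /* The code-specific overload folds both checks into one call, e.g.
     fndecl_built_in_p (callee, BUILT_IN_MALLOC).  */
  return new_spelling;
}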
@@ -254,11 +253,12 @@
 case GIMPLE_DEBUG:
 /* Debug temps without a value are not useful. ??? If we could
 easily locate the debug temp bind stmt for a use thereof,
 would could refrain from marking all debug temps here, and
 mark them only if they're used. */
-if (!gimple_debug_bind_p (stmt)
+if (gimple_debug_nonbind_marker_p (stmt)
+|| !gimple_debug_bind_p (stmt)
 || gimple_debug_bind_has_value_p (stmt)
 || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
 mark_stmt_necessary (stmt, false);
 return;
 
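Aside, not part of the patch: gimple_debug_nonbind_marker_p matches the statement-frontier debug markers (e.g. BEGIN_STMT) that bind no value; the new first clause keeps them, since they would otherwise fall through the bind-oriented tests. The condition above, restated as a hypothetical helper:

/* Which GIMPLE_DEBUG stmts are obviously necessary (editor's sketch).  */
static bool
debug_stmt_obviously_necessary_p (gimple *stmt)
{
  return (gimple_debug_nonbind_marker_p (stmt)	/* e.g. a BEGIN_STMT marker */
	  || !gimple_debug_bind_p (stmt)
	  || gimple_debug_bind_has_value_p (stmt)
	  || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL);
}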
@@ -471,31 +471,27 @@
 and we can catch it in the current function where we could inspect
 the previous value.
 ??? We only need to care about the RHS throwing. For aggregate
 assignments or similar calls and non-call exceptions the LHS
 might throw as well. */
-&& !stmt_can_throw_internal (def_stmt))
+&& !stmt_can_throw_internal (cfun, def_stmt))
 {
 tree base, lhs = gimple_get_lhs (def_stmt);
-HOST_WIDE_INT size, offset, max_size;
+poly_int64 size, offset, max_size;
 bool reverse;
 ao_ref_base (ref);
 base
 = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
 so base == refd->base does not always hold. */
 if (base == ref->base)
 {
 /* For a must-alias check we need to be able to constrain
 the accesses properly. */
-if (size != -1 && size == max_size
-&& ref->max_size != -1)
-{
-if (offset <= ref->offset
-&& offset + size >= ref->offset + ref->max_size)
-return true;
-}
+if (known_eq (size, max_size)
+&& known_subrange_p (ref->offset, ref->max_size, offset, size))
+return true;
 /* Or they need to be exactly the same. */
 else if (ref->ref
 /* Make sure there is no induction variable involved
 in the references (gcc.c-torture/execute/pr42142.c).
 The simplest way is to check if the kill dominates
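Aside, not part of the patch: size, offset and max_size become poly_int64 so that extents which are polynomial in a runtime parameter can be tracked. With purely constant operands the new predicates reduce to the comparisons the removed code spelled out; the sketch below restates the removed check and, in the comment, the editor's reading of its replacement:

/* Editor's sketch: the removed constant-only containment test.  */
static bool
old_style_must_cover_p (HOST_WIDE_INT offset, HOST_WIDE_INT size,
			HOST_WIDE_INT max_size,
			HOST_WIDE_INT ref_offset, HOST_WIDE_INT ref_max_size)
{
  return (size != -1 && size == max_size
	  && ref_max_size != -1
	  && offset <= ref_offset
	  && offset + size >= ref_offset + ref_max_size);
  /* known_eq (size, max_size)
     && known_subrange_p (ref_offset, ref_max_size, offset, size)
     asks the same containment question for poly_int64 operands and only
     answers true when it holds for every possible runtime value, with
     unknown sizes (the old -1 sentinel) handled conservatively.  */
}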
@@ -567,11 +563,11 @@
 a virtual definition. */
 if (is_gimple_call (def_stmt))
 {
 tree callee = gimple_call_fndecl (def_stmt);
 if (callee != NULL_TREE
-&& DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
+&& fndecl_built_in_p (callee, BUILT_IN_NORMAL))
 switch (DECL_FUNCTION_CODE (callee))
 {
 case BUILT_IN_MALLOC:
 case BUILT_IN_ALIGNED_ALLOC:
 case BUILT_IN_CALLOC:
@@ -779,25 +775,11 @@
 && (def_callee = gimple_call_fndecl (def_stmt))
 && DECL_BUILT_IN_CLASS (def_callee) == BUILT_IN_NORMAL
 && (DECL_FUNCTION_CODE (def_callee) == BUILT_IN_ALIGNED_ALLOC
 || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_MALLOC
 || DECL_FUNCTION_CODE (def_callee) == BUILT_IN_CALLOC))
-{
-gimple *bounds_def_stmt;
-tree bounds;
-
-/* For instrumented calls we should also check used
-bounds are returned by the same allocation call. */
-if (!gimple_call_with_bounds_p (stmt)
-|| ((bounds = gimple_call_arg (stmt, 1))
-&& TREE_CODE (bounds) == SSA_NAME
-&& (bounds_def_stmt = SSA_NAME_DEF_STMT (bounds))
-&& chkp_gimple_call_builtin_p (bounds_def_stmt,
-BUILT_IN_CHKP_BNDRET)
-&& gimple_call_arg (bounds_def_stmt, 0) == ptr))
-continue;
-}
+continue;
 }
 
 FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
 mark_operand_necessary (use);
 
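Aside, not part of the patch: the surrounding code deliberately does not propagate necessity from a free to the allocation feeding it, so a dead allocation/free pair can be removed as a whole; the hunk merely drops the extra bookkeeping that MPX-instrumented calls needed. For example (editor's illustration):

/* Both statements are dead together: the allocation's only use is the
   free, and necessity is not propagated from free back to malloc.  */
void
dead_alloc_free_pair (void)
{
  void *p = __builtin_malloc (32);
  __builtin_free (p);
}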
@@ -1079,11 +1061,11 @@
 ei_next (&ei);
 }
 
 /* If this is a store into a variable that is being optimized away,
 add a debug bind stmt if possible. */
-if (MAY_HAVE_DEBUG_STMTS
+if (MAY_HAVE_DEBUG_BIND_STMTS
 && gimple_assign_single_p (stmt)
 && is_gimple_val (gimple_assign_rhs1 (stmt)))
 {
 tree lhs = gimple_assign_lhs (stmt);
 if ((VAR_P (lhs) || TREE_CODE (lhs) == PARM_DECL)
@@ -1276,27 +1258,10 @@
 {
 gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
 if (!gimple_nop_p (def_stmt)
 && !gimple_plf (def_stmt, STMT_NECESSARY))
 gimple_set_plf (stmt, STMT_NECESSARY, false);
-}
-/* We did not propagate necessity for free calls fed
-by allocation function to allow unnecessary
-alloc-free sequence elimination. For instrumented
-calls it also means we did not mark bounds producer
-as necessary and it is time to do it in case free
-call is not removed. */
-if (gimple_call_with_bounds_p (stmt))
-{
-gimple *bounds_def_stmt;
-tree bounds = gimple_call_arg (stmt, 1);
-gcc_assert (TREE_CODE (bounds) == SSA_NAME);
-bounds_def_stmt = SSA_NAME_DEF_STMT (bounds);
-if (bounds_def_stmt
-&& !gimple_plf (bounds_def_stmt, STMT_NECESSARY))
-gimple_set_plf (bounds_def_stmt, STMT_NECESSARY,
-gimple_plf (stmt, STMT_NECESSARY));
-}
 }
 }
 
 /* If GSI is not necessary then remove it. */
 if (!gimple_plf (stmt, STMT_NECESSARY))
@@ -1342,13 +1307,11 @@
 || DECL_BUILT_IN_CLASS (call) != BUILT_IN_NORMAL
 || (DECL_FUNCTION_CODE (call) != BUILT_IN_ALIGNED_ALLOC
 && DECL_FUNCTION_CODE (call) != BUILT_IN_MALLOC
 && DECL_FUNCTION_CODE (call) != BUILT_IN_CALLOC
 && !ALLOCA_FUNCTION_CODE_P
-(DECL_FUNCTION_CODE (call))))
-/* Avoid doing so for bndret calls for the same reason. */
-&& !chkp_gimple_call_builtin_p (stmt, BUILT_IN_CHKP_BNDRET))
+(DECL_FUNCTION_CODE (call)))))
 {
 something_changed = true;
 if (dump_file && (dump_flags & TDF_DETAILS))
 {
 fprintf (dump_file, "Deleting LHS of call: ");
@@ -1440,12 +1403,11 @@
 {
 /* Speed up the removal of blocks that don't
 dominate others. Walking backwards, this should
 be the common case. ??? Do we need to recompute
 dominators because of cfg_altered? */
-if (!MAY_HAVE_DEBUG_STMTS
-|| !first_dom_son (CDI_DOMINATORS, bb))
+if (!first_dom_son (CDI_DOMINATORS, bb))
 delete_basic_block (bb);
 else
 {
 h = get_all_dominated_blocks (CDI_DOMINATORS, bb);
 
@@ -1721,5 +1683,57 @@
 gimple_opt_pass *
 make_pass_cd_dce (gcc::context *ctxt)
 {
 return new pass_cd_dce (ctxt);
 }
+
+
+/* A cheap DCE interface. WORKLIST is a list of possibly dead stmts and
+is consumed by this function. The function has linear complexity in
+the number of dead stmts with a constant factor like the average SSA
+use operands number. */
+
+void
+simple_dce_from_worklist (bitmap worklist)
+{
+while (! bitmap_empty_p (worklist))
+{
+/* Pop item. */
+unsigned i = bitmap_first_set_bit (worklist);
+bitmap_clear_bit (worklist, i);
+
+tree def = ssa_name (i);
+/* Removed by somebody else or still in use. */
+if (! def || ! has_zero_uses (def))
+continue;
+
+gimple *t = SSA_NAME_DEF_STMT (def);
+if (gimple_has_side_effects (t))
+continue;
+
+/* Add uses to the worklist. */
+ssa_op_iter iter;
+use_operand_p use_p;
+FOR_EACH_PHI_OR_STMT_USE (use_p, t, iter, SSA_OP_USE)
+{
+tree use = USE_FROM_PTR (use_p);
+if (TREE_CODE (use) == SSA_NAME
+&& ! SSA_NAME_IS_DEFAULT_DEF (use))
+bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
+}
+
+/* Remove stmt. */
+if (dump_file && (dump_flags & TDF_DETAILS))
+{
+fprintf (dump_file, "Removing dead stmt:");
+print_gimple_stmt (dump_file, t, 0);
+}
+gimple_stmt_iterator gsi = gsi_for_stmt (t);
+if (gimple_code (t) == GIMPLE_PHI)
+remove_phi_node (&gsi, true);
+else
+{
+gsi_remove (&gsi, true);
+release_defs (t);
+}
+}
+}
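Aside, not part of the patch: a hedged usage sketch of the new entry point. A caller (typically another GIMPLE pass) records the SSA_NAME_VERSIONs of values it may have just made dead and hands over the bitmap, which the function consumes; the names below are hypothetical.

/* Hypothetical caller.  */
auto_bitmap maybe_dead;
/* ... after rewriting all uses of old_lhs away ...  */
if (TREE_CODE (old_lhs) == SSA_NAME)
  bitmap_set_bit (maybe_dead, SSA_NAME_VERSION (old_lhs));
simple_dce_from_worklist (maybe_dead);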