comparison gcc/tree-ssa-dse.c @ 145:1830386684a0

gcc-9.2.0
author anatofuz
date Thu, 13 Feb 2020 11:34:05 +0900
parents 84e7813d76e9
children
--- gcc/tree-ssa-dse.c  131:84e7813d76e9
+++ gcc/tree-ssa-dse.c  145:1830386684a0
@@ -1,7 +1,7 @@
-/* Dead store elimination
-   Copyright (C) 2004-2018 Free Software Foundation, Inc.
+/* Dead and redundant store elimination
+   Copyright (C) 2004-2020 Free Software Foundation, Inc.
 
 This file is part of GCC.
 
 GCC is free software; you can redistribute it and/or modify
 it under the terms of the GNU General Public License as published by
@@ -31,24 +31,34 @@
 #include "gimple-iterator.h"
 #include "tree-cfg.h"
 #include "tree-dfa.h"
 #include "domwalk.h"
 #include "tree-cfgcleanup.h"
-#include "params.h"
 #include "alias.h"
 #include "tree-ssa-loop.h"
+#include "tree-ssa-dse.h"
+#include "builtins.h"
+#include "gimple-fold.h"
 
 /* This file implements dead store elimination.
 
    A dead store is a store into a memory location which will later be
    overwritten by another store without any intervening loads.  In this
-   case the earlier store can be deleted.
+   case the earlier store can be deleted or trimmed if the store
+   was partially dead.
+
+   A redundant store is a store into a memory location which stores
+   the exact same value as a prior store to the same memory location.
+   While this can often be handled by dead store elimination, removing
+   the redundant store is often better than removing or trimming the
+   dead store.
 
    In our SSA + virtual operand world we use immediate uses of virtual
-   operands to detect dead stores.  If a store's virtual definition
+   operands to detect these cases.  If a store's virtual definition
    is used precisely once by a later store to the same location which
-   post dominates the first store, then the first store is dead.
+   post dominates the first store, then the first store is dead.  If
+   the data stored is the same, then the second store is redundant.
 
    The single use of the store's virtual definition ensures that
    there are no intervening aliased loads and the requirement that
    the second store post dominate the first ensures that if the earlier
    store executes, then the later stores will execute before the function
@@ -56,31 +66,26 @@
 
    It may help to think of this as first moving the earlier store to
    the point immediately before the later store.  Again, the single
    use of the virtual definition and the post-dominance relationship
    ensure that such movement would be safe.  Clearly if there are
-   back to back stores, then the second is redundant.
+   back to back stores, then the second makes the first dead.  If
+   the second store stores the same value, then the second store is
+   redundant.
 
    Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
    may also help in understanding this code since it discusses the
    relationship between dead store and redundant load elimination.  In
    fact, they are the same transformation applied to different views of
    the CFG.  */
 
+static void delete_dead_or_redundant_call (gimple_stmt_iterator *, const char *);
 
 /* Bitmap of blocks that have had EH statements cleaned.  We should
    remove their dead edges eventually.  */
 static bitmap need_eh_cleanup;
 
-/* Return value from dse_classify_store */
-enum dse_store_status
-{
-  DSE_STORE_LIVE,
-  DSE_STORE_MAYBE_PARTIAL_DEAD,
-  DSE_STORE_DEAD
-};
-
 /* STMT is a statement that may write into memory.  Analyze it and
    initialize WRITE to describe how STMT affects memory.
 
    Return TRUE if the statement was analyzed, FALSE otherwise.
 
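The distinction drawn in the comment above, as a minimal C sketch (editor's illustration, not part of the change; function names invented):

  void f (int *p)
  {
    *p = 1;   /* dead: overwritten below with no intervening load */
    *p = 2;
  }

  void g (int *p)
  {
    *p = 0;
    *p = 0;   /* redundant: stores the same value as the store above */
  }

Deleting the redundant store in g leaves the earlier, dominating store intact, which is why it is often preferable to removing or trimming the dead store instead.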
@@ -93,23 +98,46 @@
   /* It's advantageous to handle certain mem* functions.  */
   if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
     {
       switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
        {
        case BUILT_IN_MEMCPY:
        case BUILT_IN_MEMMOVE:
        case BUILT_IN_MEMSET:
-         {
-           tree size = NULL_TREE;
-           if (gimple_call_num_args (stmt) == 3)
-             size = gimple_call_arg (stmt, 2);
-           tree ptr = gimple_call_arg (stmt, 0);
-           ao_ref_init_from_ptr_and_size (write, ptr, size);
-           return true;
-         }
-       default:
-         break;
+       case BUILT_IN_MEMCPY_CHK:
+       case BUILT_IN_MEMMOVE_CHK:
+       case BUILT_IN_MEMSET_CHK:
+       case BUILT_IN_STRNCPY:
+       case BUILT_IN_STRNCPY_CHK:
+         {
+           tree size = gimple_call_arg (stmt, 2);
+           tree ptr = gimple_call_arg (stmt, 0);
+           ao_ref_init_from_ptr_and_size (write, ptr, size);
+           return true;
+         }
+
+       /* A calloc call can never be dead, but it can make
+          subsequent stores redundant if they store 0 into
+          the same memory locations.  */
+       case BUILT_IN_CALLOC:
+         {
+           tree nelem = gimple_call_arg (stmt, 0);
+           tree selem = gimple_call_arg (stmt, 1);
+           tree lhs;
+           if (TREE_CODE (nelem) == INTEGER_CST
+               && TREE_CODE (selem) == INTEGER_CST
+               && (lhs = gimple_call_lhs (stmt)) != NULL_TREE)
+             {
+               tree size = fold_build2 (MULT_EXPR, TREE_TYPE (nelem),
+                                        nelem, selem);
+               ao_ref_init_from_ptr_and_size (write, lhs, size);
+               return true;
+             }
+         }
+
+       default:
+         break;
        }
     }
   else if (is_gimple_assign (stmt))
     {
       ao_ref_init (write, gimple_assign_lhs (stmt));
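A sketch of why the new calloc case matters (editor's illustration, not from the patch; as the code above requires, both calloc arguments must be INTEGER_CSTs for the ao_ref to be initialized):

  #include <stdlib.h>
  #include <string.h>

  int *
  fresh_table (void)                     /* hypothetical function */
  {
    int *p = calloc (16, sizeof (int)); /* both arguments constant */
    if (p)
      memset (p, 0, 16 * sizeof (int)); /* stores 0 over bytes calloc zeroed */
    return p;
  }

The calloc itself can never be deleted, but modelling the region it zero-fills lets the pass prove the memset redundant.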
@@ -209,11 +237,11 @@
 {
   HOST_WIDE_INT const_size;
   if (valid_ao_ref_for_dse (ref)
       && ref->size.is_constant (&const_size)
       && (const_size / BITS_PER_UNIT
-         <= PARAM_VALUE (PARAM_DSE_MAX_OBJECT_SIZE)))
+         <= param_dse_max_object_size))
     {
       bitmap_clear (live_bytes);
       bitmap_set_range (live_bytes, 0, const_size / BITS_PER_UNIT);
       return true;
     }
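What byte tracking buys, as a hedged sketch (editor's illustration; it assumes the object is small enough to stay under the dse-max-object-size parameter checked above):

  #include <string.h>

  struct pair { int a, b; };    /* hypothetical type */

  void
  init (struct pair *p)
  {
    memset (p, 0, sizeof *p);   /* every byte re-stored below: dead */
    p->a = 1;
    p->b = 2;
  }

live_bytes starts with all sizeof (struct pair) bytes set; each field store clears its bytes until none remain live and the memset is classified dead.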
@@ -428,55 +456,119 @@
    likely of marginal value.  */
 
 static void
 maybe_trim_memstar_call (ao_ref *ref, sbitmap live, gimple *stmt)
 {
+  int head_trim, tail_trim;
   switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
     {
+    case BUILT_IN_STRNCPY:
+    case BUILT_IN_STRNCPY_CHK:
+      compute_trims (ref, live, &head_trim, &tail_trim, stmt);
+      if (head_trim)
+       {
+         /* Head trimming of strncpy is only possible if we can
+            prove all bytes we would trim are non-zero (or we could
+            turn the strncpy into memset if there must be zero
+            among the head trimmed bytes).  If we don't know anything
+            about those bytes, the presence or absence of '\0' bytes
+            in there will affect whether it acts for the non-trimmed
+            bytes as memset or memcpy/strncpy.  */
+         c_strlen_data lendata = { };
+         int orig_head_trim = head_trim;
+         tree srcstr = gimple_call_arg (stmt, 1);
+         if (!get_range_strlen (srcstr, &lendata, /*eltsize=*/1)
+             || !tree_fits_uhwi_p (lendata.minlen))
+           head_trim = 0;
+         else if (tree_to_uhwi (lendata.minlen) < (unsigned) head_trim)
+           {
+             head_trim = tree_to_uhwi (lendata.minlen);
+             if ((orig_head_trim & (UNITS_PER_WORD - 1)) == 0)
+               head_trim &= ~(UNITS_PER_WORD - 1);
+           }
+         if (orig_head_trim != head_trim
+             && dump_file
+             && (dump_flags & TDF_DETAILS))
+           fprintf (dump_file,
+                    "  Adjusting strncpy trimming to (head = %d,"
+                    " tail = %d)\n", head_trim, tail_trim);
+       }
+      goto do_memcpy;
+
     case BUILT_IN_MEMCPY:
     case BUILT_IN_MEMMOVE:
-      {
-       int head_trim, tail_trim;
-       compute_trims (ref, live, &head_trim, &tail_trim, stmt);
-
-       /* Tail trimming is easy, we can just reduce the count.  */
-       if (tail_trim)
-         decrement_count (stmt, tail_trim);
-
-       /* Head trimming requires adjusting all the arguments.  */
-       if (head_trim)
-         {
-           tree *dst = gimple_call_arg_ptr (stmt, 0);
-           increment_start_addr (stmt, dst, head_trim);
-           tree *src = gimple_call_arg_ptr (stmt, 1);
-           increment_start_addr (stmt, src, head_trim);
-           decrement_count (stmt, head_trim);
-         }
-       break;
-      }
+    case BUILT_IN_MEMCPY_CHK:
+    case BUILT_IN_MEMMOVE_CHK:
+      compute_trims (ref, live, &head_trim, &tail_trim, stmt);
+
+    do_memcpy:
+      /* Tail trimming is easy, we can just reduce the count.  */
+      if (tail_trim)
+       decrement_count (stmt, tail_trim);
+
+      /* Head trimming requires adjusting all the arguments.  */
+      if (head_trim)
+       {
+         /* For __*_chk need to adjust also the last argument.  */
+         if (gimple_call_num_args (stmt) == 4)
+           {
+             tree size = gimple_call_arg (stmt, 3);
+             if (!tree_fits_uhwi_p (size))
+               break;
+             if (!integer_all_onesp (size))
+               {
+                 unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
+                 if (sz < (unsigned) head_trim)
+                   break;
+                 tree arg = wide_int_to_tree (TREE_TYPE (size),
+                                              sz - head_trim);
+                 gimple_call_set_arg (stmt, 3, arg);
+               }
+           }
+         tree *dst = gimple_call_arg_ptr (stmt, 0);
+         increment_start_addr (stmt, dst, head_trim);
+         tree *src = gimple_call_arg_ptr (stmt, 1);
+         increment_start_addr (stmt, src, head_trim);
+         decrement_count (stmt, head_trim);
+       }
+      break;
 
     case BUILT_IN_MEMSET:
-      {
-       int head_trim, tail_trim;
-       compute_trims (ref, live, &head_trim, &tail_trim, stmt);
-
-       /* Tail trimming is easy, we can just reduce the count.  */
-       if (tail_trim)
-         decrement_count (stmt, tail_trim);
-
-       /* Head trimming requires adjusting all the arguments.  */
-       if (head_trim)
-         {
-           tree *dst = gimple_call_arg_ptr (stmt, 0);
-           increment_start_addr (stmt, dst, head_trim);
-           decrement_count (stmt, head_trim);
-         }
-       break;
-      }
-
-    default:
-      break;
+    case BUILT_IN_MEMSET_CHK:
+      compute_trims (ref, live, &head_trim, &tail_trim, stmt);
+
+      /* Tail trimming is easy, we can just reduce the count.  */
+      if (tail_trim)
+       decrement_count (stmt, tail_trim);
+
+      /* Head trimming requires adjusting all the arguments.  */
+      if (head_trim)
+       {
+         /* For __*_chk need to adjust also the last argument.  */
+         if (gimple_call_num_args (stmt) == 4)
+           {
+             tree size = gimple_call_arg (stmt, 3);
+             if (!tree_fits_uhwi_p (size))
+               break;
+             if (!integer_all_onesp (size))
+               {
+                 unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
+                 if (sz < (unsigned) head_trim)
+                   break;
+                 tree arg = wide_int_to_tree (TREE_TYPE (size),
+                                              sz - head_trim);
+                 gimple_call_set_arg (stmt, 3, arg);
+               }
+           }
+         tree *dst = gimple_call_arg_ptr (stmt, 0);
+         increment_start_addr (stmt, dst, head_trim);
+         decrement_count (stmt, head_trim);
+       }
+      break;
+
+    default:
+      break;
     }
 }
 
 /* STMT is a memory write where one or more bytes written are dead
    stores.  ORIG is the bitmap of bytes stored by STMT.  LIVE is the
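A sketch of the trimming the function above performs (editor's illustration; buf is invented). The second memset kills the last 8 bytes of the first, so the first call's count can be tail-trimmed:

  #include <string.h>

  char buf[16];

  void
  fill (void)
  {
    memset (buf, 0, 16);        /* tail-trimmed to memset (buf, 0, 8) */
    memset (buf + 8, 1, 8);
  }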
@@ -549,20 +641,86 @@
                       phi_bb))
            return false;
          return true;
        }
 
+/* STMT stores the value 0 into one or more memory locations
+   (via memset, empty constructor, calloc call, etc).
+
+   See if there is a subsequent store of the value 0 to one
+   or more of the same memory location(s).  If so, the subsequent
+   store is redundant and can be removed.
+
+   The subsequent stores could be via memset, empty constructors,
+   simple MEM stores, etc.  */
+
+static void
+dse_optimize_redundant_stores (gimple *stmt)
+{
+  int cnt = 0;
+
+  /* We could do something fairly complex and look through PHIs
+     like DSE_CLASSIFY_STORE, but it doesn't seem to be worth
+     the effort.
+
+     Look at all the immediate uses of the VDEF (which are obviously
+     dominated by STMT).  See if one or more stores 0 into the same
+     memory locations as STMT, if so remove the immediate use statements.  */
+  tree defvar = gimple_vdef (stmt);
+  imm_use_iterator ui;
+  gimple *use_stmt;
+  FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
+    {
+      /* Limit stmt walking.  */
+      if (++cnt > param_dse_max_alias_queries_per_store)
+       BREAK_FROM_IMM_USE_STMT (ui);
+
+      /* If USE_STMT stores 0 into one or more of the same locations
+        as STMT and STMT would kill USE_STMT, then we can just remove
+        USE_STMT.  */
+      tree fndecl;
+      if ((is_gimple_assign (use_stmt)
+          && gimple_vdef (use_stmt)
+          && (gimple_assign_single_p (use_stmt)
+              && initializer_zerop (gimple_assign_rhs1 (use_stmt))))
+         || (gimple_call_builtin_p (use_stmt, BUILT_IN_NORMAL)
+             && (fndecl = gimple_call_fndecl (use_stmt)) != NULL
+             && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
+                 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
+             && integer_zerop (gimple_call_arg (use_stmt, 1))))
+       {
+         ao_ref write;
+
+         if (!initialize_ao_ref_for_dse (use_stmt, &write))
+           BREAK_FROM_IMM_USE_STMT (ui);
+
+         if (valid_ao_ref_for_dse (&write)
+             && stmt_kills_ref_p (stmt, &write))
+           {
+             gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
+             if (is_gimple_assign (use_stmt))
+               delete_dead_or_redundant_assignment (&gsi, "redundant",
+                                                    need_eh_cleanup);
+             else if (is_gimple_call (use_stmt))
+               delete_dead_or_redundant_call (&gsi, "redundant");
+             else
+               gcc_unreachable ();
+           }
+       }
+    }
+}
+
 /* A helper of dse_optimize_stmt.
    Given a GIMPLE_ASSIGN in STMT that writes to REF, classify it
    according to downstream uses and defs.  Sets *BY_CLOBBER_P to true
    if only clobber statements influenced the classification result.
    Returns the classification.  */
 
-static dse_store_status
+dse_store_status
 dse_classify_store (ao_ref *ref, gimple *stmt,
                    bool byte_tracking_enabled, sbitmap live_bytes,
-                   bool *by_clobber_p = NULL)
+                   bool *by_clobber_p, tree stop_at_vuse)
 {
   gimple *temp;
   int cnt = 0;
   auto_bitmap visited;
 
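dse_optimize_redundant_stores in the hunk above covers cases like this sketch (editor's illustration; names invented):

  #include <string.h>

  struct t { int a[8]; };

  void
  clear (struct t *p)
  {
    memset (p, 0, sizeof *p);   /* kept */
    p->a[3] = 0;                /* redundant: stores 0 into zeroed bytes */
  }

The field store is an immediate use of the memset's VDEF, stores zero, and is killed by the memset, so it is deleted as "redundant" instead of waiting for a later store to make something dead.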
@@ -594,16 +752,21 @@
          defvar = PHI_RESULT (temp);
          bitmap_set_bit (visited, SSA_NAME_VERSION (defvar));
        }
       else
        defvar = gimple_vdef (temp);
+
+      /* If we're instructed to stop walking at region boundary, do so.  */
+      if (defvar == stop_at_vuse)
+       return DSE_STORE_LIVE;
+
       auto_vec<gimple *, 10> defs;
       gimple *phi_def = NULL;
       FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
        {
          /* Limit stmt walking.  */
-         if (++cnt > PARAM_VALUE (PARAM_DSE_MAX_ALIAS_QUERIES_PER_STORE))
+         if (++cnt > param_dse_max_alias_queries_per_store)
            {
              fail = true;
              BREAK_FROM_IMM_USE_STMT (ui);
            }
 
@@ -748,11 +911,11 @@
 class dse_dom_walker : public dom_walker
 {
 public:
   dse_dom_walker (cdi_direction direction)
     : dom_walker (direction),
-      m_live_bytes (PARAM_VALUE (PARAM_DSE_MAX_OBJECT_SIZE)),
+      m_live_bytes (param_dse_max_object_size),
       m_byte_tracking_enabled (false) {}
 
   virtual edge before_dom_children (basic_block);
 
 private:
@@ -761,16 +924,16 @@
   void dse_optimize_stmt (gimple_stmt_iterator *);
 };
 
 /* Delete a dead call at GSI, which is a mem* call of some kind.  */
 static void
-delete_dead_call (gimple_stmt_iterator *gsi)
+delete_dead_or_redundant_call (gimple_stmt_iterator *gsi, const char *type)
 {
   gimple *stmt = gsi_stmt (*gsi);
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
-      fprintf (dump_file, "  Deleted dead call: ");
+      fprintf (dump_file, "  Deleted %s call: ", type);
       print_gimple_stmt (dump_file, stmt, 0, dump_flags);
       fprintf (dump_file, "\n");
     }
 
   tree lhs = gimple_call_lhs (stmt);
@@ -794,27 +957,28 @@
     }
 }
 
 /* Delete a dead store at GSI, which is a gimple assignment.  */
 
-static void
-delete_dead_assignment (gimple_stmt_iterator *gsi)
+void
+delete_dead_or_redundant_assignment (gimple_stmt_iterator *gsi, const char *type,
+                                    bitmap need_eh_cleanup)
 {
   gimple *stmt = gsi_stmt (*gsi);
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
-      fprintf (dump_file, "  Deleted dead store: ");
+      fprintf (dump_file, "  Deleted %s store: ", type);
       print_gimple_stmt (dump_file, stmt, 0, dump_flags);
       fprintf (dump_file, "\n");
     }
 
   /* Then we need to fix the operand of the consuming stmt.  */
   unlink_stmt_vdef (stmt);
 
   /* Remove the dead store.  */
   basic_block bb = gimple_bb (stmt);
-  if (gsi_remove (gsi, true))
+  if (gsi_remove (gsi, true) && need_eh_cleanup)
     bitmap_set_bit (need_eh_cleanup, bb->index);
 
   /* And release any SSA_NAMEs set in this statement back to the
      SSA_NAME manager.  */
   release_defs (stmt);
@@ -853,54 +1017,80 @@
 
   /* We know we have virtual definitions.  We can handle assignments and
      some builtin calls.  */
   if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
     {
-      switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
+      tree fndecl = gimple_call_fndecl (stmt);
+      switch (DECL_FUNCTION_CODE (fndecl))
        {
        case BUILT_IN_MEMCPY:
        case BUILT_IN_MEMMOVE:
+       case BUILT_IN_STRNCPY:
        case BUILT_IN_MEMSET:
+       case BUILT_IN_MEMCPY_CHK:
+       case BUILT_IN_MEMMOVE_CHK:
+       case BUILT_IN_STRNCPY_CHK:
+       case BUILT_IN_MEMSET_CHK:
          {
            /* Occasionally calls with an explicit length of zero
               show up in the IL.  It's pointless to do analysis
               on them, they're trivially dead.  */
            tree size = gimple_call_arg (stmt, 2);
            if (integer_zerop (size))
              {
-               delete_dead_call (gsi);
+               delete_dead_or_redundant_call (gsi, "dead");
                return;
              }
 
+           /* If this is a memset call that initializes an object
+              to zero, it may be redundant with an earlier memset
+              or empty CONSTRUCTOR of a larger object.  */
+           if ((DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
+                || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
+               && integer_zerop (gimple_call_arg (stmt, 1)))
+             dse_optimize_redundant_stores (stmt);
+
            enum dse_store_status store_status;
            m_byte_tracking_enabled
              = setup_live_bytes_from_ref (&ref, m_live_bytes);
            store_status = dse_classify_store (&ref, stmt,
                                               m_byte_tracking_enabled,
                                               m_live_bytes);
            if (store_status == DSE_STORE_LIVE)
              return;
 
            if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
              {
                maybe_trim_memstar_call (&ref, m_live_bytes, stmt);
                return;
              }
 
            if (store_status == DSE_STORE_DEAD)
-             delete_dead_call (gsi);
+             delete_dead_or_redundant_call (gsi, "dead");
            return;
          }
 
-       default:
+       case BUILT_IN_CALLOC:
+         /* We already know the arguments are integer constants.  */
+         dse_optimize_redundant_stores (stmt);
+         return;
+
+       default:
          return;
        }
     }
 
   if (is_gimple_assign (stmt))
     {
       bool by_clobber_p = false;
+
+      /* Check if this statement stores zero to a memory location,
+        and if there is a subsequent store of zero to the same
+        memory location.  If so, remove the subsequent store.  */
+      if (gimple_assign_single_p (stmt)
+         && initializer_zerop (gimple_assign_rhs1 (stmt)))
+       dse_optimize_redundant_stores (stmt);
 
       /* Self-assignments are zombies.  */
       if (operand_equal_p (gimple_assign_rhs1 (stmt),
                           gimple_assign_lhs (stmt), 0))
        ;
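The trivially-dead path above, as a one-line sketch (editor's illustration):

  #include <string.h>

  void
  copy_none (char *d, const char *s)
  {
    memcpy (d, s, 0);   /* explicit zero length: deleted without analysis */
  }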
@@ -928,11 +1118,11 @@
         another clobber stmt.  */
       if (gimple_clobber_p (stmt)
          && !by_clobber_p)
        return;
 
-      delete_dead_assignment (gsi);
+      delete_dead_or_redundant_assignment (gsi, "dead", need_eh_cleanup);
     }
 }
 
 edge
 dse_dom_walker::before_dom_children (basic_block bb)
@@ -982,11 +1172,11 @@
 unsigned int
 pass_dse::execute (function *fun)
 {
   need_eh_cleanup = BITMAP_ALLOC (NULL);
 
-  renumber_gimple_stmt_uids ();
+  renumber_gimple_stmt_uids (cfun);
 
   /* We might consider making this a property of each pass so that it
      can be [re]computed on an as-needed basis.  Particularly since
      this pass could be seen as an extension of DCE which needs post
      dominators.  */