Mercurial > hg > CbC > CbC_gcc
comparison gcc/tree-ssa-alias.c @ 145:1830386684a0
gcc-9.2.0
author | anatofuz |
---|---|
date | Thu, 13 Feb 2020 11:34:05 +0900 |
parents | 84e7813d76e9 |
children |
comparison
equal
deleted
inserted
replaced
131:84e7813d76e9 | 145:1830386684a0 |
---|---|
1 /* Alias analysis for trees. | 1 /* Alias analysis for trees. |
2 Copyright (C) 2004-2018 Free Software Foundation, Inc. | 2 Copyright (C) 2004-2020 Free Software Foundation, Inc. |
3 Contributed by Diego Novillo <dnovillo@redhat.com> | 3 Contributed by Diego Novillo <dnovillo@redhat.com> |
4 | 4 |
5 This file is part of GCC. | 5 This file is part of GCC. |
6 | 6 |
7 GCC is free software; you can redistribute it and/or modify | 7 GCC is free software; you can redistribute it and/or modify |
85 | 85 |
86 More low-level disambiguators are available and documented in | 86 More low-level disambiguators are available and documented in |
87 this file. Low-level disambiguators dealing with points-to | 87 this file. Low-level disambiguators dealing with points-to |
88 information are in tree-ssa-structalias.c. */ | 88 information are in tree-ssa-structalias.c. */ |
89 | 89 |
90 static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool); | |
91 static bool nonoverlapping_component_refs_p (const_tree, const_tree); | |
90 | 92 |
91 /* Query statistics for the different low-level disambiguators. | 93 /* Query statistics for the different low-level disambiguators. |
92 A high-level query may trigger multiple of them. */ | 94 A high-level query may trigger multiple of them. */ |
93 | 95 |
94 static struct { | 96 static struct { |
96 unsigned HOST_WIDE_INT refs_may_alias_p_no_alias; | 98 unsigned HOST_WIDE_INT refs_may_alias_p_no_alias; |
97 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias; | 99 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias; |
98 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias; | 100 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias; |
99 unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias; | 101 unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias; |
100 unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias; | 102 unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias; |
103 unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias; | |
104 unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias; | |
105 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias; | |
106 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias; | |
107 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias; | |
108 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap; | |
109 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias; | |
101 } alias_stats; | 110 } alias_stats; |
102 | 111 |
103 void | 112 void |
104 dump_alias_stats (FILE *s) | 113 dump_alias_stats (FILE *s) |
105 { | 114 { |
120 HOST_WIDE_INT_PRINT_DEC" disambiguations, " | 129 HOST_WIDE_INT_PRINT_DEC" disambiguations, " |
121 HOST_WIDE_INT_PRINT_DEC" queries\n", | 130 HOST_WIDE_INT_PRINT_DEC" queries\n", |
122 alias_stats.call_may_clobber_ref_p_no_alias, | 131 alias_stats.call_may_clobber_ref_p_no_alias, |
123 alias_stats.call_may_clobber_ref_p_no_alias | 132 alias_stats.call_may_clobber_ref_p_no_alias |
124 + alias_stats.call_may_clobber_ref_p_may_alias); | 133 + alias_stats.call_may_clobber_ref_p_may_alias); |
134 fprintf (s, " nonoverlapping_component_refs_p: " | |
135 HOST_WIDE_INT_PRINT_DEC" disambiguations, " | |
136 HOST_WIDE_INT_PRINT_DEC" queries\n", | |
137 alias_stats.nonoverlapping_component_refs_p_no_alias, | |
138 alias_stats.nonoverlapping_component_refs_p_no_alias | |
139 + alias_stats.nonoverlapping_component_refs_p_may_alias); | |
140 fprintf (s, " nonoverlapping_refs_since_match_p: " | |
141 HOST_WIDE_INT_PRINT_DEC" disambiguations, " | |
142 HOST_WIDE_INT_PRINT_DEC" must overlaps, " | |
143 HOST_WIDE_INT_PRINT_DEC" queries\n", | |
144 alias_stats.nonoverlapping_refs_since_match_p_no_alias, | |
145 alias_stats.nonoverlapping_refs_since_match_p_must_overlap, | |
146 alias_stats.nonoverlapping_refs_since_match_p_no_alias | |
147 + alias_stats.nonoverlapping_refs_since_match_p_may_alias | |
148 + alias_stats.nonoverlapping_refs_since_match_p_must_overlap); | |
149 fprintf (s, " aliasing_component_refs_p: " | |
150 HOST_WIDE_INT_PRINT_DEC" disambiguations, " | |
151 HOST_WIDE_INT_PRINT_DEC" queries\n", | |
152 alias_stats.aliasing_component_refs_p_no_alias, | |
153 alias_stats.aliasing_component_refs_p_no_alias | |
154 + alias_stats.aliasing_component_refs_p_may_alias); | |
125 dump_alias_stats_in_alias_c (s); | 155 dump_alias_stats_in_alias_c (s); |
126 } | 156 } |
127 | 157 |
128 | 158 |
129 /* Return true, if dereferencing PTR may alias with a global variable. */ | 159 /* Return true, if dereferencing PTR may alias with a global variable. */ |
200 return false; | 230 return false; |
201 else | 231 else |
202 return true; | 232 return true; |
203 } | 233 } |
204 | 234 |
205 /* Non-aliased variables can not be pointed to. */ | 235 /* Non-aliased variables cannot be pointed to. */ |
206 if (!may_be_aliased (decl)) | 236 if (!may_be_aliased (decl)) |
207 return false; | 237 return false; |
208 | 238 |
209 /* If we do not have useful points-to information for this pointer | 239 /* If we do not have useful points-to information for this pointer |
210 we cannot disambiguate anything else. */ | 240 we cannot disambiguate anything else. */ |
708 ref->base = get_base_address (TREE_OPERAND (ptr, 0)); | 738 ref->base = get_base_address (TREE_OPERAND (ptr, 0)); |
709 } | 739 } |
710 } | 740 } |
711 else | 741 else |
712 { | 742 { |
743 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr))); | |
713 ref->base = build2 (MEM_REF, char_type_node, | 744 ref->base = build2 (MEM_REF, char_type_node, |
714 ptr, null_pointer_node); | 745 ptr, null_pointer_node); |
715 ref->offset = 0; | 746 ref->offset = 0; |
716 } | 747 } |
717 ref->offset += extra_offset; | 748 ref->offset += extra_offset; |
724 ref->ref_alias_set = 0; | 755 ref->ref_alias_set = 0; |
725 ref->base_alias_set = 0; | 756 ref->base_alias_set = 0; |
726 ref->volatile_p = false; | 757 ref->volatile_p = false; |
727 } | 758 } |
728 | 759 |
760 /* S1 and S2 are TYPE_SIZE or DECL_SIZE. Compare them: | |
761 Return -1 if S1 < S2 | |
762 Return 1 if S1 > S2 | |
763 Return 0 if equal or incomparable. */ | |
764 | |
765 static int | |
766 compare_sizes (tree s1, tree s2) | |
767 { | |
768 if (!s1 || !s2) | |
769 return 0; | |
770 | |
771 poly_uint64 size1; | |
772 poly_uint64 size2; | |
773 | |
774 if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2)) | |
775 return 0; | |
776 if (known_lt (size1, size2)) | |
777 return -1; | |
778 if (known_lt (size2, size1)) | |
779 return 1; | |
780 return 0; | |
781 } | |
782 | |
783 /* Compare TYPE1 and TYPE2 by its size. | |
784 Return -1 if size of TYPE1 < size of TYPE2 | |
785 Return 1 if size of TYPE1 > size of TYPE2 | |
786 Return 0 if types are of equal sizes or we can not compare them. */ | |
787 | |
788 static int | |
789 compare_type_sizes (tree type1, tree type2) | |
790 { | |
791 /* Be conservative for arrays and vectors. We want to support partial | |
792 overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c. */ | |
793 while (TREE_CODE (type1) == ARRAY_TYPE | |
794 || TREE_CODE (type1) == VECTOR_TYPE) | |
795 type1 = TREE_TYPE (type1); | |
796 while (TREE_CODE (type2) == ARRAY_TYPE | |
797 || TREE_CODE (type2) == VECTOR_TYPE) | |
798 type2 = TREE_TYPE (type2); | |
799 return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2)); | |
800 } | |
801 | |
729 /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the | 802 /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the |
730 purpose of TBAA. Return 0 if they are distinct and -1 if we cannot | 803 purpose of TBAA. Return 0 if they are distinct and -1 if we cannot |
731 decide. */ | 804 decide. */ |
732 | 805 |
733 static inline int | 806 static inline int |
734 same_type_for_tbaa (tree type1, tree type2) | 807 same_type_for_tbaa (tree type1, tree type2) |
735 { | 808 { |
736 type1 = TYPE_MAIN_VARIANT (type1); | 809 type1 = TYPE_MAIN_VARIANT (type1); |
737 type2 = TYPE_MAIN_VARIANT (type2); | 810 type2 = TYPE_MAIN_VARIANT (type2); |
811 | |
812 /* Handle the most common case first. */ | |
813 if (type1 == type2) | |
814 return 1; | |
738 | 815 |
739 /* If we would have to do structural comparison bail out. */ | 816 /* If we would have to do structural comparison bail out. */ |
740 if (TYPE_STRUCTURAL_EQUALITY_P (type1) | 817 if (TYPE_STRUCTURAL_EQUALITY_P (type1) |
741 || TYPE_STRUCTURAL_EQUALITY_P (type2)) | 818 || TYPE_STRUCTURAL_EQUALITY_P (type2)) |
742 return -1; | 819 return -1; |
765 | 842 |
766 /* The types are known to be not equal. */ | 843 /* The types are known to be not equal. */ |
767 return 0; | 844 return 0; |
768 } | 845 } |
769 | 846 |
847 /* Return true if TYPE is a composite type (i.e. we may apply one of handled | |
848 components on it). */ | |
849 | |
850 static bool | |
851 type_has_components_p (tree type) | |
852 { | |
853 return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type) | |
854 || TREE_CODE (type) == COMPLEX_TYPE; | |
855 } | |
856 | |
857 /* MATCH1 and MATCH2 which are part of access path of REF1 and REF2 | |
858 respectively are either pointing to same address or are completely | |
859 disjoint. If PARTIAL_OVERLAP is true, assume that outermost arrays may | |
860 just partly overlap. | |
861 | |
862 Try to disambiguate using the access path starting from the match | |
863 and return false if there is no conflict. | |
864 | |
865 Helper for aliasing_component_refs_p. */ | |
866 | |
867 static bool | |
868 aliasing_matching_component_refs_p (tree match1, tree ref1, | |
869 poly_int64 offset1, poly_int64 max_size1, | |
870 tree match2, tree ref2, | |
871 poly_int64 offset2, poly_int64 max_size2, | |
872 bool partial_overlap) | |
873 { | |
874 poly_int64 offadj, sztmp, msztmp; | |
875 bool reverse; | |
876 | |
877 if (!partial_overlap) | |
878 { | |
879 get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse); | |
880 offset2 -= offadj; | |
881 get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse); | |
882 offset1 -= offadj; | |
883 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2)) | |
884 { | |
885 ++alias_stats.aliasing_component_refs_p_no_alias; | |
886 return false; | |
887 } | |
888 } | |
889 | |
890 int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2, | |
891 partial_overlap); | |
892 if (cmp == 1 | |
893 || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2))) | |
894 { | |
895 ++alias_stats.aliasing_component_refs_p_no_alias; | |
896 return false; | |
897 } | |
898 ++alias_stats.aliasing_component_refs_p_may_alias; | |
899 return true; | |
900 } | |
901 | |
902 /* Return true if REF is reference to zero sized trailing array. I.e. | |
903 struct foo {int bar; int array[0];} *fooptr; | |
904 fooptr->array. */ | |
905 | |
906 static bool | |
907 component_ref_to_zero_sized_trailing_array_p (tree ref) | |
908 { | |
909 return (TREE_CODE (ref) == COMPONENT_REF | |
910 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE | |
911 && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1))) | |
912 || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1))))) | |
913 && array_at_struct_end_p (ref)); | |
914 } | |
915 | |
916 /* Worker for aliasing_component_refs_p. Most parameters match parameters of | |
917 aliasing_component_refs_p. | |
918 | |
919 Walk access path REF2 and try to find type matching TYPE1 | |
920 (which is a start of possibly aliasing access path REF1). | |
921 If match is found, try to disambiguate. | |
922 | |
923 Return 0 for sucessful disambiguation. | |
924 Return 1 if match was found but disambiguation failed | |
925 Return -1 if there is no match. | |
926 In this case MAYBE_MATCH is set to 0 if there is no type matching TYPE1 | |
927 in access patch REF2 and -1 if we are not sure. */ | |
928 | |
929 static int | |
930 aliasing_component_refs_walk (tree ref1, tree type1, tree base1, | |
931 poly_int64 offset1, poly_int64 max_size1, | |
932 tree end_struct_ref1, | |
933 tree ref2, tree base2, | |
934 poly_int64 offset2, poly_int64 max_size2, | |
935 bool *maybe_match) | |
936 { | |
937 tree ref = ref2; | |
938 int same_p = 0; | |
939 | |
940 while (true) | |
941 { | |
942 /* We walk from inner type to the outer types. If type we see is | |
943 already too large to be part of type1, terminate the search. */ | |
944 int cmp = compare_type_sizes (type1, TREE_TYPE (ref)); | |
945 | |
946 if (cmp < 0 | |
947 && (!end_struct_ref1 | |
948 || compare_type_sizes (TREE_TYPE (end_struct_ref1), | |
949 TREE_TYPE (ref)) < 0)) | |
950 break; | |
951 /* If types may be of same size, see if we can decide about their | |
952 equality. */ | |
953 if (cmp == 0) | |
954 { | |
955 same_p = same_type_for_tbaa (TREE_TYPE (ref), type1); | |
956 if (same_p == 1) | |
957 break; | |
958 /* In case we can't decide whether types are same try to | |
959 continue looking for the exact match. | |
960 Remember however that we possibly saw a match | |
961 to bypass the access path continuations tests we do later. */ | |
962 if (same_p == -1) | |
963 *maybe_match = true; | |
964 } | |
965 if (!handled_component_p (ref)) | |
966 break; | |
967 ref = TREE_OPERAND (ref, 0); | |
968 } | |
969 if (same_p == 1) | |
970 { | |
971 bool partial_overlap = false; | |
972 | |
973 /* We assume that arrays can overlap by multiple of their elements | |
974 size as tested in gcc.dg/torture/alias-2.c. | |
975 This partial overlap happen only when both arrays are bases of | |
976 the access and not contained within another component ref. | |
977 To be safe we also assume partial overlap for VLAs. */ | |
978 if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE | |
979 && (!TYPE_SIZE (TREE_TYPE (base1)) | |
980 || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST | |
981 || ref == base2)) | |
982 { | |
983 /* Setting maybe_match to true triggers | |
984 nonoverlapping_component_refs_p test later that still may do | |
985 useful disambiguation. */ | |
986 *maybe_match = true; | |
987 partial_overlap = true; | |
988 } | |
989 return aliasing_matching_component_refs_p (base1, ref1, | |
990 offset1, max_size1, | |
991 ref, ref2, | |
992 offset2, max_size2, | |
993 partial_overlap); | |
994 } | |
995 return -1; | |
996 } | |
997 | |
770 /* Determine if the two component references REF1 and REF2 which are | 998 /* Determine if the two component references REF1 and REF2 which are |
771 based on access types TYPE1 and TYPE2 and of which at least one is based | 999 based on access types TYPE1 and TYPE2 and of which at least one is based |
772 on an indirect reference may alias. REF2 is the only one that can | 1000 on an indirect reference may alias. |
773 be a decl in which case REF2_IS_DECL is true. | |
774 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET | 1001 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET |
775 are the respective alias sets. */ | 1002 are the respective alias sets. */ |
776 | 1003 |
777 static bool | 1004 static bool |
778 aliasing_component_refs_p (tree ref1, | 1005 aliasing_component_refs_p (tree ref1, |
780 alias_set_type base1_alias_set, | 1007 alias_set_type base1_alias_set, |
781 poly_int64 offset1, poly_int64 max_size1, | 1008 poly_int64 offset1, poly_int64 max_size1, |
782 tree ref2, | 1009 tree ref2, |
783 alias_set_type ref2_alias_set, | 1010 alias_set_type ref2_alias_set, |
784 alias_set_type base2_alias_set, | 1011 alias_set_type base2_alias_set, |
785 poly_int64 offset2, poly_int64 max_size2, | 1012 poly_int64 offset2, poly_int64 max_size2) |
786 bool ref2_is_decl) | |
787 { | 1013 { |
788 /* If one reference is a component references through pointers try to find a | 1014 /* If one reference is a component references through pointers try to find a |
789 common base and apply offset based disambiguation. This handles | 1015 common base and apply offset based disambiguation. This handles |
790 for example | 1016 for example |
791 struct A { int i; int j; } *q; | 1017 struct A { int i; int j; } *q; |
792 struct B { struct A a; int k; } *p; | 1018 struct B { struct A a; int k; } *p; |
793 disambiguating q->i and p->a.j. */ | 1019 disambiguating q->i and p->a.j. */ |
794 tree base1, base2; | 1020 tree base1, base2; |
795 tree type1, type2; | 1021 tree type1, type2; |
796 tree *refp; | 1022 bool maybe_match = false; |
797 int same_p; | 1023 tree end_struct_ref1 = NULL, end_struct_ref2 = NULL; |
798 | 1024 |
799 /* Choose bases and base types to search for. */ | 1025 /* Choose bases and base types to search for. */ |
800 base1 = ref1; | 1026 base1 = ref1; |
801 while (handled_component_p (base1)) | 1027 while (handled_component_p (base1)) |
802 base1 = TREE_OPERAND (base1, 0); | 1028 { |
1029 /* Generally access paths are monotous in the size of object. The | |
1030 exception are trailing arrays of structures. I.e. | |
1031 struct a {int array[0];}; | |
1032 or | |
1033 struct a {int array1[0]; int array[];}; | |
1034 Such struct has size 0 but accesses to a.array may have non-zero size. | |
1035 In this case the size of TREE_TYPE (base1) is smaller than | |
1036 size of TREE_TYPE (TREE_OPERNAD (base1, 0)). | |
1037 | |
1038 Because we compare sizes of arrays just by sizes of their elements, | |
1039 we only need to care about zero sized array fields here. */ | |
1040 if (component_ref_to_zero_sized_trailing_array_p (base1)) | |
1041 { | |
1042 gcc_checking_assert (!end_struct_ref1); | |
1043 end_struct_ref1 = base1; | |
1044 } | |
1045 if (TREE_CODE (base1) == VIEW_CONVERT_EXPR | |
1046 || TREE_CODE (base1) == BIT_FIELD_REF) | |
1047 ref1 = TREE_OPERAND (base1, 0); | |
1048 base1 = TREE_OPERAND (base1, 0); | |
1049 } | |
803 type1 = TREE_TYPE (base1); | 1050 type1 = TREE_TYPE (base1); |
804 base2 = ref2; | 1051 base2 = ref2; |
805 while (handled_component_p (base2)) | 1052 while (handled_component_p (base2)) |
806 base2 = TREE_OPERAND (base2, 0); | 1053 { |
1054 if (component_ref_to_zero_sized_trailing_array_p (base2)) | |
1055 { | |
1056 gcc_checking_assert (!end_struct_ref2); | |
1057 end_struct_ref2 = base2; | |
1058 } | |
1059 if (TREE_CODE (base2) == VIEW_CONVERT_EXPR | |
1060 || TREE_CODE (base2) == BIT_FIELD_REF) | |
1061 ref2 = TREE_OPERAND (base2, 0); | |
1062 base2 = TREE_OPERAND (base2, 0); | |
1063 } | |
807 type2 = TREE_TYPE (base2); | 1064 type2 = TREE_TYPE (base2); |
808 | 1065 |
809 /* Now search for the type1 in the access path of ref2. This | 1066 /* Now search for the type1 in the access path of ref2. This |
810 would be a common base for doing offset based disambiguation on. */ | 1067 would be a common base for doing offset based disambiguation on. |
811 refp = &ref2; | 1068 This however only makes sense if type2 is big enough to hold type1. */ |
812 while (handled_component_p (*refp) | 1069 int cmp_outer = compare_type_sizes (type2, type1); |
813 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0) | 1070 |
814 refp = &TREE_OPERAND (*refp, 0); | 1071 /* If type2 is big enough to contain type1 walk its access path. |
815 same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1); | 1072 We also need to care of arrays at the end of structs that may extend |
816 /* If we couldn't compare types we have to bail out. */ | 1073 beyond the end of structure. */ |
817 if (same_p == -1) | 1074 if (cmp_outer >= 0 |
818 return true; | 1075 || (end_struct_ref2 |
819 else if (same_p == 1) | 1076 && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0)) |
820 { | 1077 { |
821 poly_int64 offadj, sztmp, msztmp; | 1078 int res = aliasing_component_refs_walk (ref1, type1, base1, |
822 bool reverse; | 1079 offset1, max_size1, |
823 get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse); | 1080 end_struct_ref1, |
824 offset2 -= offadj; | 1081 ref2, base2, offset2, max_size2, |
825 get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp, &reverse); | 1082 &maybe_match); |
826 offset1 -= offadj; | 1083 if (res != -1) |
827 return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2); | 1084 return res; |
828 } | 1085 } |
1086 | |
829 /* If we didn't find a common base, try the other way around. */ | 1087 /* If we didn't find a common base, try the other way around. */ |
830 refp = &ref1; | 1088 if (cmp_outer <= 0 |
831 while (handled_component_p (*refp) | 1089 || (end_struct_ref1 |
832 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0) | 1090 && compare_type_sizes (TREE_TYPE (end_struct_ref1), type1) <= 0)) |
833 refp = &TREE_OPERAND (*refp, 0); | 1091 { |
834 same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2); | 1092 int res = aliasing_component_refs_walk (ref2, type2, base2, |
835 /* If we couldn't compare types we have to bail out. */ | 1093 offset2, max_size2, |
836 if (same_p == -1) | 1094 end_struct_ref2, |
837 return true; | 1095 ref1, base1, offset1, max_size1, |
838 else if (same_p == 1) | 1096 &maybe_match); |
839 { | 1097 if (res != -1) |
840 poly_int64 offadj, sztmp, msztmp; | 1098 return res; |
841 bool reverse; | 1099 } |
842 get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse); | 1100 |
843 offset1 -= offadj; | 1101 /* In the following code we make an assumption that the types in access |
844 get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp, &reverse); | 1102 paths do not overlap and thus accesses alias only if one path can be |
845 offset2 -= offadj; | 1103 continuation of another. If we was not able to decide about equivalence, |
846 return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2); | 1104 we need to give up. */ |
1105 if (maybe_match) | |
1106 { | |
1107 if (!nonoverlapping_component_refs_p (ref1, ref2)) | |
1108 { | |
1109 ++alias_stats.aliasing_component_refs_p_may_alias; | |
1110 return true; | |
1111 } | |
1112 ++alias_stats.aliasing_component_refs_p_no_alias; | |
1113 return false; | |
847 } | 1114 } |
848 | 1115 |
849 /* If we have two type access paths B1.path1 and B2.path2 they may | 1116 /* If we have two type access paths B1.path1 and B2.path2 they may |
850 only alias if either B1 is in B2.path2 or B2 is in B1.path1. | 1117 only alias if either B1 is in B2.path2 or B2 is in B1.path1. |
851 But we can still have a path that goes B1.path1...B2.path2 with | 1118 But we can still have a path that goes B1.path1...B2.path2 with |
852 a part that we do not see. So we can only disambiguate now | 1119 a part that we do not see. So we can only disambiguate now |
853 if there is no B2 in the tail of path1 and no B1 on the | 1120 if there is no B2 in the tail of path1 and no B1 on the |
854 tail of path2. */ | 1121 tail of path2. */ |
855 if (base1_alias_set == ref2_alias_set | 1122 if (compare_type_sizes (TREE_TYPE (ref2), type1) >= 0 |
856 || alias_set_subset_of (base1_alias_set, ref2_alias_set)) | 1123 && (!end_struct_ref1 |
857 return true; | 1124 || compare_type_sizes (TREE_TYPE (ref2), |
1125 TREE_TYPE (end_struct_ref1)) >= 0) | |
1126 && type_has_components_p (TREE_TYPE (ref2)) | |
1127 && (base1_alias_set == ref2_alias_set | |
1128 || alias_set_subset_of (base1_alias_set, ref2_alias_set))) | |
1129 { | |
1130 ++alias_stats.aliasing_component_refs_p_may_alias; | |
1131 return true; | |
1132 } | |
858 /* If this is ptr vs. decl then we know there is no ptr ... decl path. */ | 1133 /* If this is ptr vs. decl then we know there is no ptr ... decl path. */ |
859 if (!ref2_is_decl) | 1134 if (compare_type_sizes (TREE_TYPE (ref1), type2) >= 0 |
860 return (base2_alias_set == ref1_alias_set | 1135 && (!end_struct_ref2 |
861 || alias_set_subset_of (base2_alias_set, ref1_alias_set)); | 1136 || compare_type_sizes (TREE_TYPE (ref1), |
1137 TREE_TYPE (end_struct_ref2)) >= 0) | |
1138 && type_has_components_p (TREE_TYPE (ref1)) | |
1139 && (base2_alias_set == ref1_alias_set | |
1140 || alias_set_subset_of (base2_alias_set, ref1_alias_set))) | |
1141 { | |
1142 ++alias_stats.aliasing_component_refs_p_may_alias; | |
1143 return true; | |
1144 } | |
1145 ++alias_stats.aliasing_component_refs_p_no_alias; | |
862 return false; | 1146 return false; |
863 } | 1147 } |
864 | 1148 |
865 /* Return true if we can determine that component references REF1 and REF2, | 1149 /* FIELD1 and FIELD2 are two fields of component refs. We assume |
866 that are within a common DECL, cannot overlap. */ | 1150 that bases of both component refs are either equivalent or nonoverlapping. |
867 | 1151 We do not assume that the containers of FIELD1 and FIELD2 are of the |
868 static bool | 1152 same type or size. |
869 nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2) | 1153 |
870 { | 1154 Return 0 in case the base address of component_refs are same then |
1155 FIELD1 and FIELD2 have same address. Note that FIELD1 and FIELD2 | |
1156 may not be of same type or size. | |
1157 | |
1158 Return 1 if FIELD1 and FIELD2 are non-overlapping. | |
1159 | |
1160 Return -1 otherwise. | |
1161 | |
1162 Main difference between 0 and -1 is to let | |
1163 nonoverlapping_component_refs_since_match_p discover the semantically | |
1164 equivalent part of the access path. | |
1165 | |
1166 Note that this function is used even with -fno-strict-aliasing | |
1167 and makes use of no TBAA assumptions. */ | |
1168 | |
1169 static int | |
1170 nonoverlapping_component_refs_p_1 (const_tree field1, const_tree field2) | |
1171 { | |
1172 /* If both fields are of the same type, we could save hard work of | |
1173 comparing offsets. */ | |
1174 tree type1 = DECL_CONTEXT (field1); | |
1175 tree type2 = DECL_CONTEXT (field2); | |
1176 | |
1177 if (TREE_CODE (type1) == RECORD_TYPE | |
1178 && DECL_BIT_FIELD_REPRESENTATIVE (field1)) | |
1179 field1 = DECL_BIT_FIELD_REPRESENTATIVE (field1); | |
1180 if (TREE_CODE (type2) == RECORD_TYPE | |
1181 && DECL_BIT_FIELD_REPRESENTATIVE (field2)) | |
1182 field2 = DECL_BIT_FIELD_REPRESENTATIVE (field2); | |
1183 | |
1184 /* ??? Bitfields can overlap at RTL level so punt on them. | |
1185 FIXME: RTL expansion should be fixed by adjusting the access path | |
1186 when producing MEM_ATTRs for MEMs which are wider than | |
1187 the bitfields similarly as done in set_mem_attrs_minus_bitpos. */ | |
1188 if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2)) | |
1189 return -1; | |
1190 | |
1191 /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE. */ | |
1192 if (type1 == type2 && TREE_CODE (type1) == RECORD_TYPE) | |
1193 return field1 != field2; | |
1194 | |
1195 /* In common case the offsets and bit offsets will be the same. | |
1196 However if frontends do not agree on the alignment, they may be | |
1197 different even if they actually represent same address. | |
1198 Try the common case first and if that fails calcualte the | |
1199 actual bit offset. */ | |
1200 if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1), | |
1201 DECL_FIELD_OFFSET (field2)) | |
1202 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1), | |
1203 DECL_FIELD_BIT_OFFSET (field2))) | |
1204 return 0; | |
1205 | |
1206 /* Note that it may be possible to use component_ref_field_offset | |
1207 which would provide offsets as trees. However constructing and folding | |
1208 trees is expensive and does not seem to be worth the compile time | |
1209 cost. */ | |
1210 | |
1211 poly_uint64 offset1, offset2; | |
1212 poly_uint64 bit_offset1, bit_offset2; | |
1213 | |
1214 if (poly_int_tree_p (DECL_FIELD_OFFSET (field1), &offset1) | |
1215 && poly_int_tree_p (DECL_FIELD_OFFSET (field2), &offset2) | |
1216 && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1), &bit_offset1) | |
1217 && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2), &bit_offset2)) | |
1218 { | |
1219 offset1 = (offset1 << LOG2_BITS_PER_UNIT) + bit_offset1; | |
1220 offset2 = (offset2 << LOG2_BITS_PER_UNIT) + bit_offset2; | |
1221 | |
1222 if (known_eq (offset1, offset2)) | |
1223 return 0; | |
1224 | |
1225 poly_uint64 size1, size2; | |
1226 | |
1227 if (poly_int_tree_p (DECL_SIZE (field1), &size1) | |
1228 && poly_int_tree_p (DECL_SIZE (field2), &size2) | |
1229 && !ranges_maybe_overlap_p (offset1, size1, offset2, size2)) | |
1230 return 1; | |
1231 } | |
1232 /* Resort to slower overlap checking by looking for matching types in | |
1233 the middle of access path. */ | |
1234 return -1; | |
1235 } | |
1236 | |
1237 /* Return low bound of array. Do not produce new trees | |
1238 and thus do not care about particular type of integer constant | |
1239 and placeholder exprs. */ | |
1240 | |
1241 static tree | |
1242 cheap_array_ref_low_bound (tree ref) | |
1243 { | |
1244 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0))); | |
1245 | |
1246 /* Avoid expensive array_ref_low_bound. | |
1247 low bound is either stored in operand2, or it is TYPE_MIN_VALUE of domain | |
1248 type or it is zero. */ | |
1249 if (TREE_OPERAND (ref, 2)) | |
1250 return TREE_OPERAND (ref, 2); | |
1251 else if (domain_type && TYPE_MIN_VALUE (domain_type)) | |
1252 return TYPE_MIN_VALUE (domain_type); | |
1253 else | |
1254 return integer_zero_node; | |
1255 } | |
1256 | |
1257 /* REF1 and REF2 are ARRAY_REFs with either same base address or which are | |
1258 completely disjoint. | |
1259 | |
1260 Return 1 if the refs are non-overlapping. | |
1261 Return 0 if they are possibly overlapping but if so the overlap again | |
1262 starts on the same address. | |
1263 Return -1 otherwise. */ | |
1264 | |
1265 int | |
1266 nonoverlapping_array_refs_p (tree ref1, tree ref2) | |
1267 { | |
1268 tree index1 = TREE_OPERAND (ref1, 1); | |
1269 tree index2 = TREE_OPERAND (ref2, 1); | |
1270 tree low_bound1 = cheap_array_ref_low_bound(ref1); | |
1271 tree low_bound2 = cheap_array_ref_low_bound(ref2); | |
1272 | |
1273 /* Handle zero offsets first: we do not need to match type size in this | |
1274 case. */ | |
1275 if (operand_equal_p (index1, low_bound1, 0) | |
1276 && operand_equal_p (index2, low_bound2, 0)) | |
1277 return 0; | |
1278 | |
1279 /* If type sizes are different, give up. | |
1280 | |
1281 Avoid expensive array_ref_element_size. | |
1282 If operand 3 is present it denotes size in the alignmnet units. | |
1283 Otherwise size is TYPE_SIZE of the element type. | |
1284 Handle only common cases where types are of the same "kind". */ | |
1285 if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL)) | |
1286 return -1; | |
1287 | |
1288 tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0))); | |
1289 tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0))); | |
1290 | |
1291 if (TREE_OPERAND (ref1, 3)) | |
1292 { | |
1293 if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2) | |
1294 || !operand_equal_p (TREE_OPERAND (ref1, 3), | |
1295 TREE_OPERAND (ref2, 3), 0)) | |
1296 return -1; | |
1297 } | |
1298 else | |
1299 { | |
1300 if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1), | |
1301 TYPE_SIZE_UNIT (elmt_type2), 0)) | |
1302 return -1; | |
1303 } | |
1304 | |
1305 /* Since we know that type sizes are the same, there is no need to return | |
1306 -1 after this point. Partial overlap can not be introduced. */ | |
1307 | |
1308 /* We may need to fold trees in this case. | |
1309 TODO: Handle integer constant case at least. */ | |
1310 if (!operand_equal_p (low_bound1, low_bound2, 0)) | |
1311 return 0; | |
1312 | |
1313 if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST) | |
1314 { | |
1315 if (tree_int_cst_equal (index1, index2)) | |
1316 return 0; | |
1317 return 1; | |
1318 } | |
1319 /* TODO: We can use VRP to further disambiguate here. */ | |
1320 return 0; | |
1321 } | |
1322 | |
1323 /* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and | |
1324 MATCH2 either point to the same address or are disjoint. | |
1325 MATCH1 and MATCH2 are assumed to be ref in the access path of REF1 and REF2 | |
1326 respectively or NULL in the case we established equivalence of bases. | |
1327 If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually | |
1328 overlap by an exact multiple of their element size. | |
1329 | |
1330 This test works by matching the initial segment of the access path | |
1331 and does not rely on TBAA thus is safe for !flag_strict_aliasing if | |
1332 match was determined without use of TBAA oracle. | |
1333 | |
1334 Return 1 if we can determine that component references REF1 and REF2, | |
1335 that are within a common DECL, cannot overlap. | |
1336 | |
1337 Return 0 if paths are same and thus there is nothing to disambiguate more | |
1338 (i.e. there is must alias assuming there is must alias between MATCH1 and | |
1339 MATCH2) | |
1340 | |
1341 Return -1 if we cannot determine 0 or 1 - this happens when | |
1342 non-matching types were met in the path. | |
1343 In this case it may make sense to continue by other disambiguation | |
1344 oracles. */ | |
1345 | |
1346 static int | |
1347 nonoverlapping_refs_since_match_p (tree match1, tree ref1, | |
1348 tree match2, tree ref2, | |
1349 bool partial_overlap) | |
1350 { | |
1351 /* Early return if there are no references to match, we do not need | |
1352 to walk the access paths. | |
1353 | |
1354 Do not consider this as may-alias for stats - it is more useful | |
1355 to have information how many disambiguations happened provided that | |
1356 the query was meaningful. */ | |
1357 | |
1358 if (match1 == ref1 || !handled_component_p (ref1) | |
1359 || match2 == ref2 || !handled_component_p (ref2)) | |
1360 return -1; | |
1361 | |
871 auto_vec<tree, 16> component_refs1; | 1362 auto_vec<tree, 16> component_refs1; |
872 auto_vec<tree, 16> component_refs2; | 1363 auto_vec<tree, 16> component_refs2; |
873 | 1364 |
874 /* Create the stack of handled components for REF1. */ | 1365 /* Create the stack of handled components for REF1. */ |
875 while (handled_component_p (ref1)) | 1366 while (handled_component_p (ref1) && ref1 != match1) |
876 { | 1367 { |
877 component_refs1.safe_push (ref1); | 1368 if (TREE_CODE (ref1) == VIEW_CONVERT_EXPR |
1369 || TREE_CODE (ref1) == BIT_FIELD_REF) | |
1370 component_refs1.truncate (0); | |
1371 else | |
1372 component_refs1.safe_push (ref1); | |
878 ref1 = TREE_OPERAND (ref1, 0); | 1373 ref1 = TREE_OPERAND (ref1, 0); |
879 } | 1374 } |
880 if (TREE_CODE (ref1) == MEM_REF) | |
881 { | |
882 if (!integer_zerop (TREE_OPERAND (ref1, 1))) | |
883 return false; | |
884 ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0); | |
885 } | |
886 | 1375 |
887 /* Create the stack of handled components for REF2. */ | 1376 /* Create the stack of handled components for REF2. */ |
888 while (handled_component_p (ref2)) | 1377 while (handled_component_p (ref2) && ref2 != match2) |
889 { | 1378 { |
890 component_refs2.safe_push (ref2); | 1379 if (TREE_CODE (ref2) == VIEW_CONVERT_EXPR |
1380 || TREE_CODE (ref2) == BIT_FIELD_REF) | |
1381 component_refs2.truncate (0); | |
1382 else | |
1383 component_refs2.safe_push (ref2); | |
891 ref2 = TREE_OPERAND (ref2, 0); | 1384 ref2 = TREE_OPERAND (ref2, 0); |
892 } | 1385 } |
893 if (TREE_CODE (ref2) == MEM_REF) | 1386 |
894 { | 1387 bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1; |
895 if (!integer_zerop (TREE_OPERAND (ref2, 1))) | 1388 bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2; |
896 return false; | 1389 |
897 ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0); | 1390 /* If only one of access path starts with MEM_REF check that offset is 0 |
898 } | 1391 so the addresses stays the same after stripping it. |
899 | 1392 TODO: In this case we may walk the other access path until we get same |
900 /* Bases must be either same or uncomparable. */ | 1393 offset. |
901 gcc_checking_assert (ref1 == ref2 | 1394 |
902 || (DECL_P (ref1) && DECL_P (ref2) | 1395 If both starts with MEM_REF, offset has to be same. */ |
903 && compare_base_decls (ref1, ref2) != 0)); | 1396 if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1))) |
1397 || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1))) | |
1398 || (mem_ref1 && mem_ref2 | |
1399 && !tree_int_cst_equal (TREE_OPERAND (ref1, 1), | |
1400 TREE_OPERAND (ref2, 1)))) | |
1401 { | |
1402 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias; | |
1403 return -1; | |
1404 } | |
1405 | |
1406 /* TARGET_MEM_REF are never wrapped in handled components, so we do not need | |
1407 to handle them here at all. */ | |
1408 gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF | |
1409 && TREE_CODE (ref2) != TARGET_MEM_REF); | |
904 | 1410 |
905 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same | 1411 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same |
906 rank. This is sufficient because we start from the same DECL and you | 1412 rank. This is sufficient because we start from the same DECL and you |
907 cannot reference several fields at a time with COMPONENT_REFs (unlike | 1413 cannot reference several fields at a time with COMPONENT_REFs (unlike |
908 with ARRAY_RANGE_REFs for arrays) so you always need the same number | 1414 with ARRAY_RANGE_REFs for arrays) so you always need the same number |
909 of them to access a sub-component, unless you're in a union, in which | 1415 of them to access a sub-component, unless you're in a union, in which |
910 case the return value will precisely be false. */ | 1416 case the return value will precisely be false. */ |
911 while (true) | 1417 while (true) |
912 { | 1418 { |
1419 /* Track if we seen unmatched ref with non-zero offset. In this case | |
1420 we must look for partial overlaps. */ | |
1421 bool seen_unmatched_ref_p = false; | |
1422 | |
1423 /* First match ARRAY_REFs and try to disambiguate. */ | |
1424 if (!component_refs1.is_empty () | |
1425 && !component_refs2.is_empty ()) | |
1426 { | |
1427 unsigned int narray_refs1=0, narray_refs2=0; | |
1428 | |
1429 /* We generally assume that both access paths start by the same sequence | |
1430 of refs. However if number of array refs is not in sync, try | |
1431 to recover and pop elts until number match. This helps the case | |
1432 where one access path starts by array and other by element. */ | |
1433 for (narray_refs1 = 0; narray_refs1 < component_refs1.length (); | |
1434 narray_refs1++) | |
1435 if (TREE_CODE (component_refs1 [component_refs1.length() | |
1436 - 1 - narray_refs1]) != ARRAY_REF) | |
1437 break; | |
1438 | |
1439 for (narray_refs2 = 0; narray_refs2 < component_refs2.length (); | |
1440 narray_refs2++) | |
1441 if (TREE_CODE (component_refs2 [component_refs2.length() | |
1442 - 1 - narray_refs2]) != ARRAY_REF) | |
1443 break; | |
1444 for (; narray_refs1 > narray_refs2; narray_refs1--) | |
1445 { | |
1446 ref1 = component_refs1.pop (); | |
1447 | |
1448 /* If index is non-zero we need to check whether the reference | |
1449 does not break the main invariant that bases are either | |
1450 disjoint or equal. Consider the example: | |
1451 | |
1452 unsigned char out[][1]; | |
1453 out[1]="a"; | |
1454 out[i][0]; | |
1455 | |
1456 Here bases out and out are same, but after removing the | |
1457 [i] index, this invariant no longer holds, because | |
1458 out[i] points to the middle of array out. | |
1459 | |
1460 TODO: If size of type of the skipped reference is an integer | |
1461 multiply of the size of type of the other reference this | |
1462 invariant can be verified, but even then it is not completely | |
1463 safe with !flag_strict_aliasing if the other reference contains | |
1464 unbounded array accesses. | |
1465 See */ | |
1466 | |
1467 if (!operand_equal_p (TREE_OPERAND (ref1, 1), | |
1468 cheap_array_ref_low_bound (ref1), 0)) | |
1469 return 0; | |
1470 } | |
1471 for (; narray_refs2 > narray_refs1; narray_refs2--) | |
1472 { | |
1473 ref2 = component_refs2.pop (); | |
1474 if (!operand_equal_p (TREE_OPERAND (ref2, 1), | |
1475 cheap_array_ref_low_bound (ref2), 0)) | |
1476 return 0; | |
1477 } | |
1478 /* Try to disambiguate matched arrays. */ | |
1479 for (unsigned int i = 0; i < narray_refs1; i++) | |
1480 { | |
1481 int cmp = nonoverlapping_array_refs_p (component_refs1.pop (), | |
1482 component_refs2.pop ()); | |
1483 if (cmp == 1 && !partial_overlap) | |
1484 { | |
1485 ++alias_stats | |
1486 .nonoverlapping_refs_since_match_p_no_alias; | |
1487 return 1; | |
1488 } | |
1489 partial_overlap = false; | |
1490 if (cmp == -1) | |
1491 seen_unmatched_ref_p = true; | |
1492 } | |
1493 } | |
1494 | |
1495 /* Next look for component_refs. */ | |
913 do | 1496 do |
914 { | 1497 { |
915 if (component_refs1.is_empty ()) | 1498 if (component_refs1.is_empty ()) |
916 return false; | 1499 { |
1500 ++alias_stats | |
1501 .nonoverlapping_refs_since_match_p_must_overlap; | |
1502 return 0; | |
1503 } | |
917 ref1 = component_refs1.pop (); | 1504 ref1 = component_refs1.pop (); |
1505 if (TREE_CODE (ref1) != COMPONENT_REF) | |
1506 seen_unmatched_ref_p = true; | |
918 } | 1507 } |
919 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0)))); | 1508 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0)))); |
920 | 1509 |
921 do | 1510 do |
922 { | 1511 { |
923 if (component_refs2.is_empty ()) | 1512 if (component_refs2.is_empty ()) |
924 return false; | 1513 { |
1514 ++alias_stats | |
1515 .nonoverlapping_refs_since_match_p_must_overlap; | |
1516 return 0; | |
1517 } | |
925 ref2 = component_refs2.pop (); | 1518 ref2 = component_refs2.pop (); |
1519 if (TREE_CODE (ref2) != COMPONENT_REF) | |
1520 seen_unmatched_ref_p = true; | |
926 } | 1521 } |
927 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0)))); | 1522 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0)))); |
928 | 1523 |
929 /* Beware of BIT_FIELD_REF. */ | 1524 /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors |
930 if (TREE_CODE (ref1) != COMPONENT_REF | 1525 earlier. */ |
931 || TREE_CODE (ref2) != COMPONENT_REF) | 1526 gcc_checking_assert (TREE_CODE (ref1) == COMPONENT_REF |
932 return false; | 1527 && TREE_CODE (ref2) == COMPONENT_REF); |
933 | 1528 |
934 tree field1 = TREE_OPERAND (ref1, 1); | 1529 tree field1 = TREE_OPERAND (ref1, 1); |
935 tree field2 = TREE_OPERAND (ref2, 1); | 1530 tree field2 = TREE_OPERAND (ref2, 1); |
936 | 1531 |
937 /* ??? We cannot simply use the type of operand #0 of the refs here | 1532 /* ??? We cannot simply use the type of operand #0 of the refs here |
938 as the Fortran compiler smuggles type punning into COMPONENT_REFs | 1533 as the Fortran compiler smuggles type punning into COMPONENT_REFs |
939 for common blocks instead of using unions like everyone else. */ | 1534 for common blocks instead of using unions like everyone else. */ |
940 tree type1 = DECL_CONTEXT (field1); | 1535 tree type1 = DECL_CONTEXT (field1); |
941 tree type2 = DECL_CONTEXT (field2); | 1536 tree type2 = DECL_CONTEXT (field2); |
942 | 1537 |
943 /* We cannot disambiguate fields in a union or qualified union. */ | 1538 partial_overlap = false; |
944 if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE) | 1539 |
945 return false; | 1540 /* If we skipped array refs on type of different sizes, we can |
946 | 1541 no longer be sure that there are not partial overlaps. */ |
947 if (field1 != field2) | 1542 if (seen_unmatched_ref_p |
948 { | 1543 && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0)) |
949 /* A field and its representative need to be considered the | 1544 { |
950 same. */ | 1545 ++alias_stats |
951 if (DECL_BIT_FIELD_REPRESENTATIVE (field1) == field2 | 1546 .nonoverlapping_refs_since_match_p_may_alias; |
952 || DECL_BIT_FIELD_REPRESENTATIVE (field2) == field1) | 1547 return -1; |
953 return false; | 1548 } |
954 /* Different fields of the same record type cannot overlap. | 1549 |
955 ??? Bitfields can overlap at RTL level so punt on them. */ | 1550 int cmp = nonoverlapping_component_refs_p_1 (field1, field2); |
956 if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2)) | 1551 if (cmp == -1) |
957 return false; | 1552 { |
958 return true; | 1553 ++alias_stats |
959 } | 1554 .nonoverlapping_refs_since_match_p_may_alias; |
960 } | 1555 return -1; |
961 | 1556 } |
962 return false; | 1557 else if (cmp == 1) |
1558 { | |
1559 ++alias_stats | |
1560 .nonoverlapping_refs_since_match_p_no_alias; | |
1561 return 1; | |
1562 } | |
1563 } | |
1564 | |
1565 ++alias_stats.nonoverlapping_refs_since_match_p_must_overlap; | |
1566 return 0; | |
1567 } | |
1568 | |
1569 /* Return TYPE_UID which can be used to match record types we consider | |
1570 same for TBAA purposes. */ | |
1571 | |
1572 static inline int | |
1573 ncr_type_uid (const_tree field) | |
1574 { | |
1575 /* ??? We cannot simply use the type of operand #0 of the refs here | |
1576 as the Fortran compiler smuggles type punning into COMPONENT_REFs | |
1577 for common blocks instead of using unions like everyone else. */ | |
1578 tree type = DECL_FIELD_CONTEXT (field); | |
1579 /* With LTO types considered same_type_for_tbaa_p | |
1580 from different translation unit may not have same | |
1581 main variant. They however have same TYPE_CANONICAL. */ | |
1582 if (TYPE_CANONICAL (type)) | |
1583 return TYPE_UID (TYPE_CANONICAL (type)); | |
1584 return TYPE_UID (type); | |
963 } | 1585 } |
964 | 1586 |
965 /* qsort compare function to sort FIELD_DECLs after their | 1587 /* qsort compare function to sort FIELD_DECLs after their |
966 DECL_FIELD_CONTEXT TYPE_UID. */ | 1588 DECL_FIELD_CONTEXT TYPE_UID. */ |
967 | 1589 |
968 static inline int | 1590 static inline int |
969 ncr_compar (const void *field1_, const void *field2_) | 1591 ncr_compar (const void *field1_, const void *field2_) |
970 { | 1592 { |
971 const_tree field1 = *(const_tree *) const_cast <void *>(field1_); | 1593 const_tree field1 = *(const_tree *) const_cast <void *>(field1_); |
972 const_tree field2 = *(const_tree *) const_cast <void *>(field2_); | 1594 const_tree field2 = *(const_tree *) const_cast <void *>(field2_); |
973 unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1)); | 1595 unsigned int uid1 = ncr_type_uid (field1); |
974 unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2)); | 1596 unsigned int uid2 = ncr_type_uid (field2); |
1597 | |
975 if (uid1 < uid2) | 1598 if (uid1 < uid2) |
976 return -1; | 1599 return -1; |
977 else if (uid1 > uid2) | 1600 else if (uid1 > uid2) |
978 return 1; | 1601 return 1; |
979 return 0; | 1602 return 0; |
980 } | 1603 } |
981 | 1604 |
982 /* Return true if we can determine that the fields referenced cannot | 1605 /* Return true if we can determine that the fields referenced cannot |
983 overlap for any pair of objects. */ | 1606 overlap for any pair of objects. This relies on TBAA. */ |
984 | 1607 |
985 static bool | 1608 static bool |
986 nonoverlapping_component_refs_p (const_tree x, const_tree y) | 1609 nonoverlapping_component_refs_p (const_tree x, const_tree y) |
987 { | 1610 { |
1611 /* Early return if we have nothing to do. | |
1612 | |
1613 Do not consider this as may-alias for stats - it is more useful | |
1614 to have information how many disambiguations happened provided that | |
1615 the query was meaningful. */ | |
988 if (!flag_strict_aliasing | 1616 if (!flag_strict_aliasing |
989 || !x || !y | 1617 || !x || !y |
990 || TREE_CODE (x) != COMPONENT_REF | 1618 || !handled_component_p (x) |
991 || TREE_CODE (y) != COMPONENT_REF) | 1619 || !handled_component_p (y)) |
992 return false; | 1620 return false; |
993 | 1621 |
994 auto_vec<const_tree, 16> fieldsx; | 1622 auto_vec<const_tree, 16> fieldsx; |
995 while (TREE_CODE (x) == COMPONENT_REF) | 1623 while (handled_component_p (x)) |
996 { | 1624 { |
997 tree field = TREE_OPERAND (x, 1); | 1625 if (TREE_CODE (x) == COMPONENT_REF) |
998 tree type = DECL_FIELD_CONTEXT (field); | 1626 { |
999 if (TREE_CODE (type) == RECORD_TYPE) | 1627 tree field = TREE_OPERAND (x, 1); |
1000 fieldsx.safe_push (field); | 1628 tree type = DECL_FIELD_CONTEXT (field); |
1629 if (TREE_CODE (type) == RECORD_TYPE) | |
1630 fieldsx.safe_push (field); | |
1631 } | |
1632 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR | |
1633 || TREE_CODE (x) == BIT_FIELD_REF) | |
1634 fieldsx.truncate (0); | |
1001 x = TREE_OPERAND (x, 0); | 1635 x = TREE_OPERAND (x, 0); |
1002 } | 1636 } |
1003 if (fieldsx.length () == 0) | 1637 if (fieldsx.length () == 0) |
1004 return false; | 1638 return false; |
1005 auto_vec<const_tree, 16> fieldsy; | 1639 auto_vec<const_tree, 16> fieldsy; |
1006 while (TREE_CODE (y) == COMPONENT_REF) | 1640 while (handled_component_p (y)) |
1007 { | 1641 { |
1008 tree field = TREE_OPERAND (y, 1); | 1642 if (TREE_CODE (y) == COMPONENT_REF) |
1009 tree type = DECL_FIELD_CONTEXT (field); | 1643 { |
1010 if (TREE_CODE (type) == RECORD_TYPE) | 1644 tree field = TREE_OPERAND (y, 1); |
1011 fieldsy.safe_push (TREE_OPERAND (y, 1)); | 1645 tree type = DECL_FIELD_CONTEXT (field); |
1646 if (TREE_CODE (type) == RECORD_TYPE) | |
1647 fieldsy.safe_push (TREE_OPERAND (y, 1)); | |
1648 } | |
1649 else if (TREE_CODE (y) == VIEW_CONVERT_EXPR | |
1650 || TREE_CODE (y) == BIT_FIELD_REF) | |
1651 fieldsy.truncate (0); | |
1012 y = TREE_OPERAND (y, 0); | 1652 y = TREE_OPERAND (y, 0); |
1013 } | 1653 } |
1014 if (fieldsy.length () == 0) | 1654 if (fieldsy.length () == 0) |
1015 return false; | 1655 { |
1656 ++alias_stats.nonoverlapping_component_refs_p_may_alias; | |
1657 return false; | |
1658 } | |
1016 | 1659 |
1017 /* Most common case first. */ | 1660 /* Most common case first. */ |
1018 if (fieldsx.length () == 1 | 1661 if (fieldsx.length () == 1 |
1019 && fieldsy.length () == 1) | 1662 && fieldsy.length () == 1) |
1020 return ((DECL_FIELD_CONTEXT (fieldsx[0]) | 1663 { |
1021 == DECL_FIELD_CONTEXT (fieldsy[0])) | 1664 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx[0]), |
1022 && fieldsx[0] != fieldsy[0] | 1665 DECL_FIELD_CONTEXT (fieldsy[0])) == 1 |
1023 && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0]))); | 1666 && nonoverlapping_component_refs_p_1 (fieldsx[0], fieldsy[0]) == 1) |
1667 { | |
1668 ++alias_stats.nonoverlapping_component_refs_p_no_alias; | |
1669 return true; | |
1670 } | |
1671 else | |
1672 { | |
1673 ++alias_stats.nonoverlapping_component_refs_p_may_alias; | |
1674 return false; | |
1675 } | |
1676 } | |
1024 | 1677 |
1025 if (fieldsx.length () == 2) | 1678 if (fieldsx.length () == 2) |
1026 { | 1679 { |
1027 if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1) | 1680 if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1) |
1028 std::swap (fieldsx[0], fieldsx[1]); | 1681 std::swap (fieldsx[0], fieldsx[1]); |
1041 unsigned i = 0, j = 0; | 1694 unsigned i = 0, j = 0; |
1042 do | 1695 do |
1043 { | 1696 { |
1044 const_tree fieldx = fieldsx[i]; | 1697 const_tree fieldx = fieldsx[i]; |
1045 const_tree fieldy = fieldsy[j]; | 1698 const_tree fieldy = fieldsy[j]; |
1046 tree typex = DECL_FIELD_CONTEXT (fieldx); | 1699 |
1047 tree typey = DECL_FIELD_CONTEXT (fieldy); | 1700 /* We're left with accessing different fields of a structure, |
1048 if (typex == typey) | 1701 no possible overlap. */ |
1049 { | 1702 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx), |
1050 /* We're left with accessing different fields of a structure, | 1703 DECL_FIELD_CONTEXT (fieldy)) == 1 |
1051 no possible overlap. */ | 1704 && nonoverlapping_component_refs_p_1 (fieldx, fieldy) == 1) |
1052 if (fieldx != fieldy) | 1705 { |
1053 { | 1706 ++alias_stats.nonoverlapping_component_refs_p_no_alias; |
1054 /* A field and its representative need to be considered the | 1707 return true; |
1055 same. */ | 1708 } |
1056 if (DECL_BIT_FIELD_REPRESENTATIVE (fieldx) == fieldy | 1709 |
1057 || DECL_BIT_FIELD_REPRESENTATIVE (fieldy) == fieldx) | 1710 if (ncr_type_uid (fieldx) < ncr_type_uid (fieldy)) |
1058 return false; | |
1059 /* Different fields of the same record type cannot overlap. | |
1060 ??? Bitfields can overlap at RTL level so punt on them. */ | |
1061 if (DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy)) | |
1062 return false; | |
1063 return true; | |
1064 } | |
1065 } | |
1066 if (TYPE_UID (typex) < TYPE_UID (typey)) | |
1067 { | 1711 { |
1068 i++; | 1712 i++; |
1069 if (i == fieldsx.length ()) | 1713 if (i == fieldsx.length ()) |
1070 break; | 1714 break; |
1071 } | 1715 } |
1076 break; | 1720 break; |
1077 } | 1721 } |
1078 } | 1722 } |
1079 while (1); | 1723 while (1); |
1080 | 1724 |
1725 ++alias_stats.nonoverlapping_component_refs_p_may_alias; | |
1081 return false; | 1726 return false; |
1082 } | 1727 } |
1083 | 1728 |
1084 | 1729 |
1085 /* Return true if two memory references based on the variables BASE1 | 1730 /* Return true if two memory references based on the variables BASE1 |
1088 if non-NULL are the complete memory reference trees. */ | 1733 if non-NULL are the complete memory reference trees. */ |
1089 | 1734 |
1090 static bool | 1735 static bool |
1091 decl_refs_may_alias_p (tree ref1, tree base1, | 1736 decl_refs_may_alias_p (tree ref1, tree base1, |
1092 poly_int64 offset1, poly_int64 max_size1, | 1737 poly_int64 offset1, poly_int64 max_size1, |
1738 poly_int64 size1, | |
1093 tree ref2, tree base2, | 1739 tree ref2, tree base2, |
1094 poly_int64 offset2, poly_int64 max_size2) | 1740 poly_int64 offset2, poly_int64 max_size2, |
1741 poly_int64 size2) | |
1095 { | 1742 { |
1096 gcc_checking_assert (DECL_P (base1) && DECL_P (base2)); | 1743 gcc_checking_assert (DECL_P (base1) && DECL_P (base2)); |
1097 | 1744 |
1098 /* If both references are based on different variables, they cannot alias. */ | 1745 /* If both references are based on different variables, they cannot alias. */ |
1099 if (compare_base_decls (base1, base2) == 0) | 1746 if (compare_base_decls (base1, base2) == 0) |
1102 /* If both references are based on the same variable, they cannot alias if | 1749 /* If both references are based on the same variable, they cannot alias if |
1103 the accesses do not overlap. */ | 1750 the accesses do not overlap. */ |
1104 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2)) | 1751 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2)) |
1105 return false; | 1752 return false; |
1106 | 1753 |
1754 /* If there is must alias, there is no use disambiguating further. */ | |
1755 if (known_eq (size1, max_size1) && known_eq (size2, max_size2)) | |
1756 return true; | |
1757 | |
1107 /* For components with variable position, the above test isn't sufficient, | 1758 /* For components with variable position, the above test isn't sufficient, |
1108 so we disambiguate component references manually. */ | 1759 so we disambiguate component references manually. */ |
1109 if (ref1 && ref2 | 1760 if (ref1 && ref2 |
1110 && handled_component_p (ref1) && handled_component_p (ref2) | 1761 && handled_component_p (ref1) && handled_component_p (ref2) |
1111 && nonoverlapping_component_refs_of_decl_p (ref1, ref2)) | 1762 && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1) |
1112 return false; | 1763 return false; |
1113 | 1764 |
1114 return true; | 1765 return true; |
1115 } | 1766 } |
1116 | 1767 |
1122 if non-NULL are the complete memory reference trees. */ | 1773 if non-NULL are the complete memory reference trees. */ |
1123 | 1774 |
1124 static bool | 1775 static bool |
1125 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1, | 1776 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1, |
1126 poly_int64 offset1, poly_int64 max_size1, | 1777 poly_int64 offset1, poly_int64 max_size1, |
1778 poly_int64 size1, | |
1127 alias_set_type ref1_alias_set, | 1779 alias_set_type ref1_alias_set, |
1128 alias_set_type base1_alias_set, | 1780 alias_set_type base1_alias_set, |
1129 tree ref2 ATTRIBUTE_UNUSED, tree base2, | 1781 tree ref2 ATTRIBUTE_UNUSED, tree base2, |
1130 poly_int64 offset2, poly_int64 max_size2, | 1782 poly_int64 offset2, poly_int64 max_size2, |
1783 poly_int64 size2, | |
1131 alias_set_type ref2_alias_set, | 1784 alias_set_type ref2_alias_set, |
1132 alias_set_type base2_alias_set, bool tbaa_p) | 1785 alias_set_type base2_alias_set, bool tbaa_p) |
1133 { | 1786 { |
1134 tree ptr1; | 1787 tree ptr1; |
1135 tree ptrtype1, dbase2; | 1788 tree ptrtype1, dbase2; |
1156 | 1809 |
1157 /* Disambiguations that rely on strict aliasing rules follow. */ | 1810 /* Disambiguations that rely on strict aliasing rules follow. */ |
1158 if (!flag_strict_aliasing || !tbaa_p) | 1811 if (!flag_strict_aliasing || !tbaa_p) |
1159 return true; | 1812 return true; |
1160 | 1813 |
1161 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1)); | |
1162 | |
1163 /* If the alias set for a pointer access is zero all bets are off. */ | 1814 /* If the alias set for a pointer access is zero all bets are off. */ |
1164 if (base1_alias_set == 0) | 1815 if (base1_alias_set == 0 || base2_alias_set == 0) |
1165 return true; | 1816 return true; |
1166 | 1817 |
1167 /* When we are trying to disambiguate an access with a pointer dereference | 1818 /* When we are trying to disambiguate an access with a pointer dereference |
1168 as base versus one with a decl as base we can use both the size | 1819 as base versus one with a decl as base we can use both the size |
1169 of the decl and its dynamic type for extra disambiguation. | 1820 of the decl and its dynamic type for extra disambiguation. |
1177 static type. Then we can check | 1828 static type. Then we can check |
1178 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */ | 1829 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */ |
1179 if (base1_alias_set != base2_alias_set | 1830 if (base1_alias_set != base2_alias_set |
1180 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set)) | 1831 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set)) |
1181 return false; | 1832 return false; |
1833 | |
1834 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1)); | |
1835 | |
1182 /* If the size of the access relevant for TBAA through the pointer | 1836 /* If the size of the access relevant for TBAA through the pointer |
1183 is bigger than the size of the decl we can't possibly access the | 1837 is bigger than the size of the decl we can't possibly access the |
1184 decl via that pointer. */ | 1838 decl via that pointer. */ |
1185 if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1)) | 1839 if (/* ??? This in turn may run afoul when a decl of type T which is |
1186 && poly_int_tree_p (DECL_SIZE (base2)) | |
1187 && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (ptrtype1))) | |
1188 /* ??? This in turn may run afoul when a decl of type T which is | |
1189 a member of union type U is accessed through a pointer to | 1840 a member of union type U is accessed through a pointer to |
1190 type U and sizeof T is smaller than sizeof U. */ | 1841 type U and sizeof T is smaller than sizeof U. */ |
1191 && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE | 1842 TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE |
1192 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE | 1843 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE |
1193 && known_lt (wi::to_poly_widest (DECL_SIZE (base2)), | 1844 && compare_sizes (DECL_SIZE (base2), |
1194 wi::to_poly_widest (TYPE_SIZE (TREE_TYPE (ptrtype1))))) | 1845 TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0) |
1195 return false; | 1846 return false; |
1196 | 1847 |
1197 if (!ref2) | 1848 if (!ref2) |
1198 return true; | 1849 return true; |
1199 | 1850 |
1204 dbase2 = TREE_OPERAND (dbase2, 0); | 1855 dbase2 = TREE_OPERAND (dbase2, 0); |
1205 poly_int64 doffset1 = offset1; | 1856 poly_int64 doffset1 = offset1; |
1206 poly_offset_int doffset2 = offset2; | 1857 poly_offset_int doffset2 = offset2; |
1207 if (TREE_CODE (dbase2) == MEM_REF | 1858 if (TREE_CODE (dbase2) == MEM_REF |
1208 || TREE_CODE (dbase2) == TARGET_MEM_REF) | 1859 || TREE_CODE (dbase2) == TARGET_MEM_REF) |
1209 doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT; | 1860 { |
1210 | 1861 doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT; |
1211 /* If either reference is view-converted, give up now. */ | 1862 tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1)); |
1212 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1 | 1863 /* If second reference is view-converted, give up now. */ |
1213 || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1) | 1864 if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1) |
1865 return true; | |
1866 } | |
1867 | |
1868 /* If first reference is view-converted, give up now. */ | |
1869 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1) | |
1214 return true; | 1870 return true; |
1215 | 1871 |
1216 /* If both references are through the same type, they do not alias | 1872 /* If both references are through the same type, they do not alias |
1217 if the accesses do not overlap. This does extra disambiguation | 1873 if the accesses do not overlap. This does extra disambiguation |
1218 for mixed/pointer accesses but requires strict aliasing. | 1874 for mixed/pointer accesses but requires strict aliasing. |
1219 For MEM_REFs we require that the component-ref offset we computed | 1875 For MEM_REFs we require that the component-ref offset we computed |
1220 is relative to the start of the type which we ensure by | 1876 is relative to the start of the type which we ensure by |
1221 comparing rvalue and access type and disregarding the constant | 1877 comparing rvalue and access type and disregarding the constant |
1222 pointer offset. */ | 1878 pointer offset. |
1223 if ((TREE_CODE (base1) != TARGET_MEM_REF | 1879 |
1880 But avoid treating variable length arrays as "objects", instead assume they | |
1881 can overlap by an exact multiple of their element size. | |
1882 See gcc.dg/torture/alias-2.c. */ | |
1883 if (((TREE_CODE (base1) != TARGET_MEM_REF | |
1224 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1))) | 1884 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1))) |
1885 && (TREE_CODE (dbase2) != TARGET_MEM_REF | |
1886 || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2)))) | |
1225 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1) | 1887 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1) |
1226 return ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2); | 1888 { |
1227 | 1889 bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE |
1228 if (ref1 && ref2 | 1890 && (TYPE_SIZE (TREE_TYPE (base1)) |
1229 && nonoverlapping_component_refs_p (ref1, ref2)) | 1891 && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) |
1230 return false; | 1892 != INTEGER_CST)); |
1893 if (!partial_overlap | |
1894 && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2)) | |
1895 return false; | |
1896 if (!ref1 || !ref2 | |
1897 /* If there is must alias, there is no use disambiguating further. */ | |
1898 || (!partial_overlap | |
1899 && known_eq (size1, max_size1) && known_eq (size2, max_size2))) | |
1900 return true; | |
1901 int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2, | |
1902 partial_overlap); | |
1903 if (res == -1) | |
1904 return !nonoverlapping_component_refs_p (ref1, ref2); | |
1905 return !res; | |
1906 } | |
1231 | 1907 |
1232 /* Do access-path based disambiguation. */ | 1908 /* Do access-path based disambiguation. */ |
1233 if (ref1 && ref2 | 1909 if (ref1 && ref2 |
1234 && (handled_component_p (ref1) || handled_component_p (ref2))) | 1910 && (handled_component_p (ref1) || handled_component_p (ref2))) |
1235 return aliasing_component_refs_p (ref1, | 1911 return aliasing_component_refs_p (ref1, |
1236 ref1_alias_set, base1_alias_set, | 1912 ref1_alias_set, base1_alias_set, |
1237 offset1, max_size1, | 1913 offset1, max_size1, |
1238 ref2, | 1914 ref2, |
1239 ref2_alias_set, base2_alias_set, | 1915 ref2_alias_set, base2_alias_set, |
1240 offset2, max_size2, true); | 1916 offset2, max_size2); |
1241 | 1917 |
1242 return true; | 1918 return true; |
1243 } | 1919 } |
1244 | 1920 |
1245 /* Return true if two indirect references based on *PTR1 | 1921 /* Return true if two indirect references based on *PTR1 |
1250 if non-NULL are the complete memory reference trees. */ | 1926 if non-NULL are the complete memory reference trees. */ |
1251 | 1927 |
1252 static bool | 1928 static bool |
1253 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1, | 1929 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1, |
1254 poly_int64 offset1, poly_int64 max_size1, | 1930 poly_int64 offset1, poly_int64 max_size1, |
1931 poly_int64 size1, | |
1255 alias_set_type ref1_alias_set, | 1932 alias_set_type ref1_alias_set, |
1256 alias_set_type base1_alias_set, | 1933 alias_set_type base1_alias_set, |
1257 tree ref2 ATTRIBUTE_UNUSED, tree base2, | 1934 tree ref2 ATTRIBUTE_UNUSED, tree base2, |
1258 poly_int64 offset2, poly_int64 max_size2, | 1935 poly_int64 offset2, poly_int64 max_size2, |
1936 poly_int64 size2, | |
1259 alias_set_type ref2_alias_set, | 1937 alias_set_type ref2_alias_set, |
1260 alias_set_type base2_alias_set, bool tbaa_p) | 1938 alias_set_type base2_alias_set, bool tbaa_p) |
1261 { | 1939 { |
1262 tree ptr1; | 1940 tree ptr1; |
1263 tree ptr2; | 1941 tree ptr2; |
1295 && operand_equal_p (TMR_INDEX2 (base1), | 1973 && operand_equal_p (TMR_INDEX2 (base1), |
1296 TMR_INDEX2 (base2), 0)))))) | 1974 TMR_INDEX2 (base2), 0)))))) |
1297 { | 1975 { |
1298 poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT; | 1976 poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT; |
1299 poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT; | 1977 poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT; |
1300 return ranges_maybe_overlap_p (offset1 + moff1, max_size1, | 1978 if (!ranges_maybe_overlap_p (offset1 + moff1, max_size1, |
1301 offset2 + moff2, max_size2); | 1979 offset2 + moff2, max_size2)) |
1980 return false; | |
1981 /* If there is must alias, there is no use disambiguating further. */ | |
1982 if (known_eq (size1, max_size1) && known_eq (size2, max_size2)) | |
1983 return true; | |
1984 if (ref1 && ref2) | |
1985 { | |
1986 int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, | |
1987 false); | |
1988 if (res != -1) | |
1989 return !res; | |
1990 } | |
1302 } | 1991 } |
1303 if (!ptr_derefs_may_alias_p (ptr1, ptr2)) | 1992 if (!ptr_derefs_may_alias_p (ptr1, ptr2)) |
1304 return false; | 1993 return false; |
1305 | 1994 |
1306 /* Disambiguations that rely on strict aliasing rules follow. */ | 1995 /* Disambiguations that rely on strict aliasing rules follow. */ |
1311 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1)); | 2000 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1)); |
1312 | 2001 |
1313 /* If the alias set for a pointer access is zero all bets are off. */ | 2002 /* If the alias set for a pointer access is zero all bets are off. */ |
1314 if (base1_alias_set == 0 | 2003 if (base1_alias_set == 0 |
1315 || base2_alias_set == 0) | 2004 || base2_alias_set == 0) |
2005 return true; | |
2006 | |
2007 /* Do type-based disambiguation. */ | |
2008 if (base1_alias_set != base2_alias_set | |
2009 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set)) | |
2010 return false; | |
2011 | |
2012 /* If either reference is view-converted, give up now. */ | |
2013 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1 | |
2014 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1) | |
1316 return true; | 2015 return true; |
1317 | 2016 |
1318 /* If both references are through the same type, they do not alias | 2017 /* If both references are through the same type, they do not alias |
1319 if the accesses do not overlap. This does extra disambiguation | 2018 if the accesses do not overlap. This does extra disambiguation |
1320 for mixed/pointer accesses but requires strict aliasing. */ | 2019 for mixed/pointer accesses but requires strict aliasing. */ |
1321 if ((TREE_CODE (base1) != TARGET_MEM_REF | 2020 if ((TREE_CODE (base1) != TARGET_MEM_REF |
1322 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1))) | 2021 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1))) |
1323 && (TREE_CODE (base2) != TARGET_MEM_REF | 2022 && (TREE_CODE (base2) != TARGET_MEM_REF |
1324 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))) | 2023 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))) |
1325 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1 | |
1326 && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1 | |
1327 && same_type_for_tbaa (TREE_TYPE (ptrtype1), | 2024 && same_type_for_tbaa (TREE_TYPE (ptrtype1), |
1328 TREE_TYPE (ptrtype2)) == 1 | 2025 TREE_TYPE (ptrtype2)) == 1) |
2026 { | |
1329 /* But avoid treating arrays as "objects", instead assume they | 2027 /* But avoid treating arrays as "objects", instead assume they |
1330 can overlap by an exact multiple of their element size. */ | 2028 can overlap by an exact multiple of their element size. |
1331 && TREE_CODE (TREE_TYPE (ptrtype1)) != ARRAY_TYPE) | 2029 See gcc.dg/torture/alias-2.c. */ |
1332 return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2); | 2030 bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE; |
1333 | 2031 |
1334 /* Do type-based disambiguation. */ | 2032 if (!partial_overlap |
1335 if (base1_alias_set != base2_alias_set | 2033 && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2)) |
1336 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set)) | 2034 return false; |
1337 return false; | 2035 if (!ref1 || !ref2 |
1338 | 2036 || (!partial_overlap |
1339 /* If either reference is view-converted, give up now. */ | 2037 && known_eq (size1, max_size1) && known_eq (size2, max_size2))) |
1340 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1 | 2038 return true; |
1341 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1) | 2039 int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2, |
1342 return true; | 2040 partial_overlap); |
1343 | 2041 if (res == -1) |
1344 if (ref1 && ref2 | 2042 return !nonoverlapping_component_refs_p (ref1, ref2); |
1345 && nonoverlapping_component_refs_p (ref1, ref2)) | 2043 return !res; |
1346 return false; | 2044 } |
1347 | 2045 |
1348 /* Do access-path based disambiguation. */ | 2046 /* Do access-path based disambiguation. */ |
1349 if (ref1 && ref2 | 2047 if (ref1 && ref2 |
1350 && (handled_component_p (ref1) || handled_component_p (ref2))) | 2048 && (handled_component_p (ref1) || handled_component_p (ref2))) |
1351 return aliasing_component_refs_p (ref1, | 2049 return aliasing_component_refs_p (ref1, |
1352 ref1_alias_set, base1_alias_set, | 2050 ref1_alias_set, base1_alias_set, |
1353 offset1, max_size1, | 2051 offset1, max_size1, |
1354 ref2, | 2052 ref2, |
1355 ref2_alias_set, base2_alias_set, | 2053 ref2_alias_set, base2_alias_set, |
1356 offset2, max_size2, false); | 2054 offset2, max_size2); |
1357 | 2055 |
1358 return true; | 2056 return true; |
1359 } | 2057 } |
1360 | 2058 |
1361 /* Return true, if the two memory references REF1 and REF2 may alias. */ | 2059 /* Return true, if the two memory references REF1 and REF2 may alias. */ |
1362 | 2060 |
1363 bool | 2061 static bool |
1364 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p) | 2062 refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p) |
1365 { | 2063 { |
1366 tree base1, base2; | 2064 tree base1, base2; |
1367 poly_int64 offset1 = 0, offset2 = 0; | 2065 poly_int64 offset1 = 0, offset2 = 0; |
1368 poly_int64 max_size1 = -1, max_size2 = -1; | 2066 poly_int64 max_size1 = -1, max_size2 = -1; |
1369 bool var1_p, var2_p, ind1_p, ind2_p; | 2067 bool var1_p, var2_p, ind1_p, ind2_p; |
1426 GCC extension of allowing type-punning through unions. */ | 2124 GCC extension of allowing type-punning through unions. */ |
1427 var1_p = DECL_P (base1); | 2125 var1_p = DECL_P (base1); |
1428 var2_p = DECL_P (base2); | 2126 var2_p = DECL_P (base2); |
1429 if (var1_p && var2_p) | 2127 if (var1_p && var2_p) |
1430 return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1, | 2128 return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1, |
1431 ref2->ref, base2, offset2, max_size2); | 2129 ref1->size, |
2130 ref2->ref, base2, offset2, max_size2, | |
2131 ref2->size); | |
1432 | 2132 |
1433 /* Handle restrict based accesses. | 2133 /* Handle restrict based accesses. |
1434 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that | 2134 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that |
1435 here. */ | 2135 here. */ |
1436 tree rbase1 = base1; | 2136 tree rbase1 = base1; |
1494 return false; | 2194 return false; |
1495 | 2195 |
1496 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */ | 2196 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */ |
1497 if (var1_p && ind2_p) | 2197 if (var1_p && ind2_p) |
1498 return indirect_ref_may_alias_decl_p (ref2->ref, base2, | 2198 return indirect_ref_may_alias_decl_p (ref2->ref, base2, |
1499 offset2, max_size2, | 2199 offset2, max_size2, ref2->size, |
1500 ao_ref_alias_set (ref2), | 2200 ao_ref_alias_set (ref2), |
1501 ao_ref_base_alias_set (ref2), | 2201 ao_ref_base_alias_set (ref2), |
1502 ref1->ref, base1, | 2202 ref1->ref, base1, |
1503 offset1, max_size1, | 2203 offset1, max_size1, ref1->size, |
1504 ao_ref_alias_set (ref1), | 2204 ao_ref_alias_set (ref1), |
1505 ao_ref_base_alias_set (ref1), | 2205 ao_ref_base_alias_set (ref1), |
1506 tbaa_p); | 2206 tbaa_p); |
1507 else if (ind1_p && ind2_p) | 2207 else if (ind1_p && ind2_p) |
1508 return indirect_refs_may_alias_p (ref1->ref, base1, | 2208 return indirect_refs_may_alias_p (ref1->ref, base1, |
1509 offset1, max_size1, | 2209 offset1, max_size1, ref1->size, |
1510 ao_ref_alias_set (ref1), | 2210 ao_ref_alias_set (ref1), |
1511 ao_ref_base_alias_set (ref1), | 2211 ao_ref_base_alias_set (ref1), |
1512 ref2->ref, base2, | 2212 ref2->ref, base2, |
1513 offset2, max_size2, | 2213 offset2, max_size2, ref2->size, |
1514 ao_ref_alias_set (ref2), | 2214 ao_ref_alias_set (ref2), |
1515 ao_ref_base_alias_set (ref2), | 2215 ao_ref_base_alias_set (ref2), |
1516 tbaa_p); | 2216 tbaa_p); |
1517 | 2217 |
1518 gcc_unreachable (); | 2218 gcc_unreachable (); |
1519 } | 2219 } |
1520 | 2220 |
1521 static bool | 2221 /* Return true, if the two memory references REF1 and REF2 may alias |
1522 refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p) | 2222 and update statistics. */ |
1523 { | |
1524 ao_ref r1; | |
1525 ao_ref_init (&r1, ref1); | |
1526 return refs_may_alias_p_1 (&r1, ref2, tbaa_p); | |
1527 } | |
1528 | 2223 |
1529 bool | 2224 bool |
1530 refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p) | 2225 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p) |
1531 { | 2226 { |
1532 ao_ref r1, r2; | 2227 bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p); |
1533 bool res; | |
1534 ao_ref_init (&r1, ref1); | |
1535 ao_ref_init (&r2, ref2); | |
1536 res = refs_may_alias_p_1 (&r1, &r2, tbaa_p); | |
1537 if (res) | 2228 if (res) |
1538 ++alias_stats.refs_may_alias_p_may_alias; | 2229 ++alias_stats.refs_may_alias_p_may_alias; |
1539 else | 2230 else |
1540 ++alias_stats.refs_may_alias_p_no_alias; | 2231 ++alias_stats.refs_may_alias_p_no_alias; |
1541 return res; | 2232 return res; |
2233 } | |
2234 | |
2235 static bool | |
2236 refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p) | |
2237 { | |
2238 ao_ref r1; | |
2239 ao_ref_init (&r1, ref1); | |
2240 return refs_may_alias_p_1 (&r1, ref2, tbaa_p); | |
2241 } | |
2242 | |
2243 bool | |
2244 refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p) | |
2245 { | |
2246 ao_ref r1, r2; | |
2247 ao_ref_init (&r1, ref1); | |
2248 ao_ref_init (&r2, ref2); | |
2249 return refs_may_alias_p_1 (&r1, &r2, tbaa_p); | |
1542 } | 2250 } |
1543 | 2251 |
1544 /* Returns true if there is a anti-dependence for the STORE that | 2252 /* Returns true if there is a anti-dependence for the STORE that |
1545 executes after the LOAD. */ | 2253 executes after the LOAD. */ |
1546 | 2254 |
1819 /* Check if base is a global static variable that is not read | 2527 /* Check if base is a global static variable that is not read |
1820 by the function. */ | 2528 by the function. */ |
1821 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base)) | 2529 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base)) |
1822 { | 2530 { |
1823 struct cgraph_node *node = cgraph_node::get (callee); | 2531 struct cgraph_node *node = cgraph_node::get (callee); |
1824 bitmap not_read; | 2532 bitmap read; |
2533 int id; | |
1825 | 2534 |
1826 /* FIXME: Callee can be an OMP builtin that does not have a call graph | 2535 /* FIXME: Callee can be an OMP builtin that does not have a call graph |
1827 node yet. We should enforce that there are nodes for all decls in the | 2536 node yet. We should enforce that there are nodes for all decls in the |
1828 IL and remove this check instead. */ | 2537 IL and remove this check instead. */ |
1829 if (node | 2538 if (node |
1830 && (not_read = ipa_reference_get_not_read_global (node)) | 2539 && (id = ipa_reference_var_uid (base)) != -1 |
1831 && bitmap_bit_p (not_read, ipa_reference_var_uid (base))) | 2540 && (read = ipa_reference_get_read_global (node)) |
2541 && !bitmap_bit_p (read, id)) | |
1832 goto process_args; | 2542 goto process_args; |
1833 } | 2543 } |
1834 | 2544 |
1835 /* Check if the base variable is call-used. */ | 2545 /* Check if the base variable is call-used. */ |
1836 if (DECL_P (base)) | 2546 if (DECL_P (base)) |
2214 /* Check if base is a global static variable that is not written | 2924 /* Check if base is a global static variable that is not written |
2215 by the function. */ | 2925 by the function. */ |
2216 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base)) | 2926 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base)) |
2217 { | 2927 { |
2218 struct cgraph_node *node = cgraph_node::get (callee); | 2928 struct cgraph_node *node = cgraph_node::get (callee); |
2219 bitmap not_written; | 2929 bitmap written; |
2930 int id; | |
2220 | 2931 |
2221 if (node | 2932 if (node |
2222 && (not_written = ipa_reference_get_not_written_global (node)) | 2933 && (id = ipa_reference_var_uid (base)) != -1 |
2223 && bitmap_bit_p (not_written, ipa_reference_var_uid (base))) | 2934 && (written = ipa_reference_get_written_global (node)) |
2935 && !bitmap_bit_p (written, id)) | |
2224 return false; | 2936 return false; |
2225 } | 2937 } |
2226 | 2938 |
2227 /* Check if the base variable is call-clobbered. */ | 2939 /* Check if the base variable is call-clobbered. */ |
2228 if (DECL_P (base)) | 2940 if (DECL_P (base)) |
2362 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid)) | 3074 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid)) |
2363 return false; | 3075 return false; |
2364 | 3076 |
2365 /* Be conservative with non-call exceptions when the address might | 3077 /* Be conservative with non-call exceptions when the address might |
2366 be NULL. */ | 3078 be NULL. */ |
2367 if (flag_non_call_exceptions && pi->pt.null) | 3079 if (cfun->can_throw_non_call_exceptions && pi->pt.null) |
2368 return false; | 3080 return false; |
2369 | 3081 |
2370 /* Check that ptr points relative to obj. */ | 3082 /* Check that ptr points relative to obj. */ |
2371 unsigned int obj_uid = DECL_PT_UID (obj); | 3083 unsigned int obj_uid = DECL_PT_UID (obj); |
2372 if (obj_uid != pt_uid) | 3084 if (obj_uid != pt_uid) |
2528 case BUILT_IN_MEMPCPY_CHK: | 3240 case BUILT_IN_MEMPCPY_CHK: |
2529 case BUILT_IN_MEMMOVE_CHK: | 3241 case BUILT_IN_MEMMOVE_CHK: |
2530 case BUILT_IN_MEMSET_CHK: | 3242 case BUILT_IN_MEMSET_CHK: |
2531 case BUILT_IN_STRNCPY: | 3243 case BUILT_IN_STRNCPY: |
2532 case BUILT_IN_STPNCPY: | 3244 case BUILT_IN_STPNCPY: |
3245 case BUILT_IN_CALLOC: | |
2533 { | 3246 { |
2534 /* For a must-alias check we need to be able to constrain | 3247 /* For a must-alias check we need to be able to constrain |
2535 the access properly. */ | 3248 the access properly. */ |
2536 if (!ref->max_size_known_p ()) | 3249 if (!ref->max_size_known_p ()) |
2537 return false; | 3250 return false; |
2538 tree dest = gimple_call_arg (stmt, 0); | 3251 tree dest; |
2539 tree len = gimple_call_arg (stmt, 2); | 3252 tree len; |
3253 | |
3254 /* In execution order a calloc call will never kill | |
3255 anything. However, DSE will (ab)use this interface | |
3256 to ask if a calloc call writes the same memory locations | |
3257 as a later assignment, memset, etc. So handle calloc | |
3258 in the expected way. */ | |
3259 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC) | |
3260 { | |
3261 tree arg0 = gimple_call_arg (stmt, 0); | |
3262 tree arg1 = gimple_call_arg (stmt, 1); | |
3263 if (TREE_CODE (arg0) != INTEGER_CST | |
3264 || TREE_CODE (arg1) != INTEGER_CST) | |
3265 return false; | |
3266 | |
3267 dest = gimple_call_lhs (stmt); | |
3268 if (!dest) | |
3269 return false; | |
3270 len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1); | |
3271 } | |
3272 else | |
3273 { | |
3274 dest = gimple_call_arg (stmt, 0); | |
3275 len = gimple_call_arg (stmt, 2); | |
3276 } | |
2540 if (!poly_int_tree_p (len)) | 3277 if (!poly_int_tree_p (len)) |
2541 return false; | 3278 return false; |
2542 tree rbase = ref->base; | 3279 tree rbase = ref->base; |
2543 poly_offset_int roffset = ref->offset; | 3280 poly_offset_int roffset = ref->offset; |
2544 ao_ref dref; | 3281 ao_ref dref; |
2595 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand | 3332 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand |
2596 TARGET or a statement clobbering the memory reference REF in which | 3333 TARGET or a statement clobbering the memory reference REF in which |
2597 case false is returned. The walk starts with VUSE, one argument of PHI. */ | 3334 case false is returned. The walk starts with VUSE, one argument of PHI. */ |
2598 | 3335 |
2599 static bool | 3336 static bool |
2600 maybe_skip_until (gimple *phi, tree target, ao_ref *ref, | 3337 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb, |
2601 tree vuse, unsigned int *cnt, bitmap *visited, | 3338 ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit, |
2602 bool abort_on_visited, | 3339 bitmap *visited, bool abort_on_visited, |
2603 void *(*translate)(ao_ref *, tree, void *, bool *), | 3340 void *(*translate)(ao_ref *, tree, void *, translate_flags *), |
3341 translate_flags disambiguate_only, | |
2604 void *data) | 3342 void *data) |
2605 { | 3343 { |
2606 basic_block bb = gimple_bb (phi); | 3344 basic_block bb = gimple_bb (phi); |
2607 | 3345 |
2608 if (!*visited) | 3346 if (!*visited) |
2612 | 3350 |
2613 /* Walk until we hit the target. */ | 3351 /* Walk until we hit the target. */ |
2614 while (vuse != target) | 3352 while (vuse != target) |
2615 { | 3353 { |
2616 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse); | 3354 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse); |
3355 /* If we are searching for the target VUSE by walking up to | |
3356 TARGET_BB dominating the original PHI we are finished once | |
3357 we reach a default def or a definition in a block dominating | |
3358 that block. Update TARGET and return. */ | |
3359 if (!target | |
3360 && (gimple_nop_p (def_stmt) | |
3361 || dominated_by_p (CDI_DOMINATORS, | |
3362 target_bb, gimple_bb (def_stmt)))) | |
3363 { | |
3364 target = vuse; | |
3365 return true; | |
3366 } | |
3367 | |
2617 /* Recurse for PHI nodes. */ | 3368 /* Recurse for PHI nodes. */ |
2618 if (gimple_code (def_stmt) == GIMPLE_PHI) | 3369 if (gimple_code (def_stmt) == GIMPLE_PHI) |
2619 { | 3370 { |
2620 /* An already visited PHI node ends the walk successfully. */ | 3371 /* An already visited PHI node ends the walk successfully. */ |
2621 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt)))) | 3372 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt)))) |
2622 return !abort_on_visited; | 3373 return !abort_on_visited; |
2623 vuse = get_continuation_for_phi (def_stmt, ref, cnt, | 3374 vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit, |
2624 visited, abort_on_visited, | 3375 visited, abort_on_visited, |
2625 translate, data); | 3376 translate, data, disambiguate_only); |
2626 if (!vuse) | 3377 if (!vuse) |
2627 return false; | 3378 return false; |
2628 continue; | 3379 continue; |
2629 } | 3380 } |
2630 else if (gimple_nop_p (def_stmt)) | 3381 else if (gimple_nop_p (def_stmt)) |
2631 return false; | 3382 return false; |
2632 else | 3383 else |
2633 { | 3384 { |
2634 /* A clobbering statement or the end of the IL ends it failing. */ | 3385 /* A clobbering statement or the end of the IL ends it failing. */ |
2635 ++*cnt; | 3386 if ((int)limit <= 0) |
2636 if (stmt_may_clobber_ref_p_1 (def_stmt, ref)) | 3387 return false; |
3388 --limit; | |
3389 if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p)) | |
2637 { | 3390 { |
2638 bool disambiguate_only = true; | 3391 translate_flags tf = disambiguate_only; |
2639 if (translate | 3392 if (translate |
2640 && (*translate) (ref, vuse, data, &disambiguate_only) == NULL) | 3393 && (*translate) (ref, vuse, data, &tf) == NULL) |
2641 ; | 3394 ; |
2642 else | 3395 else |
2643 return false; | 3396 return false; |
2644 } | 3397 } |
2645 } | 3398 } |
2658 | 3411 |
2659 | 3412 |
2660 /* Starting from a PHI node for the virtual operand of the memory reference | 3413 /* Starting from a PHI node for the virtual operand of the memory reference |
2661 REF find a continuation virtual operand that allows to continue walking | 3414 REF find a continuation virtual operand that allows to continue walking |
2662 statements dominating PHI skipping only statements that cannot possibly | 3415 statements dominating PHI skipping only statements that cannot possibly |
2663 clobber REF. Increments *CNT for each alias disambiguation done. | 3416 clobber REF. Decrements LIMIT for each alias disambiguation done |
3417 and aborts the walk, returning NULL_TREE if it reaches zero. | |
2664 Returns NULL_TREE if no suitable virtual operand can be found. */ | 3418 Returns NULL_TREE if no suitable virtual operand can be found. */ |
2665 | 3419 |
2666 tree | 3420 tree |
2667 get_continuation_for_phi (gimple *phi, ao_ref *ref, | 3421 get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p, |
2668 unsigned int *cnt, bitmap *visited, | 3422 unsigned int &limit, bitmap *visited, |
2669 bool abort_on_visited, | 3423 bool abort_on_visited, |
2670 void *(*translate)(ao_ref *, tree, void *, bool *), | 3424 void *(*translate)(ao_ref *, tree, void *, |
2671 void *data) | 3425 translate_flags *), |
3426 void *data, | |
3427 translate_flags disambiguate_only) | |
2672 { | 3428 { |
2673 unsigned nargs = gimple_phi_num_args (phi); | 3429 unsigned nargs = gimple_phi_num_args (phi); |
2674 | 3430 |
2675 /* Through a single-argument PHI we can simply look through. */ | 3431 /* Through a single-argument PHI we can simply look through. */ |
2676 if (nargs == 1) | 3432 if (nargs == 1) |
2695 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb)) | 3451 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb)) |
2696 break; | 3452 break; |
2697 arg0 = NULL_TREE; | 3453 arg0 = NULL_TREE; |
2698 } | 3454 } |
2699 /* If not, look if we can reach such candidate by walking defs | 3455 /* If not, look if we can reach such candidate by walking defs |
2700 of a PHI arg without crossing other PHIs. */ | 3456 until we hit the immediate dominator. maybe_skip_until will |
2701 if (! arg0) | 3457 do that for us. */ |
2702 for (i = 0; i < nargs; ++i) | 3458 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb); |
2703 { | 3459 |
2704 arg0 = PHI_ARG_DEF (phi, i); | 3460 /* Then check against the (to be) found candidate. */ |
2705 gimple *def = SSA_NAME_DEF_STMT (arg0); | |
2706 /* Backedges can't work. */ | |
2707 if (dominated_by_p (CDI_DOMINATORS, | |
2708 gimple_bb (def), phi_bb)) | |
2709 continue; | |
2710 /* See below. */ | |
2711 if (gimple_code (def) == GIMPLE_PHI) | |
2712 continue; | |
2713 while (! dominated_by_p (CDI_DOMINATORS, | |
2714 phi_bb, gimple_bb (def))) | |
2715 { | |
2716 arg0 = gimple_vuse (def); | |
2717 if (SSA_NAME_IS_DEFAULT_DEF (arg0)) | |
2718 break; | |
2719 def = SSA_NAME_DEF_STMT (arg0); | |
2720 if (gimple_code (def) == GIMPLE_PHI) | |
2721 { | |
2722 /* Do not try to look through arbitrarily complicated | |
2723 CFGs. For those looking for the first VUSE starting | |
2724 from the end of the immediate dominator of phi_bb | |
2725 is likely faster. */ | |
2726 arg0 = NULL_TREE; | |
2727 goto next; | |
2728 } | |
2729 } | |
2730 break; | |
2731 next:; | |
2732 } | |
2733 if (! arg0) | |
2734 return NULL_TREE; | |
2735 | |
2736 /* Then check against the found candidate. */ | |
2737 for (i = 0; i < nargs; ++i) | 3461 for (i = 0; i < nargs; ++i) |
2738 { | 3462 { |
2739 arg1 = PHI_ARG_DEF (phi, i); | 3463 arg1 = PHI_ARG_DEF (phi, i); |
2740 if (arg1 == arg0) | 3464 if (arg1 == arg0) |
2741 ; | 3465 ; |
2742 else if (! maybe_skip_until (phi, arg0, ref, arg1, cnt, visited, | 3466 else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p, |
3467 limit, visited, | |
2743 abort_on_visited, | 3468 abort_on_visited, |
2744 /* Do not translate when walking over | 3469 translate, |
3470 /* Do not valueize when walking over | |
2745 backedges. */ | 3471 backedges. */ |
2746 dominated_by_p | 3472 dominated_by_p |
2747 (CDI_DOMINATORS, | 3473 (CDI_DOMINATORS, |
2748 gimple_bb (SSA_NAME_DEF_STMT (arg1)), | 3474 gimple_bb (SSA_NAME_DEF_STMT (arg1)), |
2749 phi_bb) | 3475 phi_bb) |
2750 ? NULL : translate, data)) | 3476 ? TR_DISAMBIGUATE |
3477 : disambiguate_only, data)) | |
2751 return NULL_TREE; | 3478 return NULL_TREE; |
2752 } | 3479 } |
2753 | 3480 |
2754 return arg0; | 3481 return arg0; |
2755 } | 3482 } |
2773 VALUEIZE if non-NULL is called with the next VUSE that is considered | 3500 VALUEIZE if non-NULL is called with the next VUSE that is considered |
2774 and return value is substituted for that. This can be used to | 3501 and return value is substituted for that. This can be used to |
2775 implement optimistic value-numbering for example. Note that the | 3502 implement optimistic value-numbering for example. Note that the |
2776 VUSE argument is assumed to be valueized already. | 3503 VUSE argument is assumed to be valueized already. |
2777 | 3504 |
3505 LIMIT specifies the number of alias queries we are allowed to do, | |
3506 the walk stops when it reaches zero and NULL is returned. LIMIT | |
3507 is decremented by the number of alias queries (plus adjustments | |
3508 done by the callbacks) upon return. | |
3509 | |
2778 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */ | 3510 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */ |
2779 | 3511 |
2780 void * | 3512 void * |
2781 walk_non_aliased_vuses (ao_ref *ref, tree vuse, | 3513 walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p, |
2782 void *(*walker)(ao_ref *, tree, unsigned int, void *), | 3514 void *(*walker)(ao_ref *, tree, void *), |
2783 void *(*translate)(ao_ref *, tree, void *, bool *), | 3515 void *(*translate)(ao_ref *, tree, void *, |
3516 translate_flags *), | |
2784 tree (*valueize)(tree), | 3517 tree (*valueize)(tree), |
2785 void *data) | 3518 unsigned &limit, void *data) |
2786 { | 3519 { |
2787 bitmap visited = NULL; | 3520 bitmap visited = NULL; |
2788 void *res; | 3521 void *res; |
2789 unsigned int cnt = 0; | |
2790 bool translated = false; | 3522 bool translated = false; |
2791 | 3523 |
2792 timevar_push (TV_ALIAS_STMT_WALK); | 3524 timevar_push (TV_ALIAS_STMT_WALK); |
2793 | 3525 |
2794 do | 3526 do |
2795 { | 3527 { |
2796 gimple *def_stmt; | 3528 gimple *def_stmt; |
2797 | 3529 |
2798 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */ | 3530 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */ |
2799 res = (*walker) (ref, vuse, cnt, data); | 3531 res = (*walker) (ref, vuse, data); |
2800 /* Abort walk. */ | 3532 /* Abort walk. */ |
2801 if (res == (void *)-1) | 3533 if (res == (void *)-1) |
2802 { | 3534 { |
2803 res = NULL; | 3535 res = NULL; |
2804 break; | 3536 break; |
2818 } | 3550 } |
2819 def_stmt = SSA_NAME_DEF_STMT (vuse); | 3551 def_stmt = SSA_NAME_DEF_STMT (vuse); |
2820 if (gimple_nop_p (def_stmt)) | 3552 if (gimple_nop_p (def_stmt)) |
2821 break; | 3553 break; |
2822 else if (gimple_code (def_stmt) == GIMPLE_PHI) | 3554 else if (gimple_code (def_stmt) == GIMPLE_PHI) |
2823 vuse = get_continuation_for_phi (def_stmt, ref, &cnt, | 3555 vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit, |
2824 &visited, translated, translate, data); | 3556 &visited, translated, translate, data); |
2825 else | 3557 else |
2826 { | 3558 { |
2827 cnt++; | 3559 if ((int)limit <= 0) |
2828 if (stmt_may_clobber_ref_p_1 (def_stmt, ref)) | 3560 { |
3561 res = NULL; | |
3562 break; | |
3563 } | |
3564 --limit; | |
3565 if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p)) | |
2829 { | 3566 { |
2830 if (!translate) | 3567 if (!translate) |
2831 break; | 3568 break; |
2832 bool disambiguate_only = false; | 3569 translate_flags disambiguate_only = TR_TRANSLATE; |
2833 res = (*translate) (ref, vuse, data, &disambiguate_only); | 3570 res = (*translate) (ref, vuse, data, &disambiguate_only); |
2834 /* Failed lookup and translation. */ | 3571 /* Failed lookup and translation. */ |
2835 if (res == (void *)-1) | 3572 if (res == (void *)-1) |
2836 { | 3573 { |
2837 res = NULL; | 3574 res = NULL; |
2839 } | 3576 } |
2840 /* Lookup succeeded. */ | 3577 /* Lookup succeeded. */ |
2841 else if (res != NULL) | 3578 else if (res != NULL) |
2842 break; | 3579 break; |
2843 /* Translation succeeded, continue walking. */ | 3580 /* Translation succeeded, continue walking. */ |
2844 translated = translated || !disambiguate_only; | 3581 translated = translated || disambiguate_only == TR_TRANSLATE; |
2845 } | 3582 } |
2846 vuse = gimple_vuse (def_stmt); | 3583 vuse = gimple_vuse (def_stmt); |
2847 } | 3584 } |
2848 } | 3585 } |
2849 while (vuse); | 3586 while (vuse); |