comparison gcc/cfgexpand.c @ 132:d34655255c78

update gcc-8.2
author mir3636
date Thu, 25 Oct 2018 10:21:07 +0900
parents f81c5aa9f14f 84e7813d76e9
children 4e440907fcbf
comparing 130:e108057fa461 with 132:d34655255c78
1 /* A pass for lowering trees to RTL. 1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004-2017 Free Software Foundation, Inc. 2 Copyright (C) 2004-2018 Free Software Foundation, Inc.
3 3
4 This file is part of GCC. 4 This file is part of GCC.
5 5
6 GCC is free software; you can redistribute it and/or modify 6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by 7 it under the terms of the GNU General Public License as published by
75 #include "attribs.h" 75 #include "attribs.h"
76 #include "asan.h" 76 #include "asan.h"
77 #include "tree-ssa-address.h" 77 #include "tree-ssa-address.h"
78 #include "output.h" 78 #include "output.h"
79 #include "builtins.h" 79 #include "builtins.h"
80 #include "tree-chkp.h"
81 #include "rtl-chkp.h"
82 80
83 /* Some systems use __main in a way incompatible with its use in gcc, in these 81 /* Some systems use __main in a way incompatible with its use in gcc, in these
84 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to 82 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
85 give the same symbol without quotes for an alternative entry point. You 83 give the same symbol without quotes for an alternative entry point. You
86 must define both, or neither. */ 84 must define both, or neither. */
315 /* The Variable. */ 313 /* The Variable. */
316 tree decl; 314 tree decl;
317 315
318 /* Initially, the size of the variable. Later, the size of the partition, 316 /* Initially, the size of the variable. Later, the size of the partition,
319 if this variable becomes its partition's representative. */ 317 if this variable becomes its partition's representative. */
320 HOST_WIDE_INT size; 318 poly_uint64 size;
321 319
322 /* The *byte* alignment required for this variable. Or as, with the 320 /* The *byte* alignment required for this variable. Or as, with the
323 size, the alignment for this partition. */ 321 size, the alignment for this partition. */
324 unsigned int alignb; 322 unsigned int alignb;
325 323
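
The field change above is the heart of the poly_int conversion: for SVE, a size is no longer a single HOST_WIDE_INT but an expression c0 + c1 * X, where X is a runtime quantity such as the number of extra vector chunks. A minimal self-contained model of the idea (illustrative only; GCC's real implementation is the poly_int template family in poly-int.h):

#include <cstdio>

/* Toy degree-1 polynomial value c0 + c1 * X over an unknown unsigned X
   (e.g. the SVE vector-length multiple).  Hypothetical stand-in for
   illustration; not GCC's poly-int.h.  */
struct toy_poly_uint64
{
  unsigned long long c0, c1;

  /* Evaluate the size once X is finally known at run time.  */
  unsigned long long eval (unsigned long long x) const
  { return c0 + c1 * x; }
};

int main ()
{
  toy_poly_uint64 sve_vec = { 0, 16 };   /* 16 bytes per unit of X */
  printf ("%llu\n", sve_vec.eval (4));   /* prints 64 */
  return 0;
}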
390 } 388 }
391 389
392 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame. 390 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
393 Return the frame offset. */ 391 Return the frame offset. */
394 392
395 static HOST_WIDE_INT 393 static poly_int64
396 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align) 394 alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
397 { 395 {
398 HOST_WIDE_INT offset, new_frame_offset; 396 poly_int64 offset, new_frame_offset;
399 397
400 if (FRAME_GROWS_DOWNWARD) 398 if (FRAME_GROWS_DOWNWARD)
401 { 399 {
402 new_frame_offset 400 new_frame_offset
403 = align_base (frame_offset - frame_phase - size, 401 = aligned_lower_bound (frame_offset - frame_phase - size,
404 align, false) + frame_phase; 402 align) + frame_phase;
405 offset = new_frame_offset; 403 offset = new_frame_offset;
406 } 404 }
407 else 405 else
408 { 406 {
409 new_frame_offset 407 new_frame_offset
410 = align_base (frame_offset - frame_phase, align, true) + frame_phase; 408 = aligned_upper_bound (frame_offset - frame_phase,
409 align) + frame_phase;
411 offset = new_frame_offset; 410 offset = new_frame_offset;
412 new_frame_offset += size; 411 new_frame_offset += size;
413 } 412 }
414 frame_offset = new_frame_offset; 413 frame_offset = new_frame_offset;
415 414
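
aligned_lower_bound and aligned_upper_bound replace the align_base (..., align, false/true) calls because they are also defined for polynomial offsets. In scalar form the arithmetic is ordinary power-of-two rounding; a small sketch under that assumption (align is a power of two, which the stack-frame code guarantees):

#include <cassert>

/* Scalar model of the two rounding helpers used above; the real
   versions also accept poly_int values.  ALIGN must be a power of
   two.  */
static long long toy_aligned_lower_bound (long long value, long long align)
{ return value & -align; }                 /* round towards -inf */

static long long toy_aligned_upper_bound (long long value, long long align)
{ return (value + align - 1) & -align; }   /* round towards +inf */

int main ()
{
  /* FRAME_GROWS_DOWNWARD: subtract the size, then round down...  */
  assert (toy_aligned_lower_bound (-20 - 8, 16) == -32);
  /* ...otherwise round the current offset up and allocate above it.  */
  assert (toy_aligned_upper_bound (20, 16) == 32);
  return 0;
}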
443 442
444 v->decl = decl; 443 v->decl = decl;
445 tree size = TREE_CODE (decl) == SSA_NAME 444 tree size = TREE_CODE (decl) == SSA_NAME
446 ? TYPE_SIZE_UNIT (TREE_TYPE (decl)) 445 ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
447 : DECL_SIZE_UNIT (decl); 446 : DECL_SIZE_UNIT (decl);
448 v->size = tree_to_uhwi (size); 447 v->size = tree_to_poly_uint64 (size);
449 /* Ensure that all variables have size, so that &a != &b for any two 448 /* Ensure that all variables have size, so that &a != &b for any two
450 variables that are simultaneously live. */ 449 variables that are simultaneously live. */
451 if (v->size == 0) 450 if (known_eq (v->size, 0U))
452 v->size = 1; 451 v->size = 1;
453 v->alignb = align_local_variable (decl); 452 v->alignb = align_local_variable (decl);
454 /* An alignment of zero can mightily confuse us later. */ 453 /* An alignment of zero can mightily confuse us later. */
455 gcc_assert (v->alignb != 0); 454 gcc_assert (v->alignb != 0);
456 455
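
known_eq (v->size, 0U) replaces a plain v->size == 0 because equality of polynomial values is quantified: known_* predicates hold for every possible X, maybe_* predicates for at least one X (maybe_ne is used the same way in the ASAN hunk further down). A toy demonstration of the convention, illustration only:

#include <cstdio>

/* c0 + c1 * X with X unknown and unsigned (stand-in, not poly-int.h).  */
struct toy_poly { unsigned long long c0, c1; };

/* "Equal to the constant B for every X": requires a zero X term.  */
static bool toy_known_eq (toy_poly a, unsigned long long b)
{ return a.c0 == b && a.c1 == 0; }

/* "Unequal for at least one X": the complement of known equality.  */
static bool toy_maybe_ne (toy_poly a, toy_poly b)
{ return a.c0 != b.c0 || a.c1 != b.c1; }

int main ()
{
  toy_poly fixed = { 0, 0 }, scalable = { 0, 16 };
  printf ("%d %d\n", toy_known_eq (fixed, 0),   /* 1: surely zero    */
          toy_maybe_ne (fixed, scalable));      /* 1: differ if X>0  */
  return 0;
}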
676 { 675 {
677 size_t ia = *(const size_t *)a; 676 size_t ia = *(const size_t *)a;
678 size_t ib = *(const size_t *)b; 677 size_t ib = *(const size_t *)b;
679 unsigned int aligna = stack_vars[ia].alignb; 678 unsigned int aligna = stack_vars[ia].alignb;
680 unsigned int alignb = stack_vars[ib].alignb; 679 unsigned int alignb = stack_vars[ib].alignb;
681 HOST_WIDE_INT sizea = stack_vars[ia].size; 680 poly_int64 sizea = stack_vars[ia].size;
682 HOST_WIDE_INT sizeb = stack_vars[ib].size; 681 poly_int64 sizeb = stack_vars[ib].size;
683 tree decla = stack_vars[ia].decl; 682 tree decla = stack_vars[ia].decl;
684 tree declb = stack_vars[ib].decl; 683 tree declb = stack_vars[ib].decl;
685 bool largea, largeb; 684 bool largea, largeb;
686 unsigned int uida, uidb; 685 unsigned int uida, uidb;
687 686
690 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT); 689 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
691 if (largea != largeb) 690 if (largea != largeb)
692 return (int)largeb - (int)largea; 691 return (int)largeb - (int)largea;
693 692
694 /* Secondary compare on size, decreasing. */ 693 /* Secondary compare on size, decreasing. */
695 if (sizea > sizeb) 694 int diff = compare_sizes_for_sort (sizeb, sizea);
696 return -1; 695 if (diff != 0)
697 if (sizea < sizeb) 696 return diff;
698 return 1;
699 697
700 /* Tertiary compare on true alignment, decreasing. */ 698 /* Tertiary compare on true alignment, decreasing. */
701 if (aligna < alignb) 699 if (aligna < alignb)
702 return -1; 700 return -1;
703 if (aligna > alignb) 701 if (aligna > alignb)
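
Polynomial sizes are only partially ordered (4 + 4X vs. 8 compares either way depending on X), yet qsort needs a total order, hence compare_sizes_for_sort in place of the two explicit comparisons. A sketch of the kind of deterministic tie-break this requires, assuming coefficients are compared highest degree first (illustration; poly-int.h defines the actual rule):

#include <cstdio>

struct toy_poly { unsigned long long c0, c1; };   /* c0 + c1 * X */

/* Total order for sorting only: agrees with the real ordering where
   one exists and is merely consistent where it does not.  */
static int toy_compare_sizes_for_sort (toy_poly a, toy_poly b)
{
  if (a.c1 != b.c1)
    return a.c1 < b.c1 ? -1 : 1;
  if (a.c0 != b.c0)
    return a.c0 < b.c0 ? -1 : 1;
  return 0;
}

int main ()
{
  toy_poly a = { 4, 4 }, b = { 8, 0 };   /* not comparable for all X */
  printf ("%d\n", toy_compare_sizes_for_sort (a, b));   /* prints 1 */
  return 0;
}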
904 902
905 for (si = 0; si < n; ++si) 903 for (si = 0; si < n; ++si)
906 { 904 {
907 size_t i = stack_vars_sorted[si]; 905 size_t i = stack_vars_sorted[si];
908 unsigned int ialign = stack_vars[i].alignb; 906 unsigned int ialign = stack_vars[i].alignb;
909 HOST_WIDE_INT isize = stack_vars[i].size; 907 poly_int64 isize = stack_vars[i].size;
910 908
911 /* Ignore objects that aren't partition representatives. If we 909 /* Ignore objects that aren't partition representatives. If we
912 see a var that is not a partition representative, it must 910 see a var that is not a partition representative, it must
913 have been merged earlier. */ 911 have been merged earlier. */
914 if (stack_vars[i].representative != i) 912 if (stack_vars[i].representative != i)
916 914
917 for (sj = si + 1; sj < n; ++sj) 915 for (sj = si + 1; sj < n; ++sj)
918 { 916 {
919 size_t j = stack_vars_sorted[sj]; 917 size_t j = stack_vars_sorted[sj];
920 unsigned int jalign = stack_vars[j].alignb; 918 unsigned int jalign = stack_vars[j].alignb;
921 HOST_WIDE_INT jsize = stack_vars[j].size; 919 poly_int64 jsize = stack_vars[j].size;
922 920
923 /* Ignore objects that aren't partition representatives. */ 921 /* Ignore objects that aren't partition representatives. */
924 if (stack_vars[j].representative != j) 922 if (stack_vars[j].representative != j)
925 continue; 923 continue;
926 924
932 930
933 /* For Address Sanitizer do not mix objects with different 931 /* For Address Sanitizer do not mix objects with different
934 sizes, as the shorter vars wouldn't be adequately protected. 932 sizes, as the shorter vars wouldn't be adequately protected.
935 Don't do that for "large" (unsupported) alignment objects, 933 Don't do that for "large" (unsupported) alignment objects,
936 those aren't protected anyway. */ 934 those aren't protected anyway. */
937 if ((asan_sanitize_stack_p ()) 935 if (asan_sanitize_stack_p ()
938 && isize != jsize 936 && maybe_ne (isize, jsize)
939 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT) 937 && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
940 break; 938 break;
941 939
942 /* Ignore conflicting objects. */ 940 /* Ignore conflicting objects. */
943 if (stack_var_conflict_p (i, j)) 941 if (stack_var_conflict_p (i, j))
964 962
965 /* Skip variables that aren't partition representatives, for now. */ 963 /* Skip variables that aren't partition representatives, for now. */
966 if (stack_vars[i].representative != i) 964 if (stack_vars[i].representative != i)
967 continue; 965 continue;
968 966
969 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC 967 fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
970 " align %u\n", (unsigned long) i, stack_vars[i].size, 968 print_dec (stack_vars[i].size, dump_file);
971 stack_vars[i].alignb); 969 fprintf (dump_file, " align %u\n", stack_vars[i].alignb);
972 970
973 for (j = i; j != EOC; j = stack_vars[j].next) 971 for (j = i; j != EOC; j = stack_vars[j].next)
974 { 972 {
975 fputc ('\t', dump_file); 973 fputc ('\t', dump_file);
976 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags); 974 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
981 979
982 /* Assign rtl to DECL at BASE + OFFSET. */ 980 /* Assign rtl to DECL at BASE + OFFSET. */
983 981
984 static void 982 static void
985 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align, 983 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
986 HOST_WIDE_INT offset) 984 poly_int64 offset)
987 { 985 {
988 unsigned align; 986 unsigned align;
989 rtx x; 987 rtx x;
990 988
991 /* If this fails, we've overflowed the stack frame. Error nicely? */ 989 /* If this fails, we've overflowed the stack frame. Error nicely? */
992 gcc_assert (offset == trunc_int_for_mode (offset, Pmode)); 990 gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));
993 991
994 x = plus_constant (Pmode, base, offset); 992 x = plus_constant (Pmode, base, offset);
995 x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME 993 x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
996 ? TYPE_MODE (TREE_TYPE (decl)) 994 ? TYPE_MODE (TREE_TYPE (decl))
997 : DECL_MODE (SSAVAR (decl)), x); 995 : DECL_MODE (SSAVAR (decl)), x);
1001 /* Set alignment we actually gave this decl if it isn't an SSA name. 999 /* Set alignment we actually gave this decl if it isn't an SSA name.
1002 If it is we generate stack slots only accidentally so it isn't as 1000 If it is we generate stack slots only accidentally so it isn't as
1003 important, we'll simply use the alignment that is already set. */ 1001 important, we'll simply use the alignment that is already set. */
1004 if (base == virtual_stack_vars_rtx) 1002 if (base == virtual_stack_vars_rtx)
1005 offset -= frame_phase; 1003 offset -= frame_phase;
1006 align = least_bit_hwi (offset); 1004 align = known_alignment (offset);
1007 align *= BITS_PER_UNIT; 1005 align *= BITS_PER_UNIT;
1008 if (align == 0 || align > base_align) 1006 if (align == 0 || align > base_align)
1009 align = base_align; 1007 align = base_align;
1010 1008
1011 /* One would think that we could assert that we're not decreasing 1009 /* One would think that we could assert that we're not decreasing
1042 1040
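
In the hunk above, least_bit_hwi (offset) gave the guaranteed alignment of a constant offset; known_alignment generalizes this to polynomial offsets, where every coefficient constrains which low bits can be set. A scalar model (illustration only):

#include <cassert>

/* Largest power-of-2 alignment that c0 + c1 * X is known to have for
   every X: OR the coefficients and take the least significant set
   bit.  A value known to be 0 reports alignment 0, which is why the
   caller falls back to base_align in that case.  */
static unsigned long long toy_known_alignment (unsigned long long c0,
                                               unsigned long long c1)
{
  unsigned long long all = c0 | c1;
  return all & -all;
}

int main ()
{
  assert (toy_known_alignment (48, 0) == 16);   /* 48 = 16 * 3       */
  assert (toy_known_alignment (48, 8) == 8);    /* X term limits it  */
  assert (toy_known_alignment (0, 0) == 0);     /* "unknown"         */
  return 0;
}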
1043 static void 1041 static void
1044 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data) 1042 expand_stack_vars (bool (*pred) (size_t), struct stack_vars_data *data)
1045 { 1043 {
1046 size_t si, i, j, n = stack_vars_num; 1044 size_t si, i, j, n = stack_vars_num;
1047 HOST_WIDE_INT large_size = 0, large_alloc = 0; 1045 poly_uint64 large_size = 0, large_alloc = 0;
1048 rtx large_base = NULL; 1046 rtx large_base = NULL;
1049 unsigned large_align = 0; 1047 unsigned large_align = 0;
1050 bool large_allocation_done = false; 1048 bool large_allocation_done = false;
1051 tree decl; 1049 tree decl;
1052 1050
1085 if (TREE_CODE (decl) == SSA_NAME 1083 if (TREE_CODE (decl) == SSA_NAME
1086 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX 1084 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
1087 : DECL_RTL (decl) != pc_rtx) 1085 : DECL_RTL (decl) != pc_rtx)
1088 continue; 1086 continue;
1089 1087
1090 large_size += alignb - 1; 1088 large_size = aligned_upper_bound (large_size, alignb);
1091 large_size &= -(HOST_WIDE_INT)alignb;
1092 large_size += stack_vars[i].size; 1089 large_size += stack_vars[i].size;
1093 } 1090 }
1094 } 1091 }
1095 1092
1096 for (si = 0; si < n; ++si) 1093 for (si = 0; si < n; ++si)
1097 { 1094 {
1098 rtx base; 1095 rtx base;
1099 unsigned base_align, alignb; 1096 unsigned base_align, alignb;
1100 HOST_WIDE_INT offset; 1097 poly_int64 offset;
1101 1098
1102 i = stack_vars_sorted[si]; 1099 i = stack_vars_sorted[si];
1103 1100
1104 /* Skip variables that aren't partition representatives, for now. */ 1101 /* Skip variables that aren't partition representatives, for now. */
1105 if (stack_vars[i].representative != i) 1102 if (stack_vars[i].representative != i)
1120 1117
1121 alignb = stack_vars[i].alignb; 1118 alignb = stack_vars[i].alignb;
1122 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT) 1119 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
1123 { 1120 {
1124 base = virtual_stack_vars_rtx; 1121 base = virtual_stack_vars_rtx;
1125 if ((asan_sanitize_stack_p ()) 1122 /* ASAN description strings don't yet have a syntax for expressing
1126 && pred) 1123 polynomial offsets. */
1124 HOST_WIDE_INT prev_offset;
1125 if (asan_sanitize_stack_p ()
1126 && pred
1127 && frame_offset.is_constant (&prev_offset)
1128 && stack_vars[i].size.is_constant ())
1127 { 1129 {
1128 HOST_WIDE_INT prev_offset 1130 prev_offset = align_base (prev_offset,
1129 = align_base (frame_offset, 1131 MAX (alignb, ASAN_RED_ZONE_SIZE),
1130 MAX (alignb, ASAN_RED_ZONE_SIZE), 1132 !FRAME_GROWS_DOWNWARD);
1131 !FRAME_GROWS_DOWNWARD);
1132 tree repr_decl = NULL_TREE; 1133 tree repr_decl = NULL_TREE;
1133 offset 1134 offset
1134 = alloc_stack_frame_space (stack_vars[i].size 1135 = alloc_stack_frame_space (stack_vars[i].size
1135 + ASAN_RED_ZONE_SIZE, 1136 + ASAN_RED_ZONE_SIZE,
1136 MAX (alignb, ASAN_RED_ZONE_SIZE)); 1137 MAX (alignb, ASAN_RED_ZONE_SIZE));
1137 1138
1138 data->asan_vec.safe_push (prev_offset); 1139 data->asan_vec.safe_push (prev_offset);
1139 data->asan_vec.safe_push (offset + stack_vars[i].size); 1140 /* Allocating a constant amount of space from a constant
1141 starting offset must give a constant result. */
1142 data->asan_vec.safe_push ((offset + stack_vars[i].size)
1143 .to_constant ());
1140 /* Find best representative of the partition. 1144 /* Find best representative of the partition.
1141 Prefer those with DECL_NAME, even better 1145 Prefer those with DECL_NAME, even better
1142 satisfying asan_protect_stack_decl predicate. */ 1146 satisfying asan_protect_stack_decl predicate. */
1143 for (j = i; j != EOC; j = stack_vars[j].next) 1147 for (j = i; j != EOC; j = stack_vars[j].next)
1144 if (asan_protect_stack_decl (stack_vars[j].decl) 1148 if (asan_protect_stack_decl (stack_vars[j].decl)
1152 && DECL_NAME (stack_vars[j].decl)) 1156 && DECL_NAME (stack_vars[j].decl))
1153 repr_decl = stack_vars[j].decl; 1157 repr_decl = stack_vars[j].decl;
1154 if (repr_decl == NULL_TREE) 1158 if (repr_decl == NULL_TREE)
1155 repr_decl = stack_vars[i].decl; 1159 repr_decl = stack_vars[i].decl;
1156 data->asan_decl_vec.safe_push (repr_decl); 1160 data->asan_decl_vec.safe_push (repr_decl);
1161
1162 /* Make sure a representative is unpoisoned if another
1163 variable in the partition is handled by
1164 use-after-scope sanitization. */
1165 if (asan_handled_variables != NULL
1166 && !asan_handled_variables->contains (repr_decl))
1167 {
1168 for (j = i; j != EOC; j = stack_vars[j].next)
1169 if (asan_handled_variables->contains (stack_vars[j].decl))
1170 break;
1171 if (j != EOC)
1172 asan_handled_variables->add (repr_decl);
1173 }
1174
1157 data->asan_alignb = MAX (data->asan_alignb, alignb); 1175 data->asan_alignb = MAX (data->asan_alignb, alignb);
1158 if (data->asan_base == NULL) 1176 if (data->asan_base == NULL)
1159 data->asan_base = gen_reg_rtx (Pmode); 1177 data->asan_base = gen_reg_rtx (Pmode);
1160 base = data->asan_base; 1178 base = data->asan_base;
1161 1179
1178 if (pred) 1196 if (pred)
1179 continue; 1197 continue;
1180 1198
1181 /* If there were any variables requiring "large" alignment, allocate 1199 /* If there were any variables requiring "large" alignment, allocate
1182 space. */ 1200 space. */
1183 if (large_size > 0 && ! large_allocation_done) 1201 if (maybe_ne (large_size, 0U) && ! large_allocation_done)
1184 { 1202 {
1185 HOST_WIDE_INT loffset; 1203 poly_int64 loffset;
1186 rtx large_allocsize; 1204 rtx large_allocsize;
1187 1205
1188 large_allocsize = GEN_INT (large_size); 1206 large_allocsize = gen_int_mode (large_size, Pmode);
1189 get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL); 1207 get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
1190 loffset = alloc_stack_frame_space 1208 loffset = alloc_stack_frame_space
1191 (INTVAL (large_allocsize), 1209 (rtx_to_poly_int64 (large_allocsize),
1192 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT); 1210 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
1193 large_base = get_dynamic_stack_base (loffset, large_align); 1211 large_base = get_dynamic_stack_base (loffset, large_align);
1194 large_allocation_done = true; 1212 large_allocation_done = true;
1195 } 1213 }
1196 gcc_assert (large_base != NULL); 1214 gcc_assert (large_base != NULL);
1197 1215
1198 large_alloc += alignb - 1; 1216 large_alloc = aligned_upper_bound (large_alloc, alignb);
1199 large_alloc &= -(HOST_WIDE_INT)alignb;
1200 offset = large_alloc; 1217 offset = large_alloc;
1201 large_alloc += stack_vars[i].size; 1218 large_alloc += stack_vars[i].size;
1202 1219
1203 base = large_base; 1220 base = large_base;
1204 base_align = large_align; 1221 base_align = large_align;
1212 base, base_align, 1229 base, base_align,
1213 offset); 1230 offset);
1214 } 1231 }
1215 } 1232 }
1216 1233
1217 gcc_assert (large_alloc == large_size); 1234 gcc_assert (known_eq (large_alloc, large_size));
1218 } 1235 }
1219 1236
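
Several hunks in this function use the same escape hatch for code that cannot describe polynomial values yet, such as the ASAN metadata strings: frame_offset.is_constant (&prev_offset) tests and extracts in one step, and .to_constant () asserts what an earlier check already established. A toy model of that protocol (the real members live on GCC's poly_int):

#include <cassert>

struct toy_poly64
{
  long long c0, c1;   /* value = c0 + c1 * X */

  /* Succeeds, and stores the value, only when no X term remains.  */
  bool is_constant (long long *out) const
  { if (c1 != 0) return false; *out = c0; return true; }

  /* For contexts where constancy was already proven.  */
  long long to_constant () const
  { assert (c1 == 0); return c0; }
};

int main ()
{
  toy_poly64 fixed = { 32, 0 }, scalable = { 32, 16 };
  long long v;
  assert (fixed.is_constant (&v) && v == 32);
  assert (!scalable.is_constant (&v));   /* the ASAN path is skipped */
  return 0;
}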
1220 /* Take into account all sizes of partitions and reset DECL_RTLs. */ 1237 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1221 static HOST_WIDE_INT 1238 static poly_uint64
1222 account_stack_vars (void) 1239 account_stack_vars (void)
1223 { 1240 {
1224 size_t si, j, i, n = stack_vars_num; 1241 size_t si, j, i, n = stack_vars_num;
1225 HOST_WIDE_INT size = 0; 1242 poly_uint64 size = 0;
1226 1243
1227 for (si = 0; si < n; ++si) 1244 for (si = 0; si < n; ++si)
1228 { 1245 {
1229 i = stack_vars_sorted[si]; 1246 i = stack_vars_sorted[si];
1230 1247
1255 1272
1256 /* If the variable alignment is very large we'll dynamically 1273 /* If the variable alignment is very large we'll dynamically
1257 allocate it, which means that in-frame portion is just a 1274 allocate it, which means that in-frame portion is just a
1258 pointer. ??? We've got a pseudo for sure here, do we 1275 pointer. ??? We've got a pseudo for sure here, do we
1259 actually dynamically allocate its spilling area if needed? 1276 actually dynamically allocate its spilling area if needed?
1260 ??? Isn't it a problem when POINTER_SIZE also exceeds 1277 ??? Isn't it a problem when Pmode alignment also exceeds
1261 MAX_SUPPORTED_STACK_ALIGNMENT, as on cris and lm32? */ 1278 MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32? */
1262 if (align > MAX_SUPPORTED_STACK_ALIGNMENT) 1279 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1263 align = POINTER_SIZE; 1280 align = GET_MODE_ALIGNMENT (Pmode);
1264 1281
1265 record_alignment_for_reg_var (align); 1282 record_alignment_for_reg_var (align);
1266 } 1283 }
1267 1284
1268 tree ssa = ssa_default_def (cfun, parm); 1285 tree ssa = ssa_default_def (cfun, parm);
1283 to a variable to be allocated in the stack frame. */ 1300 to a variable to be allocated in the stack frame. */
1284 1301
1285 static void 1302 static void
1286 expand_one_stack_var_1 (tree var) 1303 expand_one_stack_var_1 (tree var)
1287 { 1304 {
1288 HOST_WIDE_INT size, offset; 1305 poly_uint64 size;
1306 poly_int64 offset;
1289 unsigned byte_align; 1307 unsigned byte_align;
1290 1308
1291 if (TREE_CODE (var) == SSA_NAME) 1309 if (TREE_CODE (var) == SSA_NAME)
1292 { 1310 {
1293 tree type = TREE_TYPE (var); 1311 tree type = TREE_TYPE (var);
1294 size = tree_to_uhwi (TYPE_SIZE_UNIT (type)); 1312 size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
1295 byte_align = TYPE_ALIGN_UNIT (type); 1313 byte_align = TYPE_ALIGN_UNIT (type);
1296 } 1314 }
1297 else 1315 else
1298 { 1316 {
1299 size = tree_to_uhwi (DECL_SIZE_UNIT (var)); 1317 size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
1300 byte_align = align_local_variable (var); 1318 byte_align = align_local_variable (var);
1301 } 1319 }
1302 1320
1303 /* We handle highly aligned variables in expand_stack_vars. */ 1321 /* We handle highly aligned variables in expand_stack_vars. */
1304 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT); 1322 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);
1378 TYPE_ALIGN (TREE_TYPE (var))); 1396 TYPE_ALIGN (TREE_TYPE (var)));
1379 1397
1380 /* If the variable alignment is very large we'll dynamically allocate 1398 /* If the variable alignment is very large we'll dynamically allocate
1381 it, which means that in-frame portion is just a pointer. */ 1399 it, which means that in-frame portion is just a pointer. */
1382 if (align > MAX_SUPPORTED_STACK_ALIGNMENT) 1400 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1383 align = POINTER_SIZE; 1401 align = GET_MODE_ALIGNMENT (Pmode);
1384 1402
1385 record_alignment_for_reg_var (align); 1403 record_alignment_for_reg_var (align);
1386 1404
1387 if (!use_register_for_decl (var)) 1405 if (!use_register_for_decl (var))
1388 { 1406 {
1506 defer_stack_allocation (tree var, bool toplevel) 1524 defer_stack_allocation (tree var, bool toplevel)
1507 { 1525 {
1508 tree size_unit = TREE_CODE (var) == SSA_NAME 1526 tree size_unit = TREE_CODE (var) == SSA_NAME
1509 ? TYPE_SIZE_UNIT (TREE_TYPE (var)) 1527 ? TYPE_SIZE_UNIT (TREE_TYPE (var))
1510 : DECL_SIZE_UNIT (var); 1528 : DECL_SIZE_UNIT (var);
1529 poly_uint64 size;
1511 1530
1512 /* Whether the variable is small enough for immediate allocation not to be 1531 /* Whether the variable is small enough for immediate allocation not to be
1513 a problem with regard to the frame size. */ 1532 a problem with regard to the frame size. */
1514 bool smallish 1533 bool smallish
1515 = ((HOST_WIDE_INT) tree_to_uhwi (size_unit) 1534 = (poly_int_tree_p (size_unit, &size)
1516 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)); 1535 && (estimated_poly_value (size)
1536 < PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING)));
1517 1537
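
PARAM_VALUE (PARAM_MIN_SIZE_FOR_STACK_SHARING) is a plain number, so the polynomial size has to be collapsed to a scalar heuristic first; estimated_poly_value does that by substituting the target's best guess for X. A toy version with an assumed guess of X = 2 (the real estimate comes from a target hook):

#include <cstdio>

static long long toy_estimated_poly_value (long long c0, long long c1)
{
  const long long likely_x = 2;   /* hypothetical target estimate */
  return c0 + c1 * likely_x;
}

int main ()
{
  printf ("%lld\n", toy_estimated_poly_value (16, 16));   /* prints 48 */
  return 0;
}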
1518 /* If stack protection is enabled, *all* stack variables must be deferred, 1538 /* If stack protection is enabled, *all* stack variables must be deferred,
1519 so that we can re-order the strings to the top of the frame. 1539 so that we can re-order the strings to the top of the frame.
1520 Similarly for Address Sanitizer. */ 1540 Similarly for Address Sanitizer. */
1521 if (flag_stack_protect || asan_sanitize_stack_p ()) 1541 if (flag_stack_protect || asan_sanitize_stack_p ())
1565 expanded yet, merely recorded. 1585 expanded yet, merely recorded.
1566 When REALLY_EXPAND is false, only add stack values to be allocated. 1586 When REALLY_EXPAND is false, only add stack values to be allocated.
1567 Return stack usage this variable is supposed to take. 1587 Return stack usage this variable is supposed to take.
1568 */ 1588 */
1569 1589
1570 static HOST_WIDE_INT 1590 static poly_uint64
1571 expand_one_var (tree var, bool toplevel, bool really_expand) 1591 expand_one_var (tree var, bool toplevel, bool really_expand)
1572 { 1592 {
1573 unsigned int align = BITS_PER_UNIT; 1593 unsigned int align = BITS_PER_UNIT;
1574 tree origvar = var; 1594 tree origvar = var;
1575 1595
1603 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var)); 1623 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));
1604 1624
1605 /* If the variable alignment is very large we'll dynamically allocate 1625 /* If the variable alignment is very large we'll dynamically allocate
1606 it, which means that in-frame portion is just a pointer. */ 1626 it, which means that in-frame portion is just a pointer. */
1607 if (align > MAX_SUPPORTED_STACK_ALIGNMENT) 1627 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
1608 align = POINTER_SIZE; 1628 align = GET_MODE_ALIGNMENT (Pmode);
1609 } 1629 }
1610 1630
1611 record_alignment_for_reg_var (align); 1631 record_alignment_for_reg_var (align);
1612 1632
1633 poly_uint64 size;
1613 if (TREE_CODE (origvar) == SSA_NAME) 1634 if (TREE_CODE (origvar) == SSA_NAME)
1614 { 1635 {
1615 gcc_assert (!VAR_P (var) 1636 gcc_assert (!VAR_P (var)
1616 || (!DECL_EXTERNAL (var) 1637 || (!DECL_EXTERNAL (var)
1617 && !DECL_HAS_VALUE_EXPR_P (var) 1638 && !DECL_HAS_VALUE_EXPR_P (var)
1648 else if (use_register_for_decl (var)) 1669 else if (use_register_for_decl (var))
1649 { 1670 {
1650 if (really_expand) 1671 if (really_expand)
1651 expand_one_register_var (origvar); 1672 expand_one_register_var (origvar);
1652 } 1673 }
1653 else if (! valid_constant_size_p (DECL_SIZE_UNIT (var))) 1674 else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
1675 || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
1654 { 1676 {
1655 /* Reject variables which cover more than half of the address-space. */ 1677 /* Reject variables which cover more than half of the address-space. */
1656 if (really_expand) 1678 if (really_expand)
1657 { 1679 {
1658 error ("size of variable %q+D is too large", var); 1680 if (DECL_NONLOCAL_FRAME (var))
1681 error_at (DECL_SOURCE_LOCATION (current_function_decl),
1682 "total size of local objects is too large");
1683 else
1684 error_at (DECL_SOURCE_LOCATION (var),
1685 "size of variable %q+D is too large", var);
1659 expand_one_error_var (var); 1686 expand_one_error_var (var);
1660 } 1687 }
1661 } 1688 }
1662 else if (defer_stack_allocation (var, toplevel)) 1689 else if (defer_stack_allocation (var, toplevel))
1663 add_stack_var (origvar); 1690 add_stack_var (origvar);
1670 error ("cannot allocate stack for variable %q+D, naked function.", 1697 error ("cannot allocate stack for variable %q+D, naked function.",
1671 var); 1698 var);
1672 1699
1673 expand_one_stack_var (origvar); 1700 expand_one_stack_var (origvar);
1674 } 1701 }
1675 1702 return size;
1676
1677 return tree_to_uhwi (DECL_SIZE_UNIT (var));
1678 } 1703 }
1679 return 0; 1704 return 0;
1680 } 1705 }
1681 1706
1682 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree 1707 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1925 happen very often. */ 1950 happen very often. */
1926 1951
1927 HOST_WIDE_INT 1952 HOST_WIDE_INT
1928 estimated_stack_frame_size (struct cgraph_node *node) 1953 estimated_stack_frame_size (struct cgraph_node *node)
1929 { 1954 {
1930 HOST_WIDE_INT size = 0; 1955 poly_int64 size = 0;
1931 size_t i; 1956 size_t i;
1932 tree var; 1957 tree var;
1933 struct function *fn = DECL_STRUCT_FUNCTION (node->decl); 1958 struct function *fn = DECL_STRUCT_FUNCTION (node->decl);
1934 1959
1935 push_cfun (fn); 1960 push_cfun (fn);
1949 size += account_stack_vars (); 1974 size += account_stack_vars ();
1950 } 1975 }
1951 1976
1952 fini_vars_expansion (); 1977 fini_vars_expansion ();
1953 pop_cfun (); 1978 pop_cfun ();
1954 return size; 1979 return estimated_poly_value (size);
1955 } 1980 }
1956 1981
1957 /* Helper routine to check if a record or union contains an array field. */ 1982 /* Helper routine to check if a record or union contains an array field. */
1958 1983
1959 static int 1984 static int
2219 if (asan_sanitize_stack_p ()) 2244 if (asan_sanitize_stack_p ())
2220 /* Phase 3, any partitions that need asan protection 2245 /* Phase 3, any partitions that need asan protection
2221 in addition to phase 1 and 2. */ 2246 in addition to phase 1 and 2. */
2222 expand_stack_vars (asan_decl_phase_3, &data); 2247 expand_stack_vars (asan_decl_phase_3, &data);
2223 2248
2224 if (!data.asan_vec.is_empty ()) 2249 /* ASAN description strings don't yet have a syntax for expressing
2225 { 2250 polynomial offsets. */
2226 HOST_WIDE_INT prev_offset = frame_offset; 2251 HOST_WIDE_INT prev_offset;
2252 if (!data.asan_vec.is_empty ()
2253 && frame_offset.is_constant (&prev_offset))
2254 {
2227 HOST_WIDE_INT offset, sz, redzonesz; 2255 HOST_WIDE_INT offset, sz, redzonesz;
2228 redzonesz = ASAN_RED_ZONE_SIZE; 2256 redzonesz = ASAN_RED_ZONE_SIZE;
2229 sz = data.asan_vec[0] - prev_offset; 2257 sz = data.asan_vec[0] - prev_offset;
2230 if (data.asan_alignb > ASAN_RED_ZONE_SIZE 2258 if (data.asan_alignb > ASAN_RED_ZONE_SIZE
2231 && data.asan_alignb <= 4096 2259 && data.asan_alignb <= 4096
2232 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb) 2260 && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
2233 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1) 2261 redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
2234 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz; 2262 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
2235 offset 2263 /* Allocating a constant amount of space from a constant
2236 = alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE); 2264 starting offset must give a constant result. */
2265 offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
2266 .to_constant ());
2237 data.asan_vec.safe_push (prev_offset); 2267 data.asan_vec.safe_push (prev_offset);
2238 data.asan_vec.safe_push (offset); 2268 data.asan_vec.safe_push (offset);
2239 /* Leave space for alignment if STRICT_ALIGNMENT. */ 2269 /* Leave space for alignment if STRICT_ALIGNMENT. */
2240 if (STRICT_ALIGNMENT) 2270 if (STRICT_ALIGNMENT)
2241 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode) 2271 alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
2276 2306
2277 /* If the target requires that FRAME_OFFSET be aligned, do it. */ 2307 /* If the target requires that FRAME_OFFSET be aligned, do it. */
2278 if (STACK_ALIGNMENT_NEEDED) 2308 if (STACK_ALIGNMENT_NEEDED)
2279 { 2309 {
2280 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; 2310 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
2281 if (!FRAME_GROWS_DOWNWARD) 2311 if (FRAME_GROWS_DOWNWARD)
2282 frame_offset += align - 1; 2312 frame_offset = aligned_lower_bound (frame_offset, align);
2283 frame_offset &= -align; 2313 else
2314 frame_offset = aligned_upper_bound (frame_offset, align);
2284 } 2315 }
2285 2316
2286 return var_end_seq; 2317 return var_end_seq;
2287 } 2318 }
2288 2319
2466 } 2497 }
2467 } 2498 }
2468 } 2499 }
2469 } 2500 }
2470 2501
2502 /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
2503 into (x - C2) * C3 < C4. */
2504 if ((code == EQ_EXPR || code == NE_EXPR)
2505 && TREE_CODE (op0) == SSA_NAME
2506 && TREE_CODE (op1) == INTEGER_CST)
2507 code = maybe_optimize_mod_cmp (code, &op0, &op1);
2508
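
maybe_optimize_mod_cmp, called above, strength-reduces the modulo: for odd C1 it multiplies by the modular inverse of C1 so that exactly the residues equal to C2 land in one contiguous range, replacing a division with a multiply and compare. A worked unsigned 32-bit instance for C1 = 3, C2 = 0 (even divisors additionally need a rotate, not shown):

#include <cassert>
#include <cstdint>

/* 3 * 0xAAAAAAABu == 1 (mod 2^32), so multiplying by the inverse is a
   bijection that sends the multiples of 3, and only them, into
   [0, 0x55555555], where 0x55555555 == UINT32_MAX / 3.  */
static bool mod3_is_zero (uint32_t x)
{
  return x * 0xAAAAAAABu <= 0x55555555u;   /* no division emitted */
}

int main ()
{
  for (uint32_t x = 0; x < 100000; x++)
    assert (mod3_is_zero (x) == (x % 3 == 0));
  return 0;
}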
2471 last2 = last = get_last_insn (); 2509 last2 = last = get_last_insn ();
2472 2510
2473 extract_true_false_edges_from_block (bb, &true_edge, &false_edge); 2511 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
2474 set_curr_insn_location (gimple_location (stmt)); 2512 set_curr_insn_location (gimple_location (stmt));
2475 2513
2517 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb); 2555 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2518 dest = false_edge->dest; 2556 dest = false_edge->dest;
2519 redirect_edge_succ (false_edge, new_bb); 2557 redirect_edge_succ (false_edge, new_bb);
2520 false_edge->flags |= EDGE_FALLTHRU; 2558 false_edge->flags |= EDGE_FALLTHRU;
2521 new_bb->count = false_edge->count (); 2559 new_bb->count = false_edge->count ();
2522 new_bb->frequency = EDGE_FREQUENCY (false_edge);
2523 loop_p loop = find_common_loop (bb->loop_father, dest->loop_father); 2560 loop_p loop = find_common_loop (bb->loop_father, dest->loop_father);
2524 add_bb_to_loop (new_bb, loop); 2561 add_bb_to_loop (new_bb, loop);
2525 if (loop->latch == bb 2562 if (loop->latch == bb
2526 && loop->header == dest) 2563 && loop->header == dest)
2527 loop->latch = new_bb; 2564 loop->latch = new_bb;
2606 } 2643 }
2607 2644
2608 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3); 2645 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
2609 2646
2610 CALL_EXPR_FN (exp) = gimple_call_fn (stmt); 2647 CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
2611 builtin_p = decl && DECL_BUILT_IN (decl); 2648 builtin_p = decl && fndecl_built_in_p (decl);
2612 2649
2613 /* If this is not a builtin function, the function type through which the 2650 /* If this is not a builtin function, the function type through which the
2614 call is made may be different from the type of the function. */ 2651 call is made may be different from the type of the function. */
2615 if (!builtin_p) 2652 if (!builtin_p)
2616 CALL_EXPR_FN (exp) 2653 CALL_EXPR_FN (exp)
2638 TREE_SIDE_EFFECTS (exp) = 1; 2675 TREE_SIDE_EFFECTS (exp) = 1;
2639 2676
2640 if (gimple_call_nothrow_p (stmt)) 2677 if (gimple_call_nothrow_p (stmt))
2641 TREE_NOTHROW (exp) = 1; 2678 TREE_NOTHROW (exp) = 1;
2642 2679
2680 if (gimple_no_warning_p (stmt))
2681 TREE_NO_WARNING (exp) = 1;
2682
2643 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt); 2683 CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
2644 CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt); 2684 CALL_EXPR_MUST_TAIL_CALL (exp) = gimple_call_must_tail_p (stmt);
2645 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt); 2685 CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
2646 if (decl 2686 if (decl
2647 && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL 2687 && fndecl_built_in_p (decl, BUILT_IN_NORMAL)
2648 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl))) 2688 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl)))
2649 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt); 2689 CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
2650 else 2690 else
2651 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt); 2691 CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
2652 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt); 2692 CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
2653 CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt); 2693 CALL_EXPR_BY_DESCRIPTOR (exp) = gimple_call_by_descriptor_p (stmt);
2654 SET_EXPR_LOCATION (exp, gimple_location (stmt)); 2694 SET_EXPR_LOCATION (exp, gimple_location (stmt));
2655 CALL_WITH_BOUNDS_P (exp) = gimple_call_with_bounds_p (stmt);
2656 2695
2657 /* Ensure RTL is created for debug args. */ 2696 /* Ensure RTL is created for debug args. */
2658 if (decl && DECL_HAS_DEBUG_ARGS_P (decl)) 2697 if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
2659 { 2698 {
2660 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl); 2699 vec<tree, va_gc> **debug_args = decl_debug_args_lookup (decl);
3029 Make the asm insn write into that, then we will copy it to 3068 Make the asm insn write into that, then we will copy it to
3030 the real output operand. Likewise for promoted variables. */ 3069 the real output operand. Likewise for promoted variables. */
3031 3070
3032 generating_concat_p = 0; 3071 generating_concat_p = 0;
3033 3072
3034 if ((TREE_CODE (val) == INDIRECT_REF 3073 if ((TREE_CODE (val) == INDIRECT_REF && allows_mem)
3035 && allows_mem)
3036 || (DECL_P (val) 3074 || (DECL_P (val)
3037 && (allows_mem || REG_P (DECL_RTL (val))) 3075 && (allows_mem || REG_P (DECL_RTL (val)))
3038 && ! (REG_P (DECL_RTL (val)) 3076 && ! (REG_P (DECL_RTL (val))
3039 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type))) 3077 && GET_MODE (DECL_RTL (val)) != TYPE_MODE (type)))
3040 || ! allows_reg 3078 || ! allows_reg
3041 || is_inout) 3079 || is_inout
3080 || TREE_ADDRESSABLE (type))
3042 { 3081 {
3043 op = expand_expr (val, NULL_RTX, VOIDmode, 3082 op = expand_expr (val, NULL_RTX, VOIDmode,
3044 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE); 3083 !allows_reg ? EXPAND_MEMORY : EXPAND_WRITE);
3045 if (MEM_P (op)) 3084 if (MEM_P (op))
3046 op = validize_mem (op); 3085 op = validize_mem (op);
3047 3086
3048 if (! allows_reg && !MEM_P (op)) 3087 if (! allows_reg && !MEM_P (op))
3049 error ("output number %d not directly addressable", i); 3088 error ("output number %d not directly addressable", i);
3050 if ((! allows_mem && MEM_P (op)) 3089 if ((! allows_mem && MEM_P (op) && GET_MODE (op) != BLKmode)
3051 || GET_CODE (op) == CONCAT) 3090 || GET_CODE (op) == CONCAT)
3052 { 3091 {
3053 rtx old_op = op; 3092 rtx old_op = op;
3054 op = gen_reg_rtx (GET_MODE (op)); 3093 op = gen_reg_rtx (GET_MODE (op));
3055 3094
3227 /* If asm goto has any labels in the fallthru basic block, use 3266 /* If asm goto has any labels in the fallthru basic block, use
3228 a label that we emit immediately after the asm goto. Expansion 3267 a label that we emit immediately after the asm goto. Expansion
3229 may insert further instructions into the same basic block after 3268 may insert further instructions into the same basic block after
3230 asm goto and if we don't do this, insertion of instructions on 3269 asm goto and if we don't do this, insertion of instructions on
3231 the fallthru edge might misbehave. See PR58670. */ 3270 the fallthru edge might misbehave. See PR58670. */
3232 if (fallthru_bb && label_to_block_fn (cfun, label) == fallthru_bb) 3271 if (fallthru_bb && label_to_block (cfun, label) == fallthru_bb)
3233 { 3272 {
3234 if (fallthru_label == NULL_RTX) 3273 if (fallthru_label == NULL_RTX)
3235 fallthru_label = gen_label_rtx (); 3274 fallthru_label = gen_label_rtx ();
3236 r = fallthru_label; 3275 r = fallthru_label;
3237 } 3276 }
3429 3468
3430 /* Generate RTL to evaluate the expression RETVAL and return it 3469 /* Generate RTL to evaluate the expression RETVAL and return it
3431 from the current function. */ 3470 from the current function. */
3432 3471
3433 static void 3472 static void
3434 expand_return (tree retval, tree bounds) 3473 expand_return (tree retval)
3435 { 3474 {
3436 rtx result_rtl; 3475 rtx result_rtl;
3437 rtx val = 0; 3476 rtx val = 0;
3438 tree retval_rhs; 3477 tree retval_rhs;
3439 rtx bounds_rtl;
3440 3478
3441 /* If function wants no value, give it none. */ 3479 /* If function wants no value, give it none. */
3442 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE) 3480 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
3443 { 3481 {
3444 expand_normal (retval); 3482 expand_normal (retval);
3460 else 3498 else
3461 retval_rhs = retval; 3499 retval_rhs = retval;
3462 3500
3463 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl)); 3501 result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));
3464 3502
3465 /* Put returned bounds to the right place. */
3466 bounds_rtl = DECL_BOUNDS_RTL (DECL_RESULT (current_function_decl));
3467 if (bounds_rtl)
3468 {
3469 rtx addr = NULL;
3470 rtx bnd = NULL;
3471
3472 if (bounds && bounds != error_mark_node)
3473 {
3474 bnd = expand_normal (bounds);
3475 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3476 }
3477 else if (REG_P (bounds_rtl))
3478 {
3479 if (bounds)
3480 bnd = chkp_expand_zero_bounds ();
3481 else
3482 {
3483 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3484 addr = gen_rtx_MEM (Pmode, addr);
3485 bnd = targetm.calls.load_bounds_for_arg (addr, NULL, NULL);
3486 }
3487
3488 targetm.calls.store_returned_bounds (bounds_rtl, bnd);
3489 }
3490 else
3491 {
3492 int n;
3493
3494 gcc_assert (GET_CODE (bounds_rtl) == PARALLEL);
3495
3496 if (bounds)
3497 bnd = chkp_expand_zero_bounds ();
3498 else
3499 {
3500 addr = expand_normal (build_fold_addr_expr (retval_rhs));
3501 addr = gen_rtx_MEM (Pmode, addr);
3502 }
3503
3504 for (n = 0; n < XVECLEN (bounds_rtl, 0); n++)
3505 {
3506 rtx slot = XEXP (XVECEXP (bounds_rtl, 0, n), 0);
3507 if (!bounds)
3508 {
3509 rtx offs = XEXP (XVECEXP (bounds_rtl, 0, n), 1);
3510 rtx from = adjust_address (addr, Pmode, INTVAL (offs));
3511 bnd = targetm.calls.load_bounds_for_arg (from, NULL, NULL);
3512 }
3513 targetm.calls.store_returned_bounds (slot, bnd);
3514 }
3515 }
3516 }
3517 else if (chkp_function_instrumented_p (current_function_decl)
3518 && !BOUNDED_P (retval_rhs)
3519 && chkp_type_has_pointer (TREE_TYPE (retval_rhs))
3520 && TREE_CODE (retval_rhs) != RESULT_DECL)
3521 {
3522 rtx addr = expand_normal (build_fold_addr_expr (retval_rhs));
3523 addr = gen_rtx_MEM (Pmode, addr);
3524
3525 gcc_assert (MEM_P (result_rtl));
3526
3527 chkp_copy_bounds_for_stack_parm (result_rtl, addr, TREE_TYPE (retval_rhs));
3528 }
3529
3530 /* If we are returning the RESULT_DECL, then the value has already 3503 /* If we are returning the RESULT_DECL, then the value has already
3531 been stored into it, so we don't have to do anything special. */ 3504 been stored into it, so we don't have to do anything special. */
3532 if (TREE_CODE (retval_rhs) == RESULT_DECL) 3505 if (TREE_CODE (retval_rhs) == RESULT_DECL)
3533 expand_value_return (result_rtl); 3506 expand_value_return (result_rtl);
3534 3507
3564 else 3537 else
3565 { 3538 {
3566 /* No hard reg used; calculate value into hard return reg. */ 3539 /* No hard reg used; calculate value into hard return reg. */
3567 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL); 3540 expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
3568 expand_value_return (result_rtl); 3541 expand_value_return (result_rtl);
3542 }
3543 }
3544
3545 /* Expand a clobber of LHS. If LHS is stored it in a multi-part
3546 register, tell the rtl optimizers that its value is no longer
3547 needed. */
3548
3549 static void
3550 expand_clobber (tree lhs)
3551 {
3552 if (DECL_P (lhs))
3553 {
3554 rtx decl_rtl = DECL_RTL_IF_SET (lhs);
3555 if (decl_rtl && REG_P (decl_rtl))
3556 {
3557 machine_mode decl_mode = GET_MODE (decl_rtl);
3558 if (maybe_gt (GET_MODE_SIZE (decl_mode),
3559 REGMODE_NATURAL_SIZE (decl_mode)))
3560 emit_clobber (decl_rtl);
3561 }
3569 } 3562 }
3570 } 3563 }
3571 3564
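
The new expand_clobber emits an explicit rtl clobber only when the value is wider than one natural-size register; for a single register a plain SET already makes the previous value dead. The decision rule it applies, modelled with an assumed 8-byte natural register size:

#include <cassert>

static bool toy_needs_explicit_clobber (unsigned mode_size_bytes)
{
  const unsigned regmode_natural_size = 8;   /* assumed target value */
  return mode_size_bytes > regmode_natural_size;
}

int main ()
{
  assert (!toy_needs_explicit_clobber (8));    /* DImode: one reg   */
  assert (toy_needs_explicit_clobber (16));    /* TImode: two regs  */
  return 0;
}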
3572 /* A subroutine of expand_gimple_stmt, expanding one gimple statement 3565 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3573 STMT that doesn't require special handling for outgoing edges. That 3566 STMT that doesn't require special handling for outgoing edges. That
3611 expand_call_stmt (as_a <gcall *> (stmt)); 3604 expand_call_stmt (as_a <gcall *> (stmt));
3612 break; 3605 break;
3613 3606
3614 case GIMPLE_RETURN: 3607 case GIMPLE_RETURN:
3615 { 3608 {
3616 tree bnd = gimple_return_retbnd (as_a <greturn *> (stmt));
3617 op0 = gimple_return_retval (as_a <greturn *> (stmt)); 3609 op0 = gimple_return_retval (as_a <greturn *> (stmt));
3618 3610
3619 if (op0 && op0 != error_mark_node) 3611 if (op0 && op0 != error_mark_node)
3620 { 3612 {
3621 tree result = DECL_RESULT (current_function_decl); 3613 tree result = DECL_RESULT (current_function_decl);
3622
3623 /* Mark we have return statement with missing bounds. */
3624 if (!bnd
3625 && chkp_function_instrumented_p (cfun->decl)
3626 && !DECL_P (op0))
3627 bnd = error_mark_node;
3628 3614
3629 /* If we are not returning the current function's RESULT_DECL, 3615 /* If we are not returning the current function's RESULT_DECL,
3630 build an assignment to it. */ 3616 build an assignment to it. */
3631 if (op0 != result) 3617 if (op0 != result)
3632 { 3618 {
3645 } 3631 }
3646 3632
3647 if (!op0) 3633 if (!op0)
3648 expand_null_return (); 3634 expand_null_return ();
3649 else 3635 else
3650 expand_return (op0, bnd); 3636 expand_return (op0);
3651 } 3637 }
3652 break; 3638 break;
3653 3639
3654 case GIMPLE_ASSIGN: 3640 case GIMPLE_ASSIGN:
3655 { 3641 {
3672 && !is_gimple_min_invariant (rhs)) 3658 && !is_gimple_min_invariant (rhs))
3673 SET_EXPR_LOCATION (rhs, gimple_location (stmt)); 3659 SET_EXPR_LOCATION (rhs, gimple_location (stmt));
3674 if (TREE_CLOBBER_P (rhs)) 3660 if (TREE_CLOBBER_P (rhs))
3675 /* This is a clobber to mark the going out of scope for 3661 /* This is a clobber to mark the going out of scope for
3676 this LHS. */ 3662 this LHS. */
3677 ; 3663 expand_clobber (lhs);
3678 else 3664 else
3679 expand_assignment (lhs, rhs, 3665 expand_assignment (lhs, rhs,
3680 gimple_assign_nontemporal_move_p ( 3666 gimple_assign_nontemporal_move_p (
3681 assign_stmt)); 3667 assign_stmt));
3682 } 3668 }
3791 { 3777 {
3792 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX) 3778 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
3793 /* If we want exceptions for non-call insns, any 3779 /* If we want exceptions for non-call insns, any
3794 may_trap_p instruction may throw. */ 3780 may_trap_p instruction may throw. */
3795 && GET_CODE (PATTERN (insn)) != CLOBBER 3781 && GET_CODE (PATTERN (insn)) != CLOBBER
3782 && GET_CODE (PATTERN (insn)) != CLOBBER_HIGH
3796 && GET_CODE (PATTERN (insn)) != USE 3783 && GET_CODE (PATTERN (insn)) != USE
3797 && insn_could_throw_p (insn)) 3784 && insn_could_throw_p (insn))
3798 make_reg_eh_region_note (insn, 0, lp_nr); 3785 make_reg_eh_region_note (insn, 0, lp_nr);
3799 } 3786 }
3800 } 3787 }
3848 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) 3835 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
3849 { 3836 {
3850 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH))) 3837 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
3851 { 3838 {
3852 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)) 3839 if (e->dest != EXIT_BLOCK_PTR_FOR_FN (cfun))
3853 { 3840 e->dest->count -= e->count ();
3854 e->dest->frequency -= EDGE_FREQUENCY (e);
3855 if (e->dest->frequency < 0)
3856 e->dest->frequency = 0;
3857 }
3858 probability += e->probability; 3841 probability += e->probability;
3859 remove_edge (e); 3842 remove_edge (e);
3860 } 3843 }
3861 else 3844 else
3862 ei_next (&ei); 3845 ei_next (&ei);
4171 case COND_EXPR: 4154 case COND_EXPR:
4172 case DOT_PROD_EXPR: 4155 case DOT_PROD_EXPR:
4173 case SAD_EXPR: 4156 case SAD_EXPR:
4174 case WIDEN_MULT_PLUS_EXPR: 4157 case WIDEN_MULT_PLUS_EXPR:
4175 case WIDEN_MULT_MINUS_EXPR: 4158 case WIDEN_MULT_MINUS_EXPR:
4176 case FMA_EXPR:
4177 goto ternary; 4159 goto ternary;
4178 4160
4179 case TRUTH_ANDIF_EXPR: 4161 case TRUTH_ANDIF_EXPR:
4180 case TRUTH_ORIF_EXPR: 4162 case TRUTH_ORIF_EXPR:
4181 case TRUTH_AND_EXPR: 4163 case TRUTH_AND_EXPR:
4197 return NULL_RTX; 4179 return NULL_RTX;
4198 /* Fall through. */ 4180 /* Fall through. */
4199 4181
4200 binary: 4182 binary:
4201 case tcc_binary: 4183 case tcc_binary:
4184 if (mode == BLKmode)
4185 return NULL_RTX;
4202 op1 = expand_debug_expr (TREE_OPERAND (exp, 1)); 4186 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4203 if (!op1) 4187 if (!op1)
4204 return NULL_RTX; 4188 return NULL_RTX;
4205 switch (TREE_CODE (exp)) 4189 switch (TREE_CODE (exp))
4206 { 4190 {
4221 } 4205 }
4222 /* Fall through. */ 4206 /* Fall through. */
4223 4207
4224 unary: 4208 unary:
4225 case tcc_unary: 4209 case tcc_unary:
4210 if (mode == BLKmode)
4211 return NULL_RTX;
4226 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); 4212 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4227 op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); 4213 op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
4228 if (!op0) 4214 if (!op0)
4229 return NULL_RTX; 4215 return NULL_RTX;
4230 break; 4216 break;
4263 case INTEGER_CST: 4249 case INTEGER_CST:
4264 case REAL_CST: 4250 case REAL_CST:
4265 case FIXED_CST: 4251 case FIXED_CST:
4266 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER); 4252 op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
4267 return op0; 4253 return op0;
4254
4255 case POLY_INT_CST:
4256 return immed_wide_int_const (poly_int_cst_value (exp), mode);
4268 4257
4269 case COMPLEX_CST: 4258 case COMPLEX_CST:
4270 gcc_assert (COMPLEX_MODE_P (mode)); 4259 gcc_assert (COMPLEX_MODE_P (mode));
4271 op0 = expand_debug_expr (TREE_REALPART (exp)); 4260 op0 = expand_debug_expr (TREE_REALPART (exp));
4272 op1 = expand_debug_expr (TREE_IMAGPART (exp)); 4261 op1 = expand_debug_expr (TREE_IMAGPART (exp));
4412 /* (mem (debug_implicit_ptr)) might confuse aliasing. 4401 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4413 Instead just use get_inner_reference. */ 4402 Instead just use get_inner_reference. */
4414 goto component_ref; 4403 goto component_ref;
4415 4404
4416 op1 = expand_debug_expr (TREE_OPERAND (exp, 1)); 4405 op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
4417 if (!op1 || !CONST_INT_P (op1)) 4406 poly_int64 offset;
4407 if (!op1 || !poly_int_rtx_p (op1, &offset))
4418 return NULL; 4408 return NULL;
4419 4409
4420 op0 = plus_constant (inner_mode, op0, INTVAL (op1)); 4410 op0 = plus_constant (inner_mode, op0, offset);
4421 } 4411 }
4422 4412
4423 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))); 4413 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4424 4414
4425 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as), 4415 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
4467 case REALPART_EXPR: 4457 case REALPART_EXPR:
4468 case IMAGPART_EXPR: 4458 case IMAGPART_EXPR:
4469 case VIEW_CONVERT_EXPR: 4459 case VIEW_CONVERT_EXPR:
4470 { 4460 {
4471 machine_mode mode1; 4461 machine_mode mode1;
4472 HOST_WIDE_INT bitsize, bitpos; 4462 poly_int64 bitsize, bitpos;
4473 tree offset; 4463 tree offset;
4474 int reversep, volatilep = 0; 4464 int reversep, volatilep = 0;
4475 tree tem 4465 tree tem
4476 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1, 4466 = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
4477 &unsignedp, &reversep, &volatilep); 4467 &unsignedp, &reversep, &volatilep);
4478 rtx orig_op0; 4468 rtx orig_op0;
4479 4469
4480 if (bitsize == 0) 4470 if (known_eq (bitsize, 0))
4481 return NULL; 4471 return NULL;
4482 4472
4483 orig_op0 = op0 = expand_debug_expr (tem); 4473 orig_op0 = op0 = expand_debug_expr (tem);
4484 4474
4485 if (!op0) 4475 if (!op0)
4516 } 4506 }
4517 4507
4518 if (MEM_P (op0)) 4508 if (MEM_P (op0))
4519 { 4509 {
4520 if (mode1 == VOIDmode) 4510 if (mode1 == VOIDmode)
4521 /* Bitfield. */
4522 mode1 = smallest_int_mode_for_size (bitsize);
4523 if (bitpos >= BITS_PER_UNIT)
4524 { 4511 {
4525 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT); 4512 if (maybe_gt (bitsize, MAX_BITSIZE_MODE_ANY_INT))
4526 bitpos %= BITS_PER_UNIT; 4513 return NULL;
4514 /* Bitfield. */
4515 mode1 = smallest_int_mode_for_size (bitsize);
4527 } 4516 }
4528 else if (bitpos < 0) 4517 poly_int64 bytepos = bits_to_bytes_round_down (bitpos);
4518 if (maybe_ne (bytepos, 0))
4529 { 4519 {
4530 HOST_WIDE_INT units 4520 op0 = adjust_address_nv (op0, mode1, bytepos);
4531 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT; 4521 bitpos = num_trailing_bits (bitpos);
4532 op0 = adjust_address_nv (op0, mode1, -units);
4533 bitpos += units * BITS_PER_UNIT;
4534 } 4522 }
4535 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode)) 4523 else if (known_eq (bitpos, 0)
4524 && known_eq (bitsize, GET_MODE_BITSIZE (mode)))
4536 op0 = adjust_address_nv (op0, mode, 0); 4525 op0 = adjust_address_nv (op0, mode, 0);
4537 else if (GET_MODE (op0) != mode1) 4526 else if (GET_MODE (op0) != mode1)
4538 op0 = adjust_address_nv (op0, mode1, 0); 4527 op0 = adjust_address_nv (op0, mode1, 0);
4539 else 4528 else
4540 op0 = copy_rtx (op0); 4529 op0 = copy_rtx (op0);
4541 if (op0 == orig_op0) 4530 if (op0 == orig_op0)
4542 op0 = shallow_copy_rtx (op0); 4531 op0 = shallow_copy_rtx (op0);
4543 set_mem_attributes (op0, exp, 0); 4532 set_mem_attributes (op0, exp, 0);
4544 } 4533 }
4545 4534
4546 if (bitpos == 0 && mode == GET_MODE (op0)) 4535 if (known_eq (bitpos, 0) && mode == GET_MODE (op0))
4547 return op0; 4536 return op0;
4548 4537
4549 if (bitpos < 0) 4538 if (maybe_lt (bitpos, 0))
4550 return NULL; 4539 return NULL;
4551 4540
4552 if (GET_MODE (op0) == BLKmode) 4541 if (GET_MODE (op0) == BLKmode || mode == BLKmode)
4553 return NULL; 4542 return NULL;
4554 4543
4555 if ((bitpos % BITS_PER_UNIT) == 0 4544 poly_int64 bytepos;
4556 && bitsize == GET_MODE_BITSIZE (mode1)) 4545 if (multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
4546 && known_eq (bitsize, GET_MODE_BITSIZE (mode1)))
4557 { 4547 {
4558 machine_mode opmode = GET_MODE (op0); 4548 machine_mode opmode = GET_MODE (op0);
4559 4549
4560 if (opmode == VOIDmode) 4550 if (opmode == VOIDmode)
4561 opmode = TYPE_MODE (TREE_TYPE (tem)); 4551 opmode = TYPE_MODE (TREE_TYPE (tem));
4564 right past the end of an array that turned out not to 4554 right past the end of an array that turned out not to
4565 be addressable (i.e., the address was only computed in 4555 be addressable (i.e., the address was only computed in
4566 debug stmts). The gen_subreg below would rightfully 4556 debug stmts). The gen_subreg below would rightfully
4567 crash, and the address doesn't really exist, so just 4557 crash, and the address doesn't really exist, so just
4568 drop it. */ 4558 drop it. */
4569 if (bitpos >= GET_MODE_BITSIZE (opmode)) 4559 if (known_ge (bitpos, GET_MODE_BITSIZE (opmode)))
4570 return NULL; 4560 return NULL;
4571 4561
4572 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0) 4562 if (multiple_p (bitpos, GET_MODE_BITSIZE (mode)))
4573 return simplify_gen_subreg (mode, op0, opmode, 4563 return simplify_gen_subreg (mode, op0, opmode, bytepos);
4574 bitpos / BITS_PER_UNIT);
4575 } 4564 }
4576 4565
4577 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0)) 4566 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
4578 && TYPE_UNSIGNED (TREE_TYPE (exp)) 4567 && TYPE_UNSIGNED (TREE_TYPE (exp))
4579 ? SIGN_EXTRACT 4568 ? SIGN_EXTRACT
4580 : ZERO_EXTRACT, mode, 4569 : ZERO_EXTRACT, mode,
4581 GET_MODE (op0) != VOIDmode 4570 GET_MODE (op0) != VOIDmode
4582 ? GET_MODE (op0) 4571 ? GET_MODE (op0)
4583 : TYPE_MODE (TREE_TYPE (tem)), 4572 : TYPE_MODE (TREE_TYPE (tem)),
4584 op0, GEN_INT (bitsize), GEN_INT (bitpos)); 4573 op0, gen_int_mode (bitsize, word_mode),
4574 gen_int_mode (bitpos, word_mode));
4585 } 4575 }
4586 4576
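
bits_to_bytes_round_down and num_trailing_bits split a (possibly polynomial) bit position into a byte address plus a sub-byte remainder, and unlike the old truncating-division path they stay consistent for negative positions. A scalar model, assuming BITS_PER_UNIT == 8 and an arithmetic right shift:

#include <cassert>

static long long toy_bits_to_bytes_round_down (long long bits)
{ return bits >> 3; }     /* floor division by BITS_PER_UNIT */

static long long toy_num_trailing_bits (long long bits)
{ return bits & 7; }      /* always in [0, 7] */

int main ()
{
  assert (toy_bits_to_bytes_round_down (35) == 4);
  assert (toy_num_trailing_bits (35) == 3);        /* 35 = 4*8 + 3   */
  assert (toy_bits_to_bytes_round_down (-3) == -1);
  assert (toy_num_trailing_bits (-3) == 5);        /* -3 = -1*8 + 5  */
  return 0;
}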
4587 case ABS_EXPR: 4577 case ABS_EXPR:
4578 case ABSU_EXPR:
4588 return simplify_gen_unary (ABS, mode, op0, mode); 4579 return simplify_gen_unary (ABS, mode, op0, mode);
4589 4580
4590 case NEGATE_EXPR: 4581 case NEGATE_EXPR:
4591 return simplify_gen_unary (NEG, mode, op0, mode); 4582 return simplify_gen_unary (NEG, mode, op0, mode);
4592 4583
4629 /* Fall through. */ 4620 /* Fall through. */
4630 case PLUS_EXPR: 4621 case PLUS_EXPR:
4631 return simplify_gen_binary (PLUS, mode, op0, op1); 4622 return simplify_gen_binary (PLUS, mode, op0, op1);
4632 4623
4633 case MINUS_EXPR: 4624 case MINUS_EXPR:
4625 case POINTER_DIFF_EXPR:
4634 return simplify_gen_binary (MINUS, mode, op0, op1); 4626 return simplify_gen_binary (MINUS, mode, op0, op1);
4635 4627
4636 case MULT_EXPR: 4628 case MULT_EXPR:
4637 return simplify_gen_binary (MULT, mode, op0, op1); 4629 return simplify_gen_binary (MULT, mode, op0, op1);
4638 4630
4895 || target_for_debug_bind (TREE_OPERAND (exp, 0)))) 4887 || target_for_debug_bind (TREE_OPERAND (exp, 0))))
4896 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0)); 4888 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));
4897 4889
4898 if (handled_component_p (TREE_OPERAND (exp, 0))) 4890 if (handled_component_p (TREE_OPERAND (exp, 0)))
4899 { 4891 {
4900 HOST_WIDE_INT bitoffset, bitsize, maxsize; 4892 poly_int64 bitoffset, bitsize, maxsize, byteoffset;
4901 bool reverse; 4893 bool reverse;
4902 tree decl 4894 tree decl
4903 = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset, 4895 = get_ref_base_and_extent (TREE_OPERAND (exp, 0), &bitoffset,
4904 &bitsize, &maxsize, &reverse); 4896 &bitsize, &maxsize, &reverse);
4905 if ((VAR_P (decl) 4897 if ((VAR_P (decl)
4906 || TREE_CODE (decl) == PARM_DECL 4898 || TREE_CODE (decl) == PARM_DECL
4907 || TREE_CODE (decl) == RESULT_DECL) 4899 || TREE_CODE (decl) == RESULT_DECL)
4908 && (!TREE_ADDRESSABLE (decl) 4900 && (!TREE_ADDRESSABLE (decl)
4909 || target_for_debug_bind (decl)) 4901 || target_for_debug_bind (decl))
4910 && (bitoffset % BITS_PER_UNIT) == 0 4902 && multiple_p (bitoffset, BITS_PER_UNIT, &byteoffset)
4911 && bitsize > 0 4903 && known_gt (bitsize, 0)
4912 && bitsize == maxsize) 4904 && known_eq (bitsize, maxsize))
4913 { 4905 {
4914 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl); 4906 rtx base = gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl);
4915 return plus_constant (mode, base, bitoffset / BITS_PER_UNIT); 4907 return plus_constant (mode, base, byteoffset);
4916 } 4908 }
4917 } 4909 }
4918 4910
4919 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF 4911 if (TREE_CODE (TREE_OPERAND (exp, 0)) == MEM_REF
4920 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) 4912 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
4928 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR 4920 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR
4929 && CONST_INT_P (XEXP (op0, 1))))) 4921 && CONST_INT_P (XEXP (op0, 1)))))
4930 { 4922 {
4931 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 4923 op1 = expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp, 0),
4932 1)); 4924 1));
4933 if (!op1 || !CONST_INT_P (op1)) 4925 poly_int64 offset;
4926 if (!op1 || !poly_int_rtx_p (op1, &offset))
4934 return NULL; 4927 return NULL;
4935 4928
4936 return plus_constant (mode, op0, INTVAL (op1)); 4929 return plus_constant (mode, op0, offset);
4937 } 4930 }
4938 } 4931 }
4939 4932
4940 return NULL; 4933 return NULL;
4941 } 4934 }
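[Editor's note: the ADDR_EXPR folding above only produces DEBUG_IMPLICIT_PTR plus a byte offset when get_ref_base_and_extent reports a non-empty, byte-aligned range whose size is known exactly (bitsize == maxsize). A plain-integer sketch of that guard follows; the real code works on poly_int64 values, and the helper name here is made up.]

    #include <cstdint>

    /* Hypothetical mirror of the conditions above: the bit range must
       start on a byte boundary, be non-empty, and have an exactly known
       extent before the address can become base + byte offset.  */
    static bool
    implicit_ptr_byte_offset (int64_t bitoffset, int64_t bitsize,
                              int64_t maxsize, int64_t *byteoffset)
    {
      const int bits_per_unit = 8;
      if (bitoffset % bits_per_unit != 0)
        return false;               /* not byte aligned */
      if (bitsize <= 0 || bitsize != maxsize)
        return false;               /* extent empty, unknown or variable */
      *byteoffset = bitoffset / bits_per_unit;
      return true;
    }

    int main ()
    {
      int64_t off;
      /* 24 bits in, 32-bit field with exact extent -> byte offset 3.  */
      return implicit_ptr_byte_offset (24, 32, 32, &off) && off == 3 ? 0 : 1;
    }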
4946 4939
4947 return op0; 4940 return op0;
4948 4941
4949 case VECTOR_CST: 4942 case VECTOR_CST:
4950 { 4943 {
4951 unsigned i, nelts; 4944 unsigned HOST_WIDE_INT i, nelts;
4952 4945
4953 nelts = VECTOR_CST_NELTS (exp); 4946 if (!VECTOR_CST_NELTS (exp).is_constant (&nelts))
4947 return NULL;
4948
4954 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts)); 4949 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
4955 4950
4956 for (i = 0; i < nelts; ++i) 4951 for (i = 0; i < nelts; ++i)
4957 { 4952 {
4958 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i)); 4953 op1 = expand_debug_expr (VECTOR_CST_ELT (exp, i));
4968 if (TREE_CLOBBER_P (exp)) 4963 if (TREE_CLOBBER_P (exp))
4969 return NULL; 4964 return NULL;
4970 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE) 4965 else if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
4971 { 4966 {
4972 unsigned i; 4967 unsigned i;
4968 unsigned HOST_WIDE_INT nelts;
4973 tree val; 4969 tree val;
4974 4970
4975 op0 = gen_rtx_CONCATN 4971 if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)).is_constant (&nelts))
4976 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))); 4972 goto flag_unsupported;
4973
4974 op0 = gen_rtx_CONCATN (mode, rtvec_alloc (nelts));
4977 4975
4978 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val) 4976 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
4979 { 4977 {
4980 op1 = expand_debug_expr (val); 4978 op1 = expand_debug_expr (val);
4981 if (!op1) 4979 if (!op1)
4982 return NULL; 4980 return NULL;
4983 XVECEXP (op0, 0, i) = op1; 4981 XVECEXP (op0, 0, i) = op1;
4984 } 4982 }
4985 4983
4986 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))) 4984 if (i < nelts)
4987 { 4985 {
4988 op1 = expand_debug_expr 4986 op1 = expand_debug_expr
4989 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp)))); 4987 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));
4990 4988
4991 if (!op1) 4989 if (!op1)
4992 return NULL; 4990 return NULL;
4993 4991
4994 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++) 4992 for (; i < nelts; i++)
4995 XVECEXP (op0, 0, i) = op1; 4993 XVECEXP (op0, 0, i) = op1;
4996 } 4994 }
4997 4995
4998 return op0; 4996 return op0;
4999 } 4997 }
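[Editor's note: for a vector CONSTRUCTOR the debug expander now insists on a compile-time lane count (variable-length vectors bail out through flag_unsupported), expands the explicit elements, and reuses a single zero element for any remaining lanes. A minimal stand-alone sketch of that padding scheme, using plain ints in place of the expanded rtxes:]

    #include <vector>
    #include <cstdio>

    /* Illustrative only: fill the leading lanes from the explicit
       initializers, then replicate one zero for the tail, as the
       CONCATN loop above does.  */
    static std::vector<int>
    expand_vector_ctor (const std::vector<int> &explicit_elts,
                        unsigned nelts)
    {
      std::vector<int> lanes;
      lanes.reserve (nelts);
      unsigned i = 0;
      for (; i < explicit_elts.size () && i < nelts; ++i)
        lanes.push_back (explicit_elts[i]);
      for (; i < nelts; ++i)
        lanes.push_back (0);      /* shared zero for the tail */
      return lanes;
    }

    int main ()
    {
      auto v = expand_vector_ctor ({1, 2}, 4);   /* -> 1 2 0 0 */
      for (int x : v)
        std::printf ("%d ", x);
      return 0;
    }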
5056 case ERROR_MARK: 5054 case ERROR_MARK:
5057 return NULL; 5055 return NULL;
5058 5056
5059 /* Vector stuff. For most of the codes we don't have rtl codes. */ 5057 /* Vector stuff. For most of the codes we don't have rtl codes. */
5060 case REALIGN_LOAD_EXPR: 5058 case REALIGN_LOAD_EXPR:
5061 case REDUC_MAX_EXPR:
5062 case REDUC_MIN_EXPR:
5063 case REDUC_PLUS_EXPR:
5064 case VEC_COND_EXPR: 5059 case VEC_COND_EXPR:
5065 case VEC_PACK_FIX_TRUNC_EXPR: 5060 case VEC_PACK_FIX_TRUNC_EXPR:
5061 case VEC_PACK_FLOAT_EXPR:
5066 case VEC_PACK_SAT_EXPR: 5062 case VEC_PACK_SAT_EXPR:
5067 case VEC_PACK_TRUNC_EXPR: 5063 case VEC_PACK_TRUNC_EXPR:
5064 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
5065 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
5068 case VEC_UNPACK_FLOAT_HI_EXPR: 5066 case VEC_UNPACK_FLOAT_HI_EXPR:
5069 case VEC_UNPACK_FLOAT_LO_EXPR: 5067 case VEC_UNPACK_FLOAT_LO_EXPR:
5070 case VEC_UNPACK_HI_EXPR: 5068 case VEC_UNPACK_HI_EXPR:
5071 case VEC_UNPACK_LO_EXPR: 5069 case VEC_UNPACK_LO_EXPR:
5072 case VEC_WIDEN_MULT_HI_EXPR: 5070 case VEC_WIDEN_MULT_HI_EXPR:
5074 case VEC_WIDEN_MULT_EVEN_EXPR: 5072 case VEC_WIDEN_MULT_EVEN_EXPR:
5075 case VEC_WIDEN_MULT_ODD_EXPR: 5073 case VEC_WIDEN_MULT_ODD_EXPR:
5076 case VEC_WIDEN_LSHIFT_HI_EXPR: 5074 case VEC_WIDEN_LSHIFT_HI_EXPR:
5077 case VEC_WIDEN_LSHIFT_LO_EXPR: 5075 case VEC_WIDEN_LSHIFT_LO_EXPR:
5078 case VEC_PERM_EXPR: 5076 case VEC_PERM_EXPR:
5077 case VEC_DUPLICATE_EXPR:
5078 case VEC_SERIES_EXPR:
5079 return NULL; 5079 return NULL;
5080 5080
5081 /* Misc codes. */ 5081 /* Misc codes. */
5082 case ADDR_SPACE_CONVERT_EXPR: 5082 case ADDR_SPACE_CONVERT_EXPR:
5083 case FIXED_CONVERT_EXPR: 5083 case FIXED_CONVERT_EXPR:
5147 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR 5147 return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
5148 ? ASHIFT : PLUS, mode, op0, op1); 5148 ? ASHIFT : PLUS, mode, op0, op1);
5149 } 5149 }
5150 return NULL; 5150 return NULL;
5151 5151
5152 case FMA_EXPR:
5153 return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
5154
5155 default: 5152 default:
5156 flag_unsupported: 5153 flag_unsupported:
5157 if (flag_checking) 5154 if (flag_checking)
5158 { 5155 {
5159 debug_tree (exp); 5156 debug_tree (exp);

5172 rtx op0 = NULL_RTX; 5169 rtx op0 = NULL_RTX;
5173 machine_mode mode = VOIDmode, inner_mode; 5170 machine_mode mode = VOIDmode, inner_mode;
5174 5171
5175 switch (TREE_CODE (exp)) 5172 switch (TREE_CODE (exp))
5176 { 5173 {
5174 case VAR_DECL:
5175 if (DECL_ABSTRACT_ORIGIN (exp))
5176 return expand_debug_source_expr (DECL_ABSTRACT_ORIGIN (exp));
5177 break;
5177 case PARM_DECL: 5178 case PARM_DECL:
5178 { 5179 {
5179 mode = DECL_MODE (exp); 5180 mode = DECL_MODE (exp);
5180 op0 = expand_debug_parm_decl (exp); 5181 op0 = expand_debug_parm_decl (exp);
5181 if (op0) 5182 if (op0)
5315 -fcompare-debug failures, even though it doesn't bring about any 5316 -fcompare-debug failures, even though it doesn't bring about any
5316 codegen changes. */ 5317 codegen changes. */
5317 flag_strict_aliasing = 0; 5318 flag_strict_aliasing = 0;
5318 5319
5319 for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) 5320 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5320 if (DEBUG_INSN_P (insn)) 5321 if (DEBUG_BIND_INSN_P (insn))
5321 { 5322 {
5322 tree value = (tree)INSN_VAR_LOCATION_LOC (insn); 5323 tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
5323 rtx val; 5324 rtx val;
5324 rtx_insn *prev_insn, *insn2; 5325 rtx_insn *prev_insn, *insn2;
5325 machine_mode mode; 5326 machine_mode mode;
5462 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls) 5463 expand_gimple_basic_block (basic_block bb, bool disable_tail_calls)
5463 { 5464 {
5464 gimple_stmt_iterator gsi; 5465 gimple_stmt_iterator gsi;
5465 gimple_seq stmts; 5466 gimple_seq stmts;
5466 gimple *stmt = NULL; 5467 gimple *stmt = NULL;
5467 rtx_note *note; 5468 rtx_note *note = NULL;
5468 rtx_insn *last; 5469 rtx_insn *last;
5469 edge e; 5470 edge e;
5470 edge_iterator ei; 5471 edge_iterator ei;
5471 5472
5472 if (dump_file) 5473 if (dump_file)
5515 5516
5516 rtx_code_label **elt = lab_rtx_for_bb->get (bb); 5517 rtx_code_label **elt = lab_rtx_for_bb->get (bb);
5517 5518
5518 if (stmt || elt) 5519 if (stmt || elt)
5519 { 5520 {
5521 gcc_checking_assert (!note);
5520 last = get_last_insn (); 5522 last = get_last_insn ();
5521 5523
5522 if (stmt) 5524 if (stmt)
5523 { 5525 {
5524 expand_gimple_stmt (stmt); 5526 expand_gimple_stmt (stmt);
5529 emit_label (*elt); 5531 emit_label (*elt);
5530 5532
5531 BB_HEAD (bb) = NEXT_INSN (last); 5533 BB_HEAD (bb) = NEXT_INSN (last);
5532 if (NOTE_P (BB_HEAD (bb))) 5534 if (NOTE_P (BB_HEAD (bb)))
5533 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb)); 5535 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
5536 gcc_assert (LABEL_P (BB_HEAD (bb)));
5534 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb)); 5537 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
5535 5538
5536 maybe_dump_rtl_for_gimple_stmt (stmt, last); 5539 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5537 } 5540 }
5538 else 5541 else
5539 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK); 5542 BB_HEAD (bb) = note = emit_note (NOTE_INSN_BASIC_BLOCK);
5540 5543
5541 NOTE_BASIC_BLOCK (note) = bb; 5544 if (note)
5545 NOTE_BASIC_BLOCK (note) = bb;
5542 5546
5543 for (; !gsi_end_p (gsi); gsi_next (&gsi)) 5547 for (; !gsi_end_p (gsi); gsi_next (&gsi))
5544 { 5548 {
5545 basic_block new_bb; 5549 basic_block new_bb;
5546 5550
5568 #DEBUG #D1 => a_1 5572 #DEBUG #D1 => a_1
5569 use(a_1); 5573 use(a_1);
5570 a_2 = ... 5574 a_2 = ...
5571 #DEBUG ... => #D1 5575 #DEBUG ... => #D1
5572 */ 5576 */
5573 if (MAY_HAVE_DEBUG_INSNS 5577 if (MAY_HAVE_DEBUG_BIND_INSNS
5574 && SA.values 5578 && SA.values
5575 && !is_gimple_debug (stmt)) 5579 && !is_gimple_debug (stmt))
5576 { 5580 {
5577 ssa_op_iter iter; 5581 ssa_op_iter iter;
5578 tree op; 5582 tree op;
5649 { 5653 {
5650 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt)); 5654 new_bb = expand_gimple_cond (bb, as_a <gcond *> (stmt));
5651 if (new_bb) 5655 if (new_bb)
5652 return new_bb; 5656 return new_bb;
5653 } 5657 }
5654 else if (gimple_debug_bind_p (stmt)) 5658 else if (is_gimple_debug (stmt))
5655 { 5659 {
5656 location_t sloc = curr_insn_location (); 5660 location_t sloc = curr_insn_location ();
5657 gimple_stmt_iterator nsi = gsi; 5661 gimple_stmt_iterator nsi = gsi;
5658 5662
5659 for (;;) 5663 for (;;)
5660 { 5664 {
5661 tree var = gimple_debug_bind_get_var (stmt); 5665 tree var;
5662 tree value; 5666 tree value = NULL_TREE;
5663 rtx val; 5667 rtx val = NULL_RTX;
5664 machine_mode mode; 5668 machine_mode mode;
5665 5669
5666 if (TREE_CODE (var) != DEBUG_EXPR_DECL 5670 if (!gimple_debug_nonbind_marker_p (stmt))
5667 && TREE_CODE (var) != LABEL_DECL 5671 {
5668 && !target_for_debug_bind (var)) 5672 if (gimple_debug_bind_p (stmt))
5673 {
5674 var = gimple_debug_bind_get_var (stmt);
5675
5676 if (TREE_CODE (var) != DEBUG_EXPR_DECL
5677 && TREE_CODE (var) != LABEL_DECL
5678 && !target_for_debug_bind (var))
5679 goto delink_debug_stmt;
5680
5681 if (DECL_P (var))
5682 mode = DECL_MODE (var);
5683 else
5684 mode = TYPE_MODE (TREE_TYPE (var));
5685
5686 if (gimple_debug_bind_has_value_p (stmt))
5687 value = gimple_debug_bind_get_value (stmt);
5688
5689 val = gen_rtx_VAR_LOCATION
5690 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5691 }
5692 else if (gimple_debug_source_bind_p (stmt))
5693 {
5694 var = gimple_debug_source_bind_get_var (stmt);
5695
5696 value = gimple_debug_source_bind_get_value (stmt);
5697
5698 mode = DECL_MODE (var);
5699
5700 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5701 VAR_INIT_STATUS_UNINITIALIZED);
5702 }
5703 else
5704 gcc_unreachable ();
5705 }
5706 /* If this function was first compiled with markers
5707 enabled, but they're now disabled (e.g. LTO), drop
5708 them on the floor. */
5709 else if (gimple_debug_nonbind_marker_p (stmt)
5710 && !MAY_HAVE_DEBUG_MARKER_INSNS)
5669 goto delink_debug_stmt; 5711 goto delink_debug_stmt;
5670 5712 else if (gimple_debug_begin_stmt_p (stmt))
5671 if (gimple_debug_bind_has_value_p (stmt)) 5713 val = GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
5672 value = gimple_debug_bind_get_value (stmt); 5714 else if (gimple_debug_inline_entry_p (stmt))
5715 {
5716 tree block = gimple_block (stmt);
5717
5718 if (block)
5719 val = GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
5720 else
5721 goto delink_debug_stmt;
5722 }
5673 else 5723 else
5674 value = NULL_TREE; 5724 gcc_unreachable ();
5675 5725
5676 last = get_last_insn (); 5726 last = get_last_insn ();
5677 5727
5678 set_curr_insn_location (gimple_location (stmt)); 5728 set_curr_insn_location (gimple_location (stmt));
5679
5680 if (DECL_P (var))
5681 mode = DECL_MODE (var);
5682 else
5683 mode = TYPE_MODE (TREE_TYPE (var));
5684
5685 val = gen_rtx_VAR_LOCATION
5686 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
5687 5729
5688 emit_debug_insn (val); 5730 emit_debug_insn (val);
5689 5731
5690 if (dump_file && (dump_flags & TDF_DETAILS)) 5732 if (dump_file && (dump_flags & TDF_DETAILS))
5691 { 5733 {
5692 /* We can't dump the insn with a TREE where an RTX 5734 /* We can't dump the insn with a TREE where an RTX
5693 is expected. */ 5735 is expected. */
5694 PAT_VAR_LOCATION_LOC (val) = const0_rtx; 5736 if (GET_CODE (val) == VAR_LOCATION)
5737 {
5738 gcc_checking_assert (PAT_VAR_LOCATION_LOC (val) == (rtx)value);
5739 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5740 }
5695 maybe_dump_rtl_for_gimple_stmt (stmt, last); 5741 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5696 PAT_VAR_LOCATION_LOC (val) = (rtx)value; 5742 if (GET_CODE (val) == VAR_LOCATION)
5743 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5697 } 5744 }
5698 5745
5699 delink_debug_stmt: 5746 delink_debug_stmt:
5700 /* In order not to generate too many debug temporaries, 5747 /* In order not to generate too many debug temporaries,
5701 we delink all uses of debug statements we already expanded. 5748 we delink all uses of debug statements we already expanded.
5707 gsi = nsi; 5754 gsi = nsi;
5708 gsi_next (&nsi); 5755 gsi_next (&nsi);
5709 if (gsi_end_p (nsi)) 5756 if (gsi_end_p (nsi))
5710 break; 5757 break;
5711 stmt = gsi_stmt (nsi); 5758 stmt = gsi_stmt (nsi);
5712 if (!gimple_debug_bind_p (stmt)) 5759 if (!is_gimple_debug (stmt))
5713 break; 5760 break;
5714 }
5715
5716 set_curr_insn_location (sloc);
5717 }
5718 else if (gimple_debug_source_bind_p (stmt))
5719 {
5720 location_t sloc = curr_insn_location ();
5721 tree var = gimple_debug_source_bind_get_var (stmt);
5722 tree value = gimple_debug_source_bind_get_value (stmt);
5723 rtx val;
5724 machine_mode mode;
5725
5726 last = get_last_insn ();
5727
5728 set_curr_insn_location (gimple_location (stmt));
5729
5730 mode = DECL_MODE (var);
5731
5732 val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
5733 VAR_INIT_STATUS_UNINITIALIZED);
5734
5735 emit_debug_insn (val);
5736
5737 if (dump_file && (dump_flags & TDF_DETAILS))
5738 {
5739 /* We can't dump the insn with a TREE where an RTX
5740 is expected. */
5741 PAT_VAR_LOCATION_LOC (val) = const0_rtx;
5742 maybe_dump_rtl_for_gimple_stmt (stmt, last);
5743 PAT_VAR_LOCATION_LOC (val) = (rtx)value;
5744 } 5761 }
5745 5762
5746 set_curr_insn_location (sloc); 5763 set_curr_insn_location (sloc);
5747 } 5764 }
5748 else 5765 else
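[Editor's note: the rewrite above replaces two separate arms (debug binds and source binds) with one loop that funnels every debug statement flavor through a single dispatch: binds yield initialized VAR_LOCATIONs, source binds yield uninitialized ones, and the nonbind markers introduced in GCC 8 either emit begin-stmt / inline-entry marker patterns or are dropped (markers disabled, or an inline-entry whose block is gone). A schematic classifier follows; the enum and function names are invented and do not exist in GCC, and the bind arm omits the target_for_debug_bind filtering visible above.]

    /* Hypothetical mirror of the dispatch in expand_gimple_basic_block.  */
    enum class debug_kind { bind, source_bind, begin_stmt, inline_entry };

    enum class expansion { var_location_init, var_location_uninit,
                           begin_stmt_marker, inline_entry_marker, drop };

    static expansion
    classify_debug_stmt (debug_kind kind, bool markers_enabled,
                         bool has_block)
    {
      switch (kind)
        {
        case debug_kind::bind:
          return expansion::var_location_init;
        case debug_kind::source_bind:
          return expansion::var_location_uninit;
        case debug_kind::begin_stmt:
          return markers_enabled ? expansion::begin_stmt_marker
                                 : expansion::drop;
        case debug_kind::inline_entry:
          return markers_enabled && has_block
                 ? expansion::inline_entry_marker
                 : expansion::drop;
        }
      return expansion::drop;   /* unreachable */
    }

[The dump-time VAR_LOCATION juggling also grows a GET_CODE check because marker insns carry no location payload to swap out.]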
5820 last = get_last_insn (); 5837 last = get_last_insn ();
5821 if (BARRIER_P (last)) 5838 if (BARRIER_P (last))
5822 last = PREV_INSN (last); 5839 last = PREV_INSN (last);
5823 if (JUMP_TABLE_DATA_P (last)) 5840 if (JUMP_TABLE_DATA_P (last))
5824 last = PREV_INSN (PREV_INSN (last)); 5841 last = PREV_INSN (PREV_INSN (last));
5842 if (BARRIER_P (last))
5843 last = PREV_INSN (last);
5825 BB_END (bb) = last; 5844 BB_END (bb) = last;
5826 5845
5827 update_bb_for_insn (bb); 5846 update_bb_for_insn (bb);
5828 5847
5829 return bb; 5848 return bb;
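[Editor's note: the extra BARRIER_P test added above appears to handle the barrier that sits between a tablejump and its jump table, so that BB_END lands on the jump itself rather than on table bookkeeping. A toy walk over a hand-built instruction chain, assuming a deliberately simplified node type in place of rtx_insn:]

    /* Hypothetical insn node; the real walk uses PREV_INSN.  */
    struct insn { insn *prev; bool barrier; bool jump_table_data; };

    static insn *
    find_bb_end (insn *last)
    {
      if (last->barrier)
        last = last->prev;          /* skip a trailing barrier */
      if (last->jump_table_data)
        last = last->prev->prev;    /* skip the table and its label */
      if (last->barrier)
        last = last->prev;          /* skip the barrier after the jump */
      return last;
    }

    int main ()
    {
      insn jump  = { nullptr, false, false };
      insn bar   = { &jump,   true,  false };
      insn label = { &bar,    false, false };
      insn table = { &label,  false, true  };
      return find_bb_end (&table) == &jump ? 0 : 1;
    }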
5861 flags = EDGE_FALLTHRU; 5880 flags = EDGE_FALLTHRU;
5862 5881
5863 init_block = create_basic_block (NEXT_INSN (get_insns ()), 5882 init_block = create_basic_block (NEXT_INSN (get_insns ()),
5864 get_last_insn (), 5883 get_last_insn (),
5865 ENTRY_BLOCK_PTR_FOR_FN (cfun)); 5884 ENTRY_BLOCK_PTR_FOR_FN (cfun));
5866 init_block->frequency = ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency;
5867 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count; 5885 init_block->count = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
5868 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father); 5886 add_bb_to_loop (init_block, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5869 if (e) 5887 if (e)
5870 { 5888 {
5871 first_block = e->dest; 5889 first_block = e->dest;
5925 block. */ 5943 block. */
5926 BB_END (prev_bb) = orig_end; 5944 BB_END (prev_bb) = orig_end;
5927 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head))) 5945 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
5928 head = NEXT_INSN (head); 5946 head = NEXT_INSN (head);
5929 /* But make sure exit_block starts with RETURN_LABEL, otherwise the 5947 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
5930 bb frequency counting will be confused. Any instructions before that 5948 bb count accounting will be confused. Any instructions before that
5931 label are emitted for the case where PREV_BB falls through into the 5949 label are emitted for the case where PREV_BB falls through into the
5932 exit block, so append those instructions to prev_bb in that case. */ 5950 exit block, so append those instructions to prev_bb in that case. */
5933 if (NEXT_INSN (head) != return_label) 5951 if (NEXT_INSN (head) != return_label)
5934 { 5952 {
5935 while (NEXT_INSN (head) != return_label) 5953 while (NEXT_INSN (head) != return_label)
5938 BB_END (prev_bb) = NEXT_INSN (head); 5956 BB_END (prev_bb) = NEXT_INSN (head);
5939 head = NEXT_INSN (head); 5957 head = NEXT_INSN (head);
5940 } 5958 }
5941 } 5959 }
5942 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb); 5960 exit_block = create_basic_block (NEXT_INSN (head), end, prev_bb);
5943 exit_block->frequency = EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency;
5944 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count; 5961 exit_block->count = EXIT_BLOCK_PTR_FOR_FN (cfun)->count;
5945 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father); 5962 add_bb_to_loop (exit_block, EXIT_BLOCK_PTR_FOR_FN (cfun)->loop_father);
5946 5963
5947 ix = 0; 5964 ix = 0;
5948 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)) 5965 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
5958 EDGE_FALLTHRU); 5975 EDGE_FALLTHRU);
5959 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds) 5976 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR_FOR_FN (cfun)->preds)
5960 if (e2 != e) 5977 if (e2 != e)
5961 { 5978 {
5962 exit_block->count -= e2->count (); 5979 exit_block->count -= e2->count ();
5963 exit_block->frequency -= EDGE_FREQUENCY (e2);
5964 } 5980 }
5965 if (exit_block->frequency < 0)
5966 exit_block->frequency = 0;
5967 update_bb_for_insn (exit_block); 5981 update_bb_for_insn (exit_block);
5968 } 5982 }
5969 5983
5970 /* Helper function for discover_nonconstant_array_refs. 5984 /* Helper function for discover_nonconstant_array_refs.
5971 Look for ARRAY_REF nodes with non-constant indexes and mark them 5985 Look for ARRAY_REF nodes with non-constant indexes and mark them
6090 6104
6091 /* stack_realign_drap and drap_rtx must match. */ 6105 /* stack_realign_drap and drap_rtx must match. */
6092 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL)); 6106 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
6093 6107
6094 /* Do nothing if NULL is returned, which means DRAP is not needed. */ 6108 /* Do nothing if NULL is returned, which means DRAP is not needed. */
6095 if (NULL != drap_rtx) 6109 if (drap_rtx != NULL)
6096 { 6110 {
6097 crtl->args.internal_arg_pointer = drap_rtx; 6111 crtl->args.internal_arg_pointer = drap_rtx;
6098 6112
6099 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is 6113 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6100 needed. */ 6114 needed. */
6194 timevar_push (TV_OUT_OF_SSA); 6208 timevar_push (TV_OUT_OF_SSA);
6195 rewrite_out_of_ssa (&SA); 6209 rewrite_out_of_ssa (&SA);
6196 timevar_pop (TV_OUT_OF_SSA); 6210 timevar_pop (TV_OUT_OF_SSA);
6197 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions); 6211 SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);
6198 6212
6199 if (MAY_HAVE_DEBUG_STMTS && flag_tree_ter) 6213 if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
6200 { 6214 {
6201 gimple_stmt_iterator gsi; 6215 gimple_stmt_iterator gsi;
6202 FOR_EACH_BB_FN (bb, cfun) 6216 FOR_EACH_BB_FN (bb, cfun)
6203 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) 6217 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
6204 if (gimple_debug_bind_p (gsi_stmt (gsi))) 6218 if (gimple_debug_bind_p (gsi_stmt (gsi)))
6213 currently_expanding_to_rtl = 1; 6227 currently_expanding_to_rtl = 1;
6214 /* Dominators are not kept up-to-date as we may create new basic-blocks. */ 6228 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
6215 free_dominance_info (CDI_DOMINATORS); 6229 free_dominance_info (CDI_DOMINATORS);
6216 6230
6217 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun)); 6231 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));
6218
6219 if (chkp_function_instrumented_p (current_function_decl))
6220 chkp_reset_rtl_bounds ();
6221 6232
6222 insn_locations_init (); 6233 insn_locations_init ();
6223 if (!DECL_IS_BUILTIN (current_function_decl)) 6234 if (!DECL_IS_BUILTIN (current_function_decl))
6224 { 6235 {
6225 /* Eventually, all FEs should explicitly set function_start_locus. */ 6236 /* Eventually, all FEs should explicitly set function_start_locus. */
6380 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the 6391 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
6381 remaining edges later. */ 6392 remaining edges later. */
6382 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs) 6393 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
6383 e->flags &= ~EDGE_EXECUTABLE; 6394 e->flags &= ~EDGE_EXECUTABLE;
6384 6395
6396 /* If the function has too many markers, drop them while expanding. */
6397 if (cfun->debug_marker_count
6398 >= PARAM_VALUE (PARAM_MAX_DEBUG_MARKER_COUNT))
6399 cfun->debug_nonbind_markers = false;
6400
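[Editor's note: expansion now also enforces a per-function marker budget: if earlier passes accumulated at least PARAM_MAX_DEBUG_MARKER_COUNT statement markers, nonbind markers are switched off for the whole function before any block is expanded, bounding debug-info growth on machine-generated code. Schematically (the helper below is invented; only the parameter is GCC's):]

    /* Illustrative only: decide once per function whether to keep
       nonbind debug markers, mirroring the >= test above.  */
    static bool
    keep_debug_markers_p (unsigned long marker_count,
                          unsigned long max_marker_count)
    {
      return marker_count < max_marker_count;
    }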
6385 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>; 6401 lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
6386 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun), 6402 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
6387 next_bb) 6403 next_bb)
6388 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX); 6404 bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
6389 6405
6390 if (MAY_HAVE_DEBUG_INSNS) 6406 if (MAY_HAVE_DEBUG_BIND_INSNS)
6391 expand_debug_locations (); 6407 expand_debug_locations ();
6392 6408
6393 if (deep_ter_debug_map) 6409 if (deep_ter_debug_map)
6394 { 6410 {
6395 delete deep_ter_debug_map; 6411 delete deep_ter_debug_map;