comparison gcc/tree-cfg.c @ 131:84e7813d76e9

gcc-8.2
author mir3636
date Thu, 25 Oct 2018 07:37:49 +0900
parents 04ced10e8804
children 1830386684a0
compared revisions: 111:04ced10e8804 and 131:84e7813d76e9
1 /* Control flow functions for trees. 1 /* Control flow functions for trees.
2 Copyright (C) 2001-2017 Free Software Foundation, Inc. 2 Copyright (C) 2001-2018 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com> 3 Contributed by Diego Novillo <dnovillo@redhat.com>
4 4
5 This file is part of GCC. 5 This file is part of GCC.
6 6
7 GCC is free software; you can redistribute it and/or modify 7 GCC is free software; you can redistribute it and/or modify
59 #include "tree-cfgcleanup.h" 59 #include "tree-cfgcleanup.h"
60 #include "gimplify.h" 60 #include "gimplify.h"
61 #include "attribs.h" 61 #include "attribs.h"
62 #include "selftest.h" 62 #include "selftest.h"
63 #include "opts.h" 63 #include "opts.h"
64 #include "asan.h"
64 65
65 /* This file contains functions for building the Control Flow Graph (CFG) 66 /* This file contains functions for building the Control Flow Graph (CFG)
66 for a function tree. */ 67 for a function tree. */
67 68
68 /* Local declarations. */ 69 /* Local declarations. */
107 }; 108 };
108 109
109 /* Hash table to store last discriminator assigned for each locus. */ 110 /* Hash table to store last discriminator assigned for each locus. */
110 struct locus_discrim_map 111 struct locus_discrim_map
111 { 112 {
112 location_t locus; 113 int location_line;
113 int discriminator; 114 int discriminator;
114 }; 115 };
115 116
116 /* Hashtable helpers. */ 117 /* Hashtable helpers. */
117 118
126 a hash table entry that maps a location_t to a discriminator. */ 127 a hash table entry that maps a location_t to a discriminator. */
127 128
128 inline hashval_t 129 inline hashval_t
129 locus_discrim_hasher::hash (const locus_discrim_map *item) 130 locus_discrim_hasher::hash (const locus_discrim_map *item)
130 { 131 {
131 return LOCATION_LINE (item->locus); 132 return item->location_line;
132 } 133 }
133 134
134 /* Equality function for the locus-to-discriminator map. A and B 135 /* Equality function for the locus-to-discriminator map. A and B
135 point to the two hash table entries to compare. */ 136 point to the two hash table entries to compare. */
136 137
137 inline bool 138 inline bool
138 locus_discrim_hasher::equal (const locus_discrim_map *a, 139 locus_discrim_hasher::equal (const locus_discrim_map *a,
139 const locus_discrim_map *b) 140 const locus_discrim_map *b)
140 { 141 {
141 return LOCATION_LINE (a->locus) == LOCATION_LINE (b->locus); 142 return a->location_line == b->location_line;
142 } 143 }
143 144
144 static hash_table<locus_discrim_hasher> *discriminator_per_locus; 145 static hash_table<locus_discrim_hasher> *discriminator_per_locus;
145 146
146 /* Basic blocks and flowgraphs. */ 147 /* Basic blocks and flowgraphs. */
167 /* Flowgraph optimization and cleanup. */ 168 /* Flowgraph optimization and cleanup. */
168 static void gimple_merge_blocks (basic_block, basic_block); 169 static void gimple_merge_blocks (basic_block, basic_block);
169 static bool gimple_can_merge_blocks_p (basic_block, basic_block); 170 static bool gimple_can_merge_blocks_p (basic_block, basic_block);
170 static void remove_bb (basic_block); 171 static void remove_bb (basic_block);
171 static edge find_taken_edge_computed_goto (basic_block, tree); 172 static edge find_taken_edge_computed_goto (basic_block, tree);
172 static edge find_taken_edge_cond_expr (basic_block, tree); 173 static edge find_taken_edge_cond_expr (const gcond *, tree);
173 static edge find_taken_edge_switch_expr (gswitch *, basic_block, tree);
174 static tree find_case_label_for_value (gswitch *, tree);
175 static void lower_phi_internal_fn (); 174 static void lower_phi_internal_fn ();
176 175
177 void 176 void
178 init_empty_tree_cfg_for_function (struct function *fn) 177 init_empty_tree_cfg_for_function (struct function *fn)
179 { 178 {
278 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1))) 277 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
279 { 278 {
280 case annot_expr_ivdep_kind: 279 case annot_expr_ivdep_kind:
281 loop->safelen = INT_MAX; 280 loop->safelen = INT_MAX;
282 break; 281 break;
282 case annot_expr_unroll_kind:
283 loop->unroll
284 = (unsigned short) tree_to_shwi (gimple_call_arg (stmt, 2));
285 cfun->has_unroll = true;
286 break;
283 case annot_expr_no_vector_kind: 287 case annot_expr_no_vector_kind:
284 loop->dont_vectorize = true; 288 loop->dont_vectorize = true;
285 break; 289 break;
286 case annot_expr_vector_kind: 290 case annot_expr_vector_kind:
287 loop->force_vectorize = true; 291 loop->force_vectorize = true;
288 cfun->has_force_vectorize_loops = true; 292 cfun->has_force_vectorize_loops = true;
289 break; 293 break;
294 case annot_expr_parallel_kind:
295 loop->can_be_parallel = true;
296 loop->safelen = INT_MAX;
297 break;
290 default: 298 default:
291 gcc_unreachable (); 299 gcc_unreachable ();
292 } 300 }
293 301
294 stmt = gimple_build_assign (gimple_call_lhs (stmt), 302 stmt = gimple_build_assign (gimple_call_lhs (stmt),
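For context, the annot_expr kinds handled in the switch above originate from source-level loop annotations. A minimal sketch of how two of them might appear in user code; the function name and the pragma-to-kind mapping (#pragma GCC ivdep feeding annot_expr_ivdep_kind, #pragma GCC unroll feeding annot_expr_unroll_kind) are assumptions, not something this hunk states.

void
scale (float *a, const float *b, int n)
{
#pragma GCC ivdep        /* assumed to become annot_expr_ivdep_kind: loop->safelen = INT_MAX */
  for (int i = 0; i < n; i++)
    a[i] += b[i];

#pragma GCC unroll 4     /* assumed to become annot_expr_unroll_kind: loop->unroll = 4 */
  for (int i = 0; i < n; i++)
    a[i] *= 2.0f;
}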
332 continue; 340 continue;
333 341
334 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1))) 342 switch ((annot_expr_kind) tree_to_shwi (gimple_call_arg (stmt, 1)))
335 { 343 {
336 case annot_expr_ivdep_kind: 344 case annot_expr_ivdep_kind:
345 case annot_expr_unroll_kind:
337 case annot_expr_no_vector_kind: 346 case annot_expr_no_vector_kind:
338 case annot_expr_vector_kind: 347 case annot_expr_vector_kind:
348 case annot_expr_parallel_kind:
339 break; 349 break;
340 default: 350 default:
341 gcc_unreachable (); 351 gcc_unreachable ();
342 } 352 }
343 353
375 /* Add arguments to the PHI node. */ 385 /* Add arguments to the PHI node. */
376 for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i) 386 for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
377 { 387 {
378 tree arg = gimple_call_arg (stmt, i); 388 tree arg = gimple_call_arg (stmt, i);
379 if (TREE_CODE (arg) == LABEL_DECL) 389 if (TREE_CODE (arg) == LABEL_DECL)
380 pred = label_to_block (arg); 390 pred = label_to_block (cfun, arg);
381 else 391 else
382 { 392 {
383 edge e = find_edge (pred, bb); 393 edge e = find_edge (pred, bb);
384 add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION); 394 add_phi_arg (phi_node, arg, e, UNKNOWN_LOCATION);
385 } 395 }
457 a call to __builtin_unreachable (). */ 467 a call to __builtin_unreachable (). */
458 468
459 bool 469 bool
460 gimple_seq_unreachable_p (gimple_seq stmts) 470 gimple_seq_unreachable_p (gimple_seq stmts)
461 { 471 {
462 if (stmts == NULL) 472 if (stmts == NULL
473 /* Return false if -fsanitize=unreachable, we don't want to
474 optimize away those calls, but rather turn them into
475 __ubsan_handle_builtin_unreachable () or __builtin_trap ()
476 later. */
477 || sanitize_flags_p (SANITIZE_UNREACHABLE))
463 return false; 478 return false;
464 479
465 gimple_stmt_iterator gsi = gsi_last (stmts); 480 gimple_stmt_iterator gsi = gsi_last (stmts);
466 481
467 if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE)) 482 if (!gimple_call_builtin_p (gsi_stmt (gsi), BUILT_IN_UNREACHABLE))
543 static basic_block 558 static basic_block
544 make_blocks_1 (gimple_seq seq, basic_block bb) 559 make_blocks_1 (gimple_seq seq, basic_block bb)
545 { 560 {
546 gimple_stmt_iterator i = gsi_start (seq); 561 gimple_stmt_iterator i = gsi_start (seq);
547 gimple *stmt = NULL; 562 gimple *stmt = NULL;
563 gimple *prev_stmt = NULL;
548 bool start_new_block = true; 564 bool start_new_block = true;
549 bool first_stmt_of_seq = true; 565 bool first_stmt_of_seq = true;
550 566
551 while (!gsi_end_p (i)) 567 while (!gsi_end_p (i))
552 { 568 {
553 gimple *prev_stmt; 569 /* PREV_STMT should only be set to a debug stmt if the debug
554 570 stmt is before nondebug stmts. Once stmt reaches a nondebug
555 prev_stmt = stmt; 571 nonlabel, prev_stmt will be set to it, so that
572 stmt_starts_bb_p will know to start a new block if a label is
573 found. However, if stmt was a label after debug stmts only,
574 keep the label in prev_stmt even if we find further debug
575 stmts, for there may be other labels after them, and they
576 should land in the same block. */
577 if (!prev_stmt || !stmt || !is_gimple_debug (stmt))
578 prev_stmt = stmt;
556 stmt = gsi_stmt (i); 579 stmt = gsi_stmt (i);
557 580
558 if (stmt && is_gimple_call (stmt)) 581 if (stmt && is_gimple_call (stmt))
559 gimple_call_initialize_ctrl_altering (stmt); 582 gimple_call_initialize_ctrl_altering (stmt);
560 583
565 { 588 {
566 if (!first_stmt_of_seq) 589 if (!first_stmt_of_seq)
567 gsi_split_seq_before (&i, &seq); 590 gsi_split_seq_before (&i, &seq);
568 bb = create_basic_block (seq, bb); 591 bb = create_basic_block (seq, bb);
569 start_new_block = false; 592 start_new_block = false;
593 prev_stmt = NULL;
570 } 594 }
571 595
572 /* Now add STMT to BB and create the subgraphs for special statement 596 /* Now add STMT to BB and create the subgraphs for special statement
573 codes. */ 597 codes. */
574 gimple_set_bb (stmt, bb); 598 gimple_set_bb (stmt, bb);
609 /* Build a flowgraph for the sequence of stmts SEQ. */ 633 /* Build a flowgraph for the sequence of stmts SEQ. */
610 634
611 static void 635 static void
612 make_blocks (gimple_seq seq) 636 make_blocks (gimple_seq seq)
613 { 637 {
638 /* Look for debug markers right before labels, and move the debug
639 stmts after the labels. Accepting labels among debug markers
640 adds no value, just complexity; if we wanted to annotate labels
641 with view numbers (so sequencing among markers would matter) or
642 somesuch, we're probably better off still moving the labels, but
643 adding other debug annotations in their original positions or
644 emitting nonbind or bind markers associated with the labels in
645 the original position of the labels.
646
647 Moving labels would probably be simpler, but we can't do that:
648 moving labels assigns label ids to them, and doing so because of
649 debug markers makes for -fcompare-debug and possibly even codegen
650 differences. So, we have to move the debug stmts instead. To
651 that end, we scan SEQ backwards, marking the position of the
652 latest (earliest we find) label, and moving debug stmts that are
653 not separated from it by nondebug nonlabel stmts after the
654 label. */
655 if (MAY_HAVE_DEBUG_MARKER_STMTS)
656 {
657 gimple_stmt_iterator label = gsi_none ();
658
659 for (gimple_stmt_iterator i = gsi_last (seq); !gsi_end_p (i); gsi_prev (&i))
660 {
661 gimple *stmt = gsi_stmt (i);
662
663 /* If this is the first label we encounter (latest in SEQ)
664 before nondebug stmts, record its position. */
665 if (is_a <glabel *> (stmt))
666 {
667 if (gsi_end_p (label))
668 label = i;
669 continue;
670 }
671
672 /* Without a recorded label position to move debug stmts to,
673 there's nothing to do. */
674 if (gsi_end_p (label))
675 continue;
676
677 /* Move the debug stmt at I after LABEL. */
678 if (is_gimple_debug (stmt))
679 {
680 gcc_assert (gimple_debug_nonbind_marker_p (stmt));
681 /* As STMT is removed, I advances to the stmt after
682 STMT, so the gsi_prev in the for "increment"
683 expression gets us to the stmt we're to visit after
684 STMT. LABEL, however, would advance to the moved
685 stmt if we passed it to gsi_move_after, so pass it a
686 copy instead, so as to keep LABEL pointing to the
687 LABEL. */
688 gimple_stmt_iterator copy = label;
689 gsi_move_after (&i, &copy);
690 continue;
691 }
692
693 /* There aren't any (more?) debug stmts before label, so
694 there isn't anything else to move after it. */
695 label = gsi_none ();
696 }
697 }
698
614 make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun)); 699 make_blocks_1 (seq, ENTRY_BLOCK_PTR_FOR_FN (cfun));
615 } 700 }
616 701
617 /* Create and return a new empty basic block after bb AFTER. */ 702 /* Create and return a new empty basic block after bb AFTER. */
618 703
883 gtransaction *txn = as_a <gtransaction *> (last); 968 gtransaction *txn = as_a <gtransaction *> (last);
884 tree label1 = gimple_transaction_label_norm (txn); 969 tree label1 = gimple_transaction_label_norm (txn);
885 tree label2 = gimple_transaction_label_uninst (txn); 970 tree label2 = gimple_transaction_label_uninst (txn);
886 971
887 if (label1) 972 if (label1)
888 make_edge (bb, label_to_block (label1), EDGE_FALLTHRU); 973 make_edge (bb, label_to_block (cfun, label1), EDGE_FALLTHRU);
889 if (label2) 974 if (label2)
890 make_edge (bb, label_to_block (label2), 975 make_edge (bb, label_to_block (cfun, label2),
891 EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU)); 976 EDGE_TM_UNINSTRUMENTED | (label1 ? 0 : EDGE_FALLTHRU));
892 977
893 tree label3 = gimple_transaction_label_over (txn); 978 tree label3 = gimple_transaction_label_over (txn);
894 if (gimple_transaction_subcode (txn) 979 if (gimple_transaction_subcode (txn)
895 & (GTMA_HAVE_ABORT | GTMA_IS_OUTER)) 980 & (GTMA_HAVE_ABORT | GTMA_IS_OUTER))
896 make_edge (bb, label_to_block (label3), EDGE_TM_ABORT); 981 make_edge (bb, label_to_block (cfun, label3), EDGE_TM_ABORT);
897 982
898 fallthru = false; 983 fallthru = false;
899 } 984 }
900 break; 985 break;
901 986
1048 bb = bb->next_bb; 1133 bb = bb->next_bb;
1049 while (bb != afterbb) 1134 while (bb != afterbb)
1050 { 1135 {
1051 struct omp_region *cur_region = NULL; 1136 struct omp_region *cur_region = NULL;
1052 profile_count cnt = profile_count::zero (); 1137 profile_count cnt = profile_count::zero ();
1053 int freq = 0;
1054 bool all = true; 1138 bool all = true;
1055 1139
1056 int cur_omp_region_idx = 0; 1140 int cur_omp_region_idx = 0;
1057 int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx); 1141 int mer = make_edges_bb (bb, &cur_region, &cur_omp_region_idx);
1058 gcc_assert (!mer && !cur_region); 1142 gcc_assert (!mer && !cur_region);
1064 { 1148 {
1065 if (e->count ().initialized_p ()) 1149 if (e->count ().initialized_p ())
1066 cnt += e->count (); 1150 cnt += e->count ();
1067 else 1151 else
1068 all = false; 1152 all = false;
1069 freq += EDGE_FREQUENCY (e);
1070 } 1153 }
1071 tree_guess_outgoing_edge_probabilities (bb); 1154 tree_guess_outgoing_edge_probabilities (bb);
1072 if (all || profile_status_for_fn (cfun) == PROFILE_READ) 1155 if (all || profile_status_for_fn (cfun) == PROFILE_READ)
1073 bb->count = cnt; 1156 bb->count = cnt;
1074 bb->frequency = freq;
1075 1157
1076 bb = bb->next_bb; 1158 bb = bb->next_bb;
1077 } 1159 }
1078 return true; 1160 return true;
1079 } 1161 }
1082 discriminator distinguishes among several basic blocks that 1164 discriminator distinguishes among several basic blocks that
1083 share a common locus, allowing for more accurate sample-based 1165 share a common locus, allowing for more accurate sample-based
1084 profiling. */ 1166 profiling. */
1085 1167
1086 static int 1168 static int
1087 next_discriminator_for_locus (location_t locus) 1169 next_discriminator_for_locus (int line)
1088 { 1170 {
1089 struct locus_discrim_map item; 1171 struct locus_discrim_map item;
1090 struct locus_discrim_map **slot; 1172 struct locus_discrim_map **slot;
1091 1173
1092 item.locus = locus; 1174 item.location_line = line;
1093 item.discriminator = 0; 1175 item.discriminator = 0;
1094 slot = discriminator_per_locus->find_slot_with_hash ( 1176 slot = discriminator_per_locus->find_slot_with_hash (&item, line, INSERT);
1095 &item, LOCATION_LINE (locus), INSERT);
1096 gcc_assert (slot); 1177 gcc_assert (slot);
1097 if (*slot == HTAB_EMPTY_ENTRY) 1178 if (*slot == HTAB_EMPTY_ENTRY)
1098 { 1179 {
1099 *slot = XNEW (struct locus_discrim_map); 1180 *slot = XNEW (struct locus_discrim_map);
1100 gcc_assert (*slot); 1181 gcc_assert (*slot);
1101 (*slot)->locus = locus; 1182 (*slot)->location_line = line;
1102 (*slot)->discriminator = 0; 1183 (*slot)->discriminator = 0;
1103 } 1184 }
1104 (*slot)->discriminator++; 1185 (*slot)->discriminator++;
1105 return (*slot)->discriminator; 1186 return (*slot)->discriminator;
1106 } 1187 }
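As the comment before this function says, a discriminator tells apart basic blocks that share one locus. A hypothetical one-liner where that happens (f and g are stand-in externs): the condition block and the two call blocks all carry the same source line, so each gets its own discriminator from the counter above.

extern void f (void), g (void);

void
on_one_line (int p)
{
  if (p) f (); else g ();   /* three basic blocks share this source line */
}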
1107 1188
1108 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */ 1189 /* Return TRUE if LOCUS1 and LOCUS2 refer to the same source line. */
1109 1190
1110 static bool 1191 static bool
1111 same_line_p (location_t locus1, location_t locus2) 1192 same_line_p (location_t locus1, expanded_location *from, location_t locus2)
1112 { 1193 {
1113 expanded_location from, to; 1194 expanded_location to;
1114 1195
1115 if (locus1 == locus2) 1196 if (locus1 == locus2)
1116 return true; 1197 return true;
1117 1198
1118 from = expand_location (locus1);
1119 to = expand_location (locus2); 1199 to = expand_location (locus2);
1120 1200
1121 if (from.line != to.line) 1201 if (from->line != to.line)
1122 return false; 1202 return false;
1123 if (from.file == to.file) 1203 if (from->file == to.file)
1124 return true; 1204 return true;
1125 return (from.file != NULL 1205 return (from->file != NULL
1126 && to.file != NULL 1206 && to.file != NULL
1127 && filename_cmp (from.file, to.file) == 0); 1207 && filename_cmp (from->file, to.file) == 0);
1128 } 1208 }
1129 1209
1130 /* Assign discriminators to each basic block. */ 1210 /* Assign discriminators to each basic block. */
1131 1211
1132 static void 1212 static void
1142 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION; 1222 location_t locus = last ? gimple_location (last) : UNKNOWN_LOCATION;
1143 1223
1144 if (locus == UNKNOWN_LOCATION) 1224 if (locus == UNKNOWN_LOCATION)
1145 continue; 1225 continue;
1146 1226
1227 expanded_location locus_e = expand_location (locus);
1228
1147 FOR_EACH_EDGE (e, ei, bb->succs) 1229 FOR_EACH_EDGE (e, ei, bb->succs)
1148 { 1230 {
1149 gimple *first = first_non_label_stmt (e->dest); 1231 gimple *first = first_non_label_stmt (e->dest);
1150 gimple *last = last_stmt (e->dest); 1232 gimple *last = last_stmt (e->dest);
1151 if ((first && same_line_p (locus, gimple_location (first))) 1233 if ((first && same_line_p (locus, &locus_e,
1152 || (last && same_line_p (locus, gimple_location (last)))) 1234 gimple_location (first)))
1235 || (last && same_line_p (locus, &locus_e,
1236 gimple_location (last))))
1153 { 1237 {
1154 if (e->dest->discriminator != 0 && bb->discriminator == 0) 1238 if (e->dest->discriminator != 0 && bb->discriminator == 0)
1155 bb->discriminator = next_discriminator_for_locus (locus); 1239 bb->discriminator
1240 = next_discriminator_for_locus (locus_e.line);
1156 else 1241 else
1157 e->dest->discriminator = next_discriminator_for_locus (locus); 1242 e->dest->discriminator
1243 = next_discriminator_for_locus (locus_e.line);
1158 } 1244 }
1159 } 1245 }
1160 } 1246 }
1161 } 1247 }
1162 1248
1175 gcc_assert (gimple_code (entry) == GIMPLE_COND); 1261 gcc_assert (gimple_code (entry) == GIMPLE_COND);
1176 1262
1177 /* Entry basic blocks for each component. */ 1263 /* Entry basic blocks for each component. */
1178 then_label = gimple_cond_true_label (entry); 1264 then_label = gimple_cond_true_label (entry);
1179 else_label = gimple_cond_false_label (entry); 1265 else_label = gimple_cond_false_label (entry);
1180 then_bb = label_to_block (then_label); 1266 then_bb = label_to_block (cfun, then_label);
1181 else_bb = label_to_block (else_label); 1267 else_bb = label_to_block (cfun, else_label);
1182 then_stmt = first_stmt (then_bb); 1268 then_stmt = first_stmt (then_bb);
1183 else_stmt = first_stmt (else_bb); 1269 else_stmt = first_stmt (else_bb);
1184 1270
1185 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE); 1271 e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
1186 e->goto_locus = gimple_location (then_stmt); 1272 e->goto_locus = gimple_location (then_stmt);
1283 n = gimple_switch_num_labels (t); 1369 n = gimple_switch_num_labels (t);
1284 for (i = 0; i < n; i++) 1370 for (i = 0; i < n; i++)
1285 { 1371 {
1286 tree elt = gimple_switch_label (t, i); 1372 tree elt = gimple_switch_label (t, i);
1287 tree lab = CASE_LABEL (elt); 1373 tree lab = CASE_LABEL (elt);
1288 basic_block label_bb = label_to_block (lab); 1374 basic_block label_bb = label_to_block (cfun, lab);
1289 edge this_edge = find_edge (e->src, label_bb); 1375 edge this_edge = find_edge (e->src, label_bb);
1290 1376
1291 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create 1377 /* Add it to the chain of CASE_LABEL_EXPRs referencing E, or create
1292 a new chain. */ 1378 a new chain. */
1293 tree &s = edge_to_cases->get_or_insert (this_edge); 1379 tree &s = edge_to_cases->get_or_insert (this_edge);
1307 1393
1308 n = gimple_switch_num_labels (entry); 1394 n = gimple_switch_num_labels (entry);
1309 1395
1310 for (i = 0; i < n; ++i) 1396 for (i = 0; i < n; ++i)
1311 { 1397 {
1312 tree lab = CASE_LABEL (gimple_switch_label (entry, i)); 1398 basic_block label_bb = gimple_switch_label_bb (cfun, entry, i);
1313 basic_block label_bb = label_to_block (lab);
1314 make_edge (bb, label_bb, 0); 1399 make_edge (bb, label_bb, 0);
1315 } 1400 }
1316 } 1401 }
1317 1402
1318 1403
1319 /* Return the basic block holding label DEST. */ 1404 /* Return the basic block holding label DEST. */
1320 1405
1321 basic_block 1406 basic_block
1322 label_to_block_fn (struct function *ifun, tree dest) 1407 label_to_block (struct function *ifun, tree dest)
1323 { 1408 {
1324 int uid = LABEL_DECL_UID (dest); 1409 int uid = LABEL_DECL_UID (dest);
1325 1410
1326 /* We would die hard when faced by an undefined label. Emit a label to 1411 /* We would die hard when faced by an undefined label. Emit a label to
1327 the very first basic block. This will hopefully make even the dataflow 1412 the very first basic block. This will hopefully make even the dataflow
1352 1437
1353 /* A simple GOTO creates normal edges. */ 1438 /* A simple GOTO creates normal edges. */
1354 if (simple_goto_p (goto_t)) 1439 if (simple_goto_p (goto_t))
1355 { 1440 {
1356 tree dest = gimple_goto_dest (goto_t); 1441 tree dest = gimple_goto_dest (goto_t);
1357 basic_block label_bb = label_to_block (dest); 1442 basic_block label_bb = label_to_block (cfun, dest);
1358 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU); 1443 edge e = make_edge (bb, label_bb, EDGE_FALLTHRU);
1359 e->goto_locus = gimple_location (goto_t); 1444 e->goto_locus = gimple_location (goto_t);
1360 gsi_remove (&last, true); 1445 gsi_remove (&last, true);
1361 return false; 1446 return false;
1362 } 1447 }
1374 int i, n = gimple_asm_nlabels (stmt); 1459 int i, n = gimple_asm_nlabels (stmt);
1375 1460
1376 for (i = 0; i < n; ++i) 1461 for (i = 0; i < n; ++i)
1377 { 1462 {
1378 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i)); 1463 tree label = TREE_VALUE (gimple_asm_label_op (stmt, i));
1379 basic_block label_bb = label_to_block (label); 1464 basic_block label_bb = label_to_block (cfun, label);
1380 make_edge (bb, label_bb, 0); 1465 make_edge (bb, label_bb, 0);
1381 } 1466 }
1382 } 1467 }
1383 1468
1384 /*--------------------------------------------------------------------------- 1469 /*---------------------------------------------------------------------------
1406 /* Given LABEL return the first label in the same basic block. */ 1491 /* Given LABEL return the first label in the same basic block. */
1407 1492
1408 static tree 1493 static tree
1409 main_block_label (tree label) 1494 main_block_label (tree label)
1410 { 1495 {
1411 basic_block bb = label_to_block (label); 1496 basic_block bb = label_to_block (cfun, label);
1412 tree main_label = label_for_bb[bb->index].label; 1497 tree main_label = label_for_bb[bb->index].label;
1413 1498
1414 /* label_to_block possibly inserted undefined label into the chain. */ 1499 /* label_to_block possibly inserted undefined label into the chain. */
1415 if (!main_label) 1500 if (!main_label)
1416 { 1501 {
1683 { 1768 {
1684 int old_size = gimple_switch_num_labels (stmt); 1769 int old_size = gimple_switch_num_labels (stmt);
1685 int i, next_index, new_size; 1770 int i, next_index, new_size;
1686 basic_block default_bb = NULL; 1771 basic_block default_bb = NULL;
1687 1772
1688 default_bb = label_to_block (CASE_LABEL (gimple_switch_default_label (stmt))); 1773 default_bb = gimple_switch_default_bb (cfun, stmt);
1689 1774
1690 /* Look for possible opportunities to merge cases. */ 1775 /* Look for possible opportunities to merge cases. */
1691 new_size = i = 1; 1776 new_size = i = 1;
1692 while (i < old_size) 1777 while (i < old_size)
1693 { 1778 {
1695 basic_block base_bb; 1780 basic_block base_bb;
1696 1781
1697 base_case = gimple_switch_label (stmt, i); 1782 base_case = gimple_switch_label (stmt, i);
1698 1783
1699 gcc_assert (base_case); 1784 gcc_assert (base_case);
1700 base_bb = label_to_block (CASE_LABEL (base_case)); 1785 base_bb = label_to_block (cfun, CASE_LABEL (base_case));
1701 1786
1702 /* Discard cases that have the same destination as the default case or 1787 /* Discard cases that have the same destination as the default case or
1703 whose destination blocks have already been removed as unreachable. */ 1788 whose destination blocks have already been removed as unreachable. */
1704 if (base_bb == NULL || base_bb == default_bb) 1789 if (base_bb == NULL || base_bb == default_bb)
1705 { 1790 {
1716 of the label vector or when we cannot merge the next case 1801 of the label vector or when we cannot merge the next case
1717 label with the current one. */ 1802 label with the current one. */
1718 while (next_index < old_size) 1803 while (next_index < old_size)
1719 { 1804 {
1720 tree merge_case = gimple_switch_label (stmt, next_index); 1805 tree merge_case = gimple_switch_label (stmt, next_index);
1721 basic_block merge_bb = label_to_block (CASE_LABEL (merge_case)); 1806 basic_block merge_bb = label_to_block (cfun, CASE_LABEL (merge_case));
1722 wide_int bhp1 = wi::to_wide (base_high) + 1; 1807 wide_int bhp1 = wi::to_wide (base_high) + 1;
1723 1808
1724 /* Merge the cases if they jump to the same place, 1809 /* Merge the cases if they jump to the same place,
1725 and their ranges are consecutive. */ 1810 and their ranges are consecutive. */
1726 if (merge_bb == base_bb 1811 if (merge_bb == base_bb
1735 break; 1820 break;
1736 } 1821 }
1737 1822
1738 /* Discard cases that have an unreachable destination block. */ 1823 /* Discard cases that have an unreachable destination block. */
1739 if (EDGE_COUNT (base_bb->succs) == 0 1824 if (EDGE_COUNT (base_bb->succs) == 0
1740 && gimple_seq_unreachable_p (bb_seq (base_bb))) 1825 && gimple_seq_unreachable_p (bb_seq (base_bb))
1826 /* Don't optimize this if __builtin_unreachable () is the
1827 implicitly added one by the C++ FE too early, before
1828 -Wreturn-type can be diagnosed. We'll optimize it later
1829 during switchconv pass or any other cfg cleanup. */
1830 && (gimple_in_ssa_p (cfun)
1831 || (LOCATION_LOCUS (gimple_location (last_stmt (base_bb)))
1832 != BUILTINS_LOCATION)))
1741 { 1833 {
1742 edge base_edge = find_edge (gimple_bb (stmt), base_bb); 1834 edge base_edge = find_edge (gimple_bb (stmt), base_bb);
1743 if (base_edge != NULL) 1835 if (base_edge != NULL)
1744 remove_edge_and_dominated_blocks (base_edge); 1836 remove_edge_and_dominated_blocks (base_edge);
1745 i = next_index; 1837 i = next_index;
2050 { 2142 {
2051 gimple_stmt_iterator dest_gsi = gsi_start_bb (a); 2143 gimple_stmt_iterator dest_gsi = gsi_start_bb (a);
2052 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT); 2144 gsi_insert_before (&dest_gsi, stmt, GSI_NEW_STMT);
2053 } 2145 }
2054 /* Other user labels keep around in a form of a debug stmt. */ 2146 /* Other user labels keep around in a form of a debug stmt. */
2055 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_STMTS) 2147 else if (!DECL_ARTIFICIAL (label) && MAY_HAVE_DEBUG_BIND_STMTS)
2056 { 2148 {
2057 gimple *dbg = gimple_build_debug_bind (label, 2149 gimple *dbg = gimple_build_debug_bind (label,
2058 integer_zero_node, 2150 integer_zero_node,
2059 stmt); 2151 stmt);
2060 gimple_debug_bind_reset_value (dbg); 2152 gimple_debug_bind_reset_value (dbg);
2079 is selected as the new bb count. This is to handle inconsistent 2171 is selected as the new bb count. This is to handle inconsistent
2080 profiles. */ 2172 profiles. */
2081 if (a->loop_father == b->loop_father) 2173 if (a->loop_father == b->loop_father)
2082 { 2174 {
2083 a->count = a->count.merge (b->count); 2175 a->count = a->count.merge (b->count);
2084 a->frequency = MAX (a->frequency, b->frequency);
2085 } 2176 }
2086 2177
2087 /* Merge the sequences. */ 2178 /* Merge the sequences. */
2088 last = gsi_last_bb (a); 2179 last = gsi_last_bb (a);
2089 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT); 2180 gsi_insert_seq_after (&last, bb_seq (b), GSI_NEW_STMT);
2209 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0; 2300 DECL_NONLOCAL (gimple_label_label (label_stmt)) = 0;
2210 FORCED_LABEL (gimple_label_label (label_stmt)) = 1; 2301 FORCED_LABEL (gimple_label_label (label_stmt)) = 1;
2211 } 2302 }
2212 2303
2213 new_bb = bb->prev_bb; 2304 new_bb = bb->prev_bb;
2305 /* Don't move any labels into ENTRY block. */
2306 if (new_bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
2307 {
2308 new_bb = single_succ (new_bb);
2309 gcc_assert (new_bb != bb);
2310 }
2214 new_gsi = gsi_start_bb (new_bb); 2311 new_gsi = gsi_start_bb (new_bb);
2215 gsi_remove (&i, false); 2312 gsi_remove (&i, false);
2216 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT); 2313 gsi_insert_before (&new_gsi, stmt, GSI_NEW_STMT);
2217 } 2314 }
2218 else 2315 else
2233 bb->il.gimple.seq = NULL; 2330 bb->il.gimple.seq = NULL;
2234 bb->il.gimple.phi_nodes = NULL; 2331 bb->il.gimple.phi_nodes = NULL;
2235 } 2332 }
2236 2333
2237 2334
2238 /* Given a basic block BB ending with COND_EXPR or SWITCH_EXPR, and a 2335 /* Given a basic block BB and a value VAL for use in the final statement
2239 predicate VAL, return the edge that will be taken out of the block. 2336 of the block (if a GIMPLE_COND, GIMPLE_SWITCH, or computed goto), return
2240 If VAL does not match a unique edge, NULL is returned. */ 2337 the edge that will be taken out of the block.
2338 If VAL is NULL_TREE, then the current value of the final statement's
2339 predicate or index is used.
2340 If the value does not match a unique edge, NULL is returned. */
2241 2341
2242 edge 2342 edge
2243 find_taken_edge (basic_block bb, tree val) 2343 find_taken_edge (basic_block bb, tree val)
2244 { 2344 {
2245 gimple *stmt; 2345 gimple *stmt;
2246 2346
2247 stmt = last_stmt (bb); 2347 stmt = last_stmt (bb);
2248 2348
2249 gcc_assert (is_ctrl_stmt (stmt)); 2349 /* Handle ENTRY and EXIT. */
2350 if (!stmt)
2351 return NULL;
2250 2352
2251 if (gimple_code (stmt) == GIMPLE_COND) 2353 if (gimple_code (stmt) == GIMPLE_COND)
2252 return find_taken_edge_cond_expr (bb, val); 2354 return find_taken_edge_cond_expr (as_a <gcond *> (stmt), val);
2253 2355
2254 if (gimple_code (stmt) == GIMPLE_SWITCH) 2356 if (gimple_code (stmt) == GIMPLE_SWITCH)
2255 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), bb, val); 2357 return find_taken_edge_switch_expr (as_a <gswitch *> (stmt), val);
2256 2358
2257 if (computed_goto_p (stmt)) 2359 if (computed_goto_p (stmt))
2258 { 2360 {
2259 /* Only optimize if the argument is a label, if the argument is 2361 /* Only optimize if the argument is a label, if the argument is
2260 not a label then we can not construct a proper CFG. 2362 not a label then we can not construct a proper CFG.
2264 appear inside a LABEL_EXPR just to be safe. */ 2366 appear inside a LABEL_EXPR just to be safe. */
2265 if (val 2367 if (val
2266 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR) 2368 && (TREE_CODE (val) == ADDR_EXPR || TREE_CODE (val) == LABEL_EXPR)
2267 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL) 2369 && TREE_CODE (TREE_OPERAND (val, 0)) == LABEL_DECL)
2268 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0)); 2370 return find_taken_edge_computed_goto (bb, TREE_OPERAND (val, 0));
2269 return NULL; 2371 }
2270 } 2372
2271 2373 /* Otherwise we only know the taken successor edge if it's unique. */
2272 gcc_unreachable (); 2374 return single_succ_p (bb) ? single_succ_edge (bb) : NULL;
2273 } 2375 }
2274 2376
2275 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR 2377 /* Given a constant value VAL and the entry block BB to a GOTO_EXPR
2276 statement, determine which of the outgoing edges will be taken out of the 2378 statement, determine which of the outgoing edges will be taken out of the
2277 block. Return NULL if either edge may be taken. */ 2379 block. Return NULL if either edge may be taken. */
2280 find_taken_edge_computed_goto (basic_block bb, tree val) 2382 find_taken_edge_computed_goto (basic_block bb, tree val)
2281 { 2383 {
2282 basic_block dest; 2384 basic_block dest;
2283 edge e = NULL; 2385 edge e = NULL;
2284 2386
2285 dest = label_to_block (val); 2387 dest = label_to_block (cfun, val);
2286 if (dest) 2388 if (dest)
2287 { 2389 e = find_edge (bb, dest);
2288 e = find_edge (bb, dest); 2390
2289 gcc_assert (e != NULL); 2391 /* It's possible for find_edge to return NULL here on invalid code
2290 } 2392 that abuses the labels-as-values extension (e.g. code that attempts to
2393 jump *between* functions via stored labels-as-values; PR 84136).
2394 If so, then we simply return that NULL for the edge.
2395 We don't currently have a way of detecting such invalid code, so we
2396 can't assert that it was the case when a NULL edge occurs here. */
2291 2397
2292 return e; 2398 return e;
2293 } 2399 }
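The labels-as-values extension mentioned in the new comment, shown here in its ordinary, valid form; a minimal sketch with hypothetical names, illustrating the kind of computed goto whose taken edge find_taken_edge_computed_goto looks up.

int
dispatch (int op)
{
  static void *table[] = { &&op_add, &&op_done };   /* label addresses taken with && */
  int acc = 0;
  goto *table[op & 1];                              /* computed goto */
 op_add:
  acc += op;
 op_done:
  return acc;
}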
2294 2400
2295 /* Given a constant value VAL and the entry block BB to a COND_EXPR 2401 /* Given COND_STMT and a constant value VAL for use as the predicate,
2296 statement, determine which of the two edges will be taken out of the 2402 determine which of the two edges will be taken out of
2297 block. Return NULL if either edge may be taken. */ 2403 the statement's block. Return NULL if either edge may be taken.
2404 If VAL is NULL_TREE, then the current value of COND_STMT's predicate
2405 is used. */
2298 2406
2299 static edge 2407 static edge
2300 find_taken_edge_cond_expr (basic_block bb, tree val) 2408 find_taken_edge_cond_expr (const gcond *cond_stmt, tree val)
2301 { 2409 {
2302 edge true_edge, false_edge; 2410 edge true_edge, false_edge;
2303 2411
2304 if (val == NULL 2412 if (val == NULL_TREE)
2305 || TREE_CODE (val) != INTEGER_CST) 2413 {
2414 /* Use the current value of the predicate. */
2415 if (gimple_cond_true_p (cond_stmt))
2416 val = integer_one_node;
2417 else if (gimple_cond_false_p (cond_stmt))
2418 val = integer_zero_node;
2419 else
2420 return NULL;
2421 }
2422 else if (TREE_CODE (val) != INTEGER_CST)
2306 return NULL; 2423 return NULL;
2307 2424
2308 extract_true_false_edges_from_block (bb, &true_edge, &false_edge); 2425 extract_true_false_edges_from_block (gimple_bb (cond_stmt),
2426 &true_edge, &false_edge);
2309 2427
2310 return (integer_zerop (val) ? false_edge : true_edge); 2428 return (integer_zerop (val) ? false_edge : true_edge);
2311 } 2429 }
2312 2430
2313 /* Given an INTEGER_CST VAL and the entry block BB to a SWITCH_EXPR 2431 /* Given SWITCH_STMT and an INTEGER_CST VAL for use as the index, determine
2314 statement, determine which edge will be taken out of the block. Return 2432 which edge will be taken out of the statement's block. Return NULL if any
2315 NULL if any edge may be taken. */ 2433 edge may be taken.
2316 2434 If VAL is NULL_TREE, then the current value of SWITCH_STMT's index
2317 static edge 2435 is used. */
2318 find_taken_edge_switch_expr (gswitch *switch_stmt, basic_block bb, 2436
2319 tree val) 2437 edge
2438 find_taken_edge_switch_expr (const gswitch *switch_stmt, tree val)
2320 { 2439 {
2321 basic_block dest_bb; 2440 basic_block dest_bb;
2322 edge e; 2441 edge e;
2323 tree taken_case; 2442 tree taken_case;
2324 2443
2325 if (gimple_switch_num_labels (switch_stmt) == 1) 2444 if (gimple_switch_num_labels (switch_stmt) == 1)
2326 taken_case = gimple_switch_default_label (switch_stmt); 2445 taken_case = gimple_switch_default_label (switch_stmt);
2327 else if (! val || TREE_CODE (val) != INTEGER_CST)
2328 return NULL;
2329 else 2446 else
2330 taken_case = find_case_label_for_value (switch_stmt, val); 2447 {
2331 dest_bb = label_to_block (CASE_LABEL (taken_case)); 2448 if (val == NULL_TREE)
2332 2449 val = gimple_switch_index (switch_stmt);
2333 e = find_edge (bb, dest_bb); 2450 if (TREE_CODE (val) != INTEGER_CST)
2451 return NULL;
2452 else
2453 taken_case = find_case_label_for_value (switch_stmt, val);
2454 }
2455 dest_bb = label_to_block (cfun, CASE_LABEL (taken_case));
2456
2457 e = find_edge (gimple_bb (switch_stmt), dest_bb);
2334 gcc_assert (e); 2458 gcc_assert (e);
2335 return e; 2459 return e;
2336 } 2460 }
2337 2461
2338 2462
2339 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL. 2463 /* Return the CASE_LABEL_EXPR that SWITCH_STMT will take for VAL.
2340 We can make optimal use here of the fact that the case labels are 2464 We can make optimal use here of the fact that the case labels are
2341 sorted: We can do a binary search for a case matching VAL. */ 2465 sorted: We can do a binary search for a case matching VAL. */
2342 2466
2343 static tree 2467 tree
2344 find_case_label_for_value (gswitch *switch_stmt, tree val) 2468 find_case_label_for_value (const gswitch *switch_stmt, tree val)
2345 { 2469 {
2346 size_t low, high, n = gimple_switch_num_labels (switch_stmt); 2470 size_t low, high, n = gimple_switch_num_labels (switch_stmt);
2347 tree default_case = gimple_switch_default_label (switch_stmt); 2471 tree default_case = gimple_switch_default_label (switch_stmt);
2348 2472
2349 for (low = 0, high = n; high - low > 1; ) 2473 for (low = 0, high = n; high - low > 1; )
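A standalone sketch of the binary search described in the comment above, over a plain sorted array of [low, high] ranges rather than GIMPLE case labels; the struct and function names are hypothetical. Index 0 stands in for the default label, mirroring the layout in which gimple_switch_default_label is label 0.

#include <stddef.h>

struct case_range { long low, high; };

static int
find_case_index (const struct case_range *cases, size_t n, long val)
{
  size_t lo = 0, hi = n;            /* cases[0] is the default; real cases are 1..n-1 */
  while (hi - lo > 1)
    {
      size_t i = (hi + lo) / 2;
      if (val < cases[i].low)
        hi = i;                     /* val is below this case's range */
      else if (val > cases[i].high)
        lo = i;                     /* val is above this case's range */
      else
        return (int) i;             /* val falls within this case's range */
    }
  return 0;                         /* no matching case: take the default */
}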
2594 default: 2718 default:
2595 break; 2719 break;
2596 } 2720 }
2597 2721
2598 /* If a statement can throw, it alters control flow. */ 2722 /* If a statement can throw, it alters control flow. */
2599 return stmt_can_throw_internal (t); 2723 return stmt_can_throw_internal (cfun, t);
2600 } 2724 }
2601 2725
2602 2726
2603 /* Return true if T is a simple local goto. */ 2727 /* Return true if T is a simple local goto. */
2604 2728
2619 2743
2620 static inline bool 2744 static inline bool
2621 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt) 2745 stmt_starts_bb_p (gimple *stmt, gimple *prev_stmt)
2622 { 2746 {
2623 if (stmt == NULL) 2747 if (stmt == NULL)
2748 return false;
2749
2750 /* PREV_STMT is only set to a debug stmt if the debug stmt is before
2751 any nondebug stmts in the block. We don't want to start another
2752 block in this case: the debug stmt will already have started the
2753 one STMT would start if we weren't outputting debug stmts. */
2754 if (prev_stmt && is_gimple_debug (prev_stmt))
2624 return false; 2755 return false;
2625 2756
2626 /* Labels start a new basic block only if the preceding statement 2757 /* Labels start a new basic block only if the preceding statement
2627 wasn't a label of the same type. This prevents the creation of 2758 wasn't a label of the same type. This prevents the creation of
2628 consecutive blocks that have nothing but a single label. */ 2759 consecutive blocks that have nothing but a single label. */
2838 dest = edge_in->dest; 2969 dest = edge_in->dest;
2839 2970
2840 after_bb = split_edge_bb_loc (edge_in); 2971 after_bb = split_edge_bb_loc (edge_in);
2841 2972
2842 new_bb = create_empty_bb (after_bb); 2973 new_bb = create_empty_bb (after_bb);
2843 new_bb->frequency = EDGE_FREQUENCY (edge_in);
2844 new_bb->count = edge_in->count (); 2974 new_bb->count = edge_in->count ();
2845 2975
2846 e = redirect_edge_and_branch (edge_in, new_bb); 2976 e = redirect_edge_and_branch (edge_in, new_bb);
2847 gcc_assert (e == edge_in); 2977 gcc_assert (e == edge_in);
2848 2978
2851 2981
2852 return new_bb; 2982 return new_bb;
2853 } 2983 }
2854 2984
2855 2985
2856 /* Verify properties of the address expression T with base object BASE. */ 2986 /* Verify properties of the address expression T whose base should be
2857 2987 TREE_ADDRESSABLE if VERIFY_ADDRESSABLE is true. */
2858 static tree 2988
2859 verify_address (tree t, tree base) 2989 static bool
2990 verify_address (tree t, bool verify_addressable)
2860 { 2991 {
2861 bool old_constant; 2992 bool old_constant;
2862 bool old_side_effects; 2993 bool old_side_effects;
2863 bool new_constant; 2994 bool new_constant;
2864 bool new_side_effects; 2995 bool new_side_effects;
2871 new_constant = TREE_CONSTANT (t); 3002 new_constant = TREE_CONSTANT (t);
2872 3003
2873 if (old_constant != new_constant) 3004 if (old_constant != new_constant)
2874 { 3005 {
2875 error ("constant not recomputed when ADDR_EXPR changed"); 3006 error ("constant not recomputed when ADDR_EXPR changed");
2876 return t; 3007 return true;
2877 } 3008 }
2878 if (old_side_effects != new_side_effects) 3009 if (old_side_effects != new_side_effects)
2879 { 3010 {
2880 error ("side effects not recomputed when ADDR_EXPR changed"); 3011 error ("side effects not recomputed when ADDR_EXPR changed");
2881 return t; 3012 return true;
2882 } 3013 }
3014
3015 tree base = TREE_OPERAND (t, 0);
3016 while (handled_component_p (base))
3017 base = TREE_OPERAND (base, 0);
2883 3018
2884 if (!(VAR_P (base) 3019 if (!(VAR_P (base)
2885 || TREE_CODE (base) == PARM_DECL 3020 || TREE_CODE (base) == PARM_DECL
2886 || TREE_CODE (base) == RESULT_DECL)) 3021 || TREE_CODE (base) == RESULT_DECL))
2887 return NULL_TREE; 3022 return false;
2888 3023
2889 if (DECL_GIMPLE_REG_P (base)) 3024 if (DECL_GIMPLE_REG_P (base))
2890 { 3025 {
2891 error ("DECL_GIMPLE_REG_P set on a variable with address taken"); 3026 error ("DECL_GIMPLE_REG_P set on a variable with address taken");
2892 return base; 3027 return true;
2893 } 3028 }
2894 3029
2895 return NULL_TREE; 3030 if (verify_addressable && !TREE_ADDRESSABLE (base))
2896 } 3031 {
2897 3032 error ("address taken, but ADDRESSABLE bit not set");
2898 /* Callback for walk_tree, check that all elements with address taken are 3033 return true;
2899 properly noticed as such. The DATA is an int* that is 1 if TP was seen 3034 }
2900 inside a PHI node. */ 3035
2901 3036 return false;
2902 static tree 3037 }
2903 verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) 3038
2904 { 3039
2905 tree t = *tp, x; 3040 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
2906 3041 Returns true if there is an error, otherwise false. */
2907 if (TYPE_P (t)) 3042
2908 *walk_subtrees = 0; 3043 static bool
2909 3044 verify_types_in_gimple_min_lval (tree expr)
2910 /* Check operand N for being valid GIMPLE and give error MSG if not. */ 3045 {
2911 #define CHECK_OP(N, MSG) \ 3046 tree op;
2912 do { if (!is_gimple_val (TREE_OPERAND (t, N))) \ 3047
2913 { error (MSG); return TREE_OPERAND (t, N); }} while (0) 3048 if (is_gimple_id (expr))
2914 3049 return false;
2915 switch (TREE_CODE (t)) 3050
2916 { 3051 if (TREE_CODE (expr) != TARGET_MEM_REF
2917 case SSA_NAME: 3052 && TREE_CODE (expr) != MEM_REF)
2918 if (SSA_NAME_IN_FREE_LIST (t)) 3053 {
2919 { 3054 error ("invalid expression for min lvalue");
2920 error ("SSA name in freelist but still referenced"); 3055 return true;
2921 return *tp; 3056 }
2922 } 3057
2923 break; 3058 /* TARGET_MEM_REFs are strange beasts. */
2924 3059 if (TREE_CODE (expr) == TARGET_MEM_REF)
2925 case PARM_DECL: 3060 return false;
2926 case VAR_DECL: 3061
2927 case RESULT_DECL: 3062 op = TREE_OPERAND (expr, 0);
2928 { 3063 if (!is_gimple_val (op))
2929 tree context = decl_function_context (t); 3064 {
2930 if (context != cfun->decl 3065 error ("invalid operand in indirect reference");
2931 && !SCOPE_FILE_SCOPE_P (context) 3066 debug_generic_stmt (op);
2932 && !TREE_STATIC (t) 3067 return true;
2933 && !DECL_EXTERNAL (t)) 3068 }
2934 { 3069 /* Memory references now generally can involve a value conversion. */
2935 error ("Local declaration from a different function"); 3070
2936 return t; 3071 return false;
2937 } 3072 }
2938 } 3073
2939 break; 3074 /* Verify if EXPR is a valid GIMPLE reference expression. If
2940 3075 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
2941 case INDIRECT_REF: 3076 if there is an error, otherwise false. */
2942 error ("INDIRECT_REF in gimple IL"); 3077
2943 return t; 3078 static bool
2944 3079 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
2945 case MEM_REF: 3080 {
2946 x = TREE_OPERAND (t, 0); 3081 if (TREE_CODE (expr) == REALPART_EXPR
2947 if (!POINTER_TYPE_P (TREE_TYPE (x)) 3082 || TREE_CODE (expr) == IMAGPART_EXPR
2948 || !is_gimple_mem_ref_addr (x)) 3083 || TREE_CODE (expr) == BIT_FIELD_REF)
2949 { 3084 {
2950 error ("invalid first operand of MEM_REF"); 3085 tree op = TREE_OPERAND (expr, 0);
2951 return x; 3086 if (!is_gimple_reg_type (TREE_TYPE (expr)))
2952 }
2953 if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
2954 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
2955 {
2956 error ("invalid offset operand of MEM_REF");
2957 return TREE_OPERAND (t, 1);
2958 }
2959 if (TREE_CODE (x) == ADDR_EXPR)
2960 {
2961 tree va = verify_address (x, TREE_OPERAND (x, 0));
2962 if (va)
2963 return va;
2964 x = TREE_OPERAND (x, 0);
2965 }
2966 walk_tree (&x, verify_expr, data, NULL);
2967 *walk_subtrees = 0;
2968 break;
2969
2970 case ASSERT_EXPR:
2971 x = fold (ASSERT_EXPR_COND (t));
2972 if (x == boolean_false_node)
2973 {
2974 error ("ASSERT_EXPR with an always-false condition");
2975 return *tp;
2976 }
2977 break;
2978
2979 case MODIFY_EXPR:
2980 error ("MODIFY_EXPR not expected while having tuples");
2981 return *tp;
2982
2983 case ADDR_EXPR:
2984 {
2985 tree tem;
2986
2987 gcc_assert (is_gimple_address (t));
2988
2989 /* Skip any references (they will be checked when we recurse down the
2990 tree) and ensure that any variable used as a prefix is marked
2991 addressable. */
2992 for (x = TREE_OPERAND (t, 0);
2993 handled_component_p (x);
2994 x = TREE_OPERAND (x, 0))
2995 ;
2996
2997 if ((tem = verify_address (t, x)))
2998 return tem;
2999
3000 if (!(VAR_P (x)
3001 || TREE_CODE (x) == PARM_DECL
3002 || TREE_CODE (x) == RESULT_DECL))
3003 return NULL;
3004
3005 if (!TREE_ADDRESSABLE (x))
3006 {
3007 error ("address taken, but ADDRESSABLE bit not set");
3008 return x;
3009 }
3010
3011 break;
3012 }
3013
3014 case COND_EXPR:
3015 x = COND_EXPR_COND (t);
3016 if (!INTEGRAL_TYPE_P (TREE_TYPE (x)))
3017 {
3018 error ("non-integral used in condition");
3019 return x;
3020 }
3021 if (!is_gimple_condexpr (x))
3022 {
3023 error ("invalid conditional operand");
3024 return x;
3025 }
3026 break;
3027
3028 case NON_LVALUE_EXPR:
3029 case TRUTH_NOT_EXPR:
3030 gcc_unreachable ();
3031
3032 CASE_CONVERT:
3033 case FIX_TRUNC_EXPR:
3034 case FLOAT_EXPR:
3035 case NEGATE_EXPR:
3036 case ABS_EXPR:
3037 case BIT_NOT_EXPR:
3038 CHECK_OP (0, "invalid operand to unary operator");
3039 break;
3040
3041 case REALPART_EXPR:
3042 case IMAGPART_EXPR:
3043 case BIT_FIELD_REF:
3044 if (!is_gimple_reg_type (TREE_TYPE (t)))
3045 { 3087 {
3046 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR"); 3088 error ("non-scalar BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3047 return t; 3089 return true;
3048 } 3090 }
3049 3091
3050 if (TREE_CODE (t) == BIT_FIELD_REF) 3092 if (TREE_CODE (expr) == BIT_FIELD_REF)
3051 { 3093 {
3052 tree t0 = TREE_OPERAND (t, 0); 3094 tree t1 = TREE_OPERAND (expr, 1);
3053 tree t1 = TREE_OPERAND (t, 1); 3095 tree t2 = TREE_OPERAND (expr, 2);
3054 tree t2 = TREE_OPERAND (t, 2); 3096 poly_uint64 size, bitpos;
3055 if (!tree_fits_uhwi_p (t1) 3097 if (!poly_int_tree_p (t1, &size)
3056 || !tree_fits_uhwi_p (t2) 3098 || !poly_int_tree_p (t2, &bitpos)
3057 || !types_compatible_p (bitsizetype, TREE_TYPE (t1)) 3099 || !types_compatible_p (bitsizetype, TREE_TYPE (t1))
3058 || !types_compatible_p (bitsizetype, TREE_TYPE (t2))) 3100 || !types_compatible_p (bitsizetype, TREE_TYPE (t2)))
3059 { 3101 {
3060 error ("invalid position or size operand to BIT_FIELD_REF"); 3102 error ("invalid position or size operand to BIT_FIELD_REF");
3061 return t; 3103 return true;
3062 } 3104 }
3063 if (INTEGRAL_TYPE_P (TREE_TYPE (t)) 3105 if (INTEGRAL_TYPE_P (TREE_TYPE (expr))
3064 && (TYPE_PRECISION (TREE_TYPE (t)) 3106 && maybe_ne (TYPE_PRECISION (TREE_TYPE (expr)), size))
3065 != tree_to_uhwi (t1)))
3066 { 3107 {
3067 error ("integral result type precision does not match " 3108 error ("integral result type precision does not match "
3068 "field size of BIT_FIELD_REF"); 3109 "field size of BIT_FIELD_REF");
3069 return t; 3110 return true;
3070 } 3111 }
3071 else if (!INTEGRAL_TYPE_P (TREE_TYPE (t)) 3112 else if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
3072 && TYPE_MODE (TREE_TYPE (t)) != BLKmode 3113 && TYPE_MODE (TREE_TYPE (expr)) != BLKmode
3073 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (t))) 3114 && maybe_ne (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (expr))),
3074 != tree_to_uhwi (t1))) 3115 size))
3075 { 3116 {
3076 error ("mode size of non-integral result does not " 3117 error ("mode size of non-integral result does not "
3077 "match field size of BIT_FIELD_REF"); 3118 "match field size of BIT_FIELD_REF");
3078 return t; 3119 return true;
3079 } 3120 }
3080 if (!AGGREGATE_TYPE_P (TREE_TYPE (t0)) 3121 if (!AGGREGATE_TYPE_P (TREE_TYPE (op))
3081 && (tree_to_uhwi (t1) + tree_to_uhwi (t2) 3122 && maybe_gt (size + bitpos,
3082 > tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t0))))) 3123 tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (op)))))
3083 { 3124 {
3084 error ("position plus size exceeds size of referenced object in " 3125 error ("position plus size exceeds size of referenced object in "
3085 "BIT_FIELD_REF"); 3126 "BIT_FIELD_REF");
3086 return t; 3127 return true;
3087 } 3128 }
3088 } 3129 }
3089 t = TREE_OPERAND (t, 0); 3130
3090 3131 if ((TREE_CODE (expr) == REALPART_EXPR
3091 /* Fall-through. */ 3132 || TREE_CODE (expr) == IMAGPART_EXPR)
3092 case COMPONENT_REF: 3133 && !useless_type_conversion_p (TREE_TYPE (expr),
3093 case ARRAY_REF: 3134 TREE_TYPE (TREE_TYPE (op))))
3094 case ARRAY_RANGE_REF: 3135 {
3095 case VIEW_CONVERT_EXPR: 3136 error ("type mismatch in real/imagpart reference");
3096 /* We have a nest of references. Verify that each of the operands 3137 debug_generic_stmt (TREE_TYPE (expr));
3097 that determine where to reference is either a constant or a variable, 3138 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3098 verify that the base is valid, and then show we've already checked 3139 return true;
3099 the subtrees. */ 3140 }
3100 while (handled_component_p (t)) 3141 expr = op;
3101 { 3142 }
3102 if (TREE_CODE (t) == COMPONENT_REF && TREE_OPERAND (t, 2)) 3143
3103 CHECK_OP (2, "invalid COMPONENT_REF offset operator");
3104 else if (TREE_CODE (t) == ARRAY_REF
3105 || TREE_CODE (t) == ARRAY_RANGE_REF)
3106 {
3107 CHECK_OP (1, "invalid array index");
3108 if (TREE_OPERAND (t, 2))
3109 CHECK_OP (2, "invalid array lower bound");
3110 if (TREE_OPERAND (t, 3))
3111 CHECK_OP (3, "invalid array stride");
3112 }
3113 else if (TREE_CODE (t) == BIT_FIELD_REF
3114 || TREE_CODE (t) == REALPART_EXPR
3115 || TREE_CODE (t) == IMAGPART_EXPR)
3116 {
3117 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or "
3118 "REALPART_EXPR");
3119 return t;
3120 }
3121
3122 t = TREE_OPERAND (t, 0);
3123 }
3124
3125 if (!is_gimple_min_invariant (t) && !is_gimple_lvalue (t))
3126 {
3127 error ("invalid reference prefix");
3128 return t;
3129 }
3130 walk_tree (&t, verify_expr, data, NULL);
3131 *walk_subtrees = 0;
3132 break;
3133 case PLUS_EXPR:
3134 case MINUS_EXPR:
3135 /* PLUS_EXPR and MINUS_EXPR don't work on pointers, they should be done using
3136 POINTER_PLUS_EXPR. */
3137 if (POINTER_TYPE_P (TREE_TYPE (t)))
3138 {
3139 error ("invalid operand to plus/minus, type is a pointer");
3140 return t;
3141 }
3142 CHECK_OP (0, "invalid operand to binary operator");
3143 CHECK_OP (1, "invalid operand to binary operator");
3144 break;
3145
3146 case POINTER_PLUS_EXPR:
3147 /* Check to make sure the first operand is a pointer or reference type. */
3148 if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
3149 {
3150 error ("invalid operand to pointer plus, first operand is not a pointer");
3151 return t;
3152 }
3153 /* Check to make sure the second operand is a ptrofftype. */
3154 if (!ptrofftype_p (TREE_TYPE (TREE_OPERAND (t, 1))))
3155 {
3156 error ("invalid operand to pointer plus, second operand is not an "
3157 "integer type of appropriate width");
3158 return t;
3159 }
3160 /* FALLTHROUGH */
3161 case LT_EXPR:
3162 case LE_EXPR:
3163 case GT_EXPR:
3164 case GE_EXPR:
3165 case EQ_EXPR:
3166 case NE_EXPR:
3167 case UNORDERED_EXPR:
3168 case ORDERED_EXPR:
3169 case UNLT_EXPR:
3170 case UNLE_EXPR:
3171 case UNGT_EXPR:
3172 case UNGE_EXPR:
3173 case UNEQ_EXPR:
3174 case LTGT_EXPR:
3175 case MULT_EXPR:
3176 case TRUNC_DIV_EXPR:
3177 case CEIL_DIV_EXPR:
3178 case FLOOR_DIV_EXPR:
3179 case ROUND_DIV_EXPR:
3180 case TRUNC_MOD_EXPR:
3181 case CEIL_MOD_EXPR:
3182 case FLOOR_MOD_EXPR:
3183 case ROUND_MOD_EXPR:
3184 case RDIV_EXPR:
3185 case EXACT_DIV_EXPR:
3186 case MIN_EXPR:
3187 case MAX_EXPR:
3188 case LSHIFT_EXPR:
3189 case RSHIFT_EXPR:
3190 case LROTATE_EXPR:
3191 case RROTATE_EXPR:
3192 case BIT_IOR_EXPR:
3193 case BIT_XOR_EXPR:
3194 case BIT_AND_EXPR:
3195 CHECK_OP (0, "invalid operand to binary operator");
3196 CHECK_OP (1, "invalid operand to binary operator");
3197 break;
3198
3199 case CONSTRUCTOR:
3200 if (TREE_CONSTANT (t) && TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
3201 *walk_subtrees = 0;
3202 break;
3203
3204 case CASE_LABEL_EXPR:
3205 if (CASE_CHAIN (t))
3206 {
3207 error ("invalid CASE_CHAIN");
3208 return t;
3209 }
3210 break;
3211
3212 default:
3213 break;
3214 }
3215 return NULL;
3216
3217 #undef CHECK_OP
3218 }
3219
3220
3221 /* Verify if EXPR is either a GIMPLE ID or a GIMPLE indirect reference.
3222 Returns true if there is an error, otherwise false. */
3223
3224 static bool
3225 verify_types_in_gimple_min_lval (tree expr)
3226 {
3227 tree op;
3228
3229 if (is_gimple_id (expr))
3230 return false;
3231
3232 if (TREE_CODE (expr) != TARGET_MEM_REF
3233 && TREE_CODE (expr) != MEM_REF)
3234 {
3235 error ("invalid expression for min lvalue");
3236 return true;
3237 }
3238
3239 /* TARGET_MEM_REFs are strange beasts. */
3240 if (TREE_CODE (expr) == TARGET_MEM_REF)
3241 return false;
3242
3243 op = TREE_OPERAND (expr, 0);
3244 if (!is_gimple_val (op))
3245 {
3246 error ("invalid operand in indirect reference");
3247 debug_generic_stmt (op);
3248 return true;
3249 }
3250 /* Memory references now generally can involve a value conversion. */
3251
3252 return false;
3253 }
3254
3255 /* Verify if EXPR is a valid GIMPLE reference expression. If
3256 REQUIRE_LVALUE is true verifies it is an lvalue. Returns true
3257 if there is an error, otherwise false. */
3258
3259 static bool
3260 verify_types_in_gimple_reference (tree expr, bool require_lvalue)
3261 {
3262 while (handled_component_p (expr)) 3144 while (handled_component_p (expr))
3263 { 3145 {
3146 if (TREE_CODE (expr) == REALPART_EXPR
3147 || TREE_CODE (expr) == IMAGPART_EXPR
3148 || TREE_CODE (expr) == BIT_FIELD_REF)
3149 {
3150 error ("non-top-level BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR");
3151 return true;
3152 }
3153
3264 tree op = TREE_OPERAND (expr, 0); 3154 tree op = TREE_OPERAND (expr, 0);
3265 3155
3266 if (TREE_CODE (expr) == ARRAY_REF 3156 if (TREE_CODE (expr) == ARRAY_REF
3267 || TREE_CODE (expr) == ARRAY_RANGE_REF) 3157 || TREE_CODE (expr) == ARRAY_RANGE_REF)
3268 { 3158 {
3296 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr))); 3186 debug_generic_stmt (TREE_TYPE (TREE_TYPE (expr)));
3297 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op))); 3187 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
3298 return true; 3188 return true;
3299 } 3189 }
3300 3190
3301 if ((TREE_CODE (expr) == REALPART_EXPR 3191 if (TREE_CODE (expr) == COMPONENT_REF)
3302 || TREE_CODE (expr) == IMAGPART_EXPR) 3192 {
3303 && !useless_type_conversion_p (TREE_TYPE (expr), 3193 if (TREE_OPERAND (expr, 2)
3304 TREE_TYPE (TREE_TYPE (op)))) 3194 && !is_gimple_val (TREE_OPERAND (expr, 2)))
3305 { 3195 {
3306 error ("type mismatch in real/imagpart reference"); 3196 error ("invalid COMPONENT_REF offset operator");
3307 debug_generic_stmt (TREE_TYPE (expr)); 3197 return true;
3308 debug_generic_stmt (TREE_TYPE (TREE_TYPE (op))); 3198 }
3309 return true; 3199 if (!useless_type_conversion_p (TREE_TYPE (expr),
3310 } 3200 TREE_TYPE (TREE_OPERAND (expr, 1))))
3311 3201 {
3312 if (TREE_CODE (expr) == COMPONENT_REF 3202 error ("type mismatch in component reference");
3313 && !useless_type_conversion_p (TREE_TYPE (expr), 3203 debug_generic_stmt (TREE_TYPE (expr));
3314 TREE_TYPE (TREE_OPERAND (expr, 1)))) 3204 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3315 { 3205 return true;
3316 error ("type mismatch in component reference"); 3206 }
3317 debug_generic_stmt (TREE_TYPE (expr));
3318 debug_generic_stmt (TREE_TYPE (TREE_OPERAND (expr, 1)));
3319 return true;
3320 } 3207 }
3321 3208
3322 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR) 3209 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
3323 { 3210 {
3324 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check 3211 /* For VIEW_CONVERT_EXPRs which are allowed here too, we only check
3348 expr = op; 3235 expr = op;
3349 } 3236 }
3350 3237
3351 if (TREE_CODE (expr) == MEM_REF) 3238 if (TREE_CODE (expr) == MEM_REF)
3352 { 3239 {
3353 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))) 3240 if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0))
3241 || (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
3242 && verify_address (TREE_OPERAND (expr, 0), false)))
3354 { 3243 {
3355 error ("invalid address operand in MEM_REF"); 3244 error ("invalid address operand in MEM_REF");
3356 debug_generic_stmt (expr); 3245 debug_generic_stmt (expr);
3357 return true; 3246 return true;
3358 } 3247 }
3359 if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST 3248 if (!poly_int_tree_p (TREE_OPERAND (expr, 1))
3360 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1)))) 3249 || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
3361 { 3250 {
3362 error ("invalid offset operand in MEM_REF"); 3251 error ("invalid offset operand in MEM_REF");
3363 debug_generic_stmt (expr); 3252 debug_generic_stmt (expr);
3364 return true; 3253 return true;
3365 } 3254 }
3366 } 3255 }
3367 else if (TREE_CODE (expr) == TARGET_MEM_REF) 3256 else if (TREE_CODE (expr) == TARGET_MEM_REF)
3368 { 3257 {
3369 if (!TMR_BASE (expr) 3258 if (!TMR_BASE (expr)
3370 || !is_gimple_mem_ref_addr (TMR_BASE (expr))) 3259 || !is_gimple_mem_ref_addr (TMR_BASE (expr))
3260 || (TREE_CODE (TMR_BASE (expr)) == ADDR_EXPR
3261 && verify_address (TMR_BASE (expr), false)))
3371 { 3262 {
3372 error ("invalid address operand in TARGET_MEM_REF"); 3263 error ("invalid address operand in TARGET_MEM_REF");
3373 return true; 3264 return true;
3374 } 3265 }
3375 if (!TMR_OFFSET (expr) 3266 if (!TMR_OFFSET (expr)
3376 || TREE_CODE (TMR_OFFSET (expr)) != INTEGER_CST 3267 || !poly_int_tree_p (TMR_OFFSET (expr))
3377 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr)))) 3268 || !POINTER_TYPE_P (TREE_TYPE (TMR_OFFSET (expr))))
3378 { 3269 {
3379 error ("invalid offset operand in TARGET_MEM_REF"); 3270 error ("invalid offset operand in TARGET_MEM_REF");
3380 debug_generic_stmt (expr); 3271 debug_generic_stmt (expr);
3381 return true; 3272 return true;
3382 } 3273 }
3274 }
3275 else if (TREE_CODE (expr) == INDIRECT_REF)
3276 {
3277 error ("INDIRECT_REF in gimple IL");
3278 debug_generic_stmt (expr);
3279 return true;
3383 } 3280 }
3384 3281
3385 return ((require_lvalue || !is_gimple_min_invariant (expr)) 3282 return ((require_lvalue || !is_gimple_min_invariant (expr))
3386 && verify_types_in_gimple_min_lval (expr)); 3283 && verify_types_in_gimple_min_lval (expr));
3387 } 3284 }
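Editor's note on the MEM_REF/TARGET_MEM_REF offset checks above: requiring poly_int_tree_p instead of INTEGER_CST lets the offset be a POLY_INT_CST, as produced for scalable (SVE-style) vector modes, while still demanding a pointer-typed constant. A consumer reads such an offset roughly like this (sketch, assuming the tree.h overload that extracts a poly_int64):

    poly_int64 byte_off;
    if (poly_int_tree_p (TREE_OPERAND (expr, 1), &byte_off)
        && known_eq (byte_off, 0))
      /* A zero offset, whether it came from an INTEGER_CST or from a
         runtime-scaled POLY_INT_CST such as 16 + 16*x.  */
      ;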
3525 { 3422 {
3526 error ("static chain with function that doesn%'t use one"); 3423 error ("static chain with function that doesn%'t use one");
3527 return true; 3424 return true;
3528 } 3425 }
3529 3426
3530 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) 3427 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3531 { 3428 {
3532 switch (DECL_FUNCTION_CODE (fndecl)) 3429 switch (DECL_FUNCTION_CODE (fndecl))
3533 { 3430 {
3534 case BUILT_IN_UNREACHABLE: 3431 case BUILT_IN_UNREACHABLE:
3535 case BUILT_IN_TRAP: 3432 case BUILT_IN_TRAP:
3636 debug_generic_expr (op0_type); 3533 debug_generic_expr (op0_type);
3637 debug_generic_expr (op1_type); 3534 debug_generic_expr (op1_type);
3638 return true; 3535 return true;
3639 } 3536 }
3640 3537
3641 if (TYPE_VECTOR_SUBPARTS (type) != TYPE_VECTOR_SUBPARTS (op0_type)) 3538 if (maybe_ne (TYPE_VECTOR_SUBPARTS (type),
3539 TYPE_VECTOR_SUBPARTS (op0_type)))
3642 { 3540 {
3643 error ("invalid vector comparison resulting type"); 3541 error ("invalid vector comparison resulting type");
3644 debug_generic_expr (type); 3542 debug_generic_expr (type);
3645 return true; 3543 return true;
3646 } 3544 }
3691 if ((POINTER_TYPE_P (lhs_type) 3589 if ((POINTER_TYPE_P (lhs_type)
3692 && INTEGRAL_TYPE_P (rhs1_type)) 3590 && INTEGRAL_TYPE_P (rhs1_type))
3693 || (POINTER_TYPE_P (rhs1_type) 3591 || (POINTER_TYPE_P (rhs1_type)
3694 && INTEGRAL_TYPE_P (lhs_type) 3592 && INTEGRAL_TYPE_P (lhs_type)
3695 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type) 3593 && (TYPE_PRECISION (rhs1_type) >= TYPE_PRECISION (lhs_type)
3696 || ptrofftype_p (sizetype)))) 3594 || ptrofftype_p (lhs_type))))
3697 return false; 3595 return false;
3698 3596
3699 /* Allow conversion from integral to offset type and vice versa. */ 3597 /* Allow conversion from integral to offset type and vice versa. */
3700 if ((TREE_CODE (lhs_type) == OFFSET_TYPE 3598 if ((TREE_CODE (lhs_type) == OFFSET_TYPE
3701 && INTEGRAL_TYPE_P (rhs1_type)) 3599 && INTEGRAL_TYPE_P (rhs1_type))
3772 return true; 3670 return true;
3773 } 3671 }
3774 3672
3775 return false; 3673 return false;
3776 } 3674 }
3777 case REDUC_MAX_EXPR:
3778 case REDUC_MIN_EXPR:
3779 case REDUC_PLUS_EXPR:
3780 if (!VECTOR_TYPE_P (rhs1_type)
3781 || !useless_type_conversion_p (lhs_type, TREE_TYPE (rhs1_type)))
3782 {
3783 error ("reduction should convert from vector to element type");
3784 debug_generic_expr (lhs_type);
3785 debug_generic_expr (rhs1_type);
3786 return true;
3787 }
3788 return false;
3789 3675
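Editor's note: the REDUC_{MAX,MIN,PLUS}_EXPR cases disappear here because those tree codes were dropped in favour of direct internal-function calls, so a whole-vector reduction in the IL now looks roughly like the following dump excerpt (illustrative, not taken from the patch):

    /* Reduce vect_sum_10 to a scalar.  */
    _11 = .REDUC_PLUS (vect_sum_10);   /* IFN_REDUC_PLUS */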
3790 case VEC_UNPACK_HI_EXPR: 3676 case VEC_UNPACK_HI_EXPR:
3791 case VEC_UNPACK_LO_EXPR: 3677 case VEC_UNPACK_LO_EXPR:
3792 case VEC_UNPACK_FLOAT_HI_EXPR: 3678 case VEC_UNPACK_FLOAT_HI_EXPR:
3793 case VEC_UNPACK_FLOAT_LO_EXPR: 3679 case VEC_UNPACK_FLOAT_LO_EXPR:
3794 /* FIXME. */ 3680 case VEC_UNPACK_FIX_TRUNC_HI_EXPR:
3681 case VEC_UNPACK_FIX_TRUNC_LO_EXPR:
3682 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
3683 || TREE_CODE (lhs_type) != VECTOR_TYPE
3684 || (!INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3685 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type)))
3686 || (!INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3687 && !SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)))
3688 || ((rhs_code == VEC_UNPACK_HI_EXPR
3689 || rhs_code == VEC_UNPACK_LO_EXPR)
3690 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3691 != INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))))
3692 || ((rhs_code == VEC_UNPACK_FLOAT_HI_EXPR
3693 || rhs_code == VEC_UNPACK_FLOAT_LO_EXPR)
3694 && (INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))
3695 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))))
3696 || ((rhs_code == VEC_UNPACK_FIX_TRUNC_HI_EXPR
3697 || rhs_code == VEC_UNPACK_FIX_TRUNC_LO_EXPR)
3698 && (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
3699 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))))
3700 || (maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
3701 2 * GET_MODE_SIZE (element_mode (rhs1_type)))
3702 && (!VECTOR_BOOLEAN_TYPE_P (lhs_type)
3703 || !VECTOR_BOOLEAN_TYPE_P (rhs1_type)))
3704 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (lhs_type),
3705 TYPE_VECTOR_SUBPARTS (rhs1_type)))
3706 {
3707 error ("type mismatch in vector unpack expression");
3708 debug_generic_expr (lhs_type);
3709 debug_generic_expr (rhs1_type);
3710 return true;
3711 }
3712
3795 return false; 3713 return false;
3796 3714
3797 case NEGATE_EXPR: 3715 case NEGATE_EXPR:
3798 case ABS_EXPR: 3716 case ABS_EXPR:
3799 case BIT_NOT_EXPR: 3717 case BIT_NOT_EXPR:
3800 case PAREN_EXPR: 3718 case PAREN_EXPR:
3801 case CONJ_EXPR: 3719 case CONJ_EXPR:
3802 break; 3720 break;
3721
3722 case ABSU_EXPR:
3723 if (!ANY_INTEGRAL_TYPE_P (lhs_type)
3724 || !TYPE_UNSIGNED (lhs_type)
3725 || !ANY_INTEGRAL_TYPE_P (rhs1_type)
3726 || TYPE_UNSIGNED (rhs1_type)
3727 || element_precision (lhs_type) != element_precision (rhs1_type))
3728 {
3729 error ("invalid types for ABSU_EXPR");
3730 debug_generic_expr (lhs_type);
3731 debug_generic_expr (rhs1_type);
3732 return true;
3733 }
3734 return false;
3735
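Editor's note: the new ABSU_EXPR check encodes the code's intended shape, a signed (or signed-vector) operand and an unsigned result of the same precision. A minimal statement that passes it (sketch):

    /* x_1 has type int, _2 has type unsigned int (same precision).  */
    _2 = ABSU_EXPR <x_1>;
    /* An int result, or any precision change, would instead trigger
       "invalid types for ABSU_EXPR".  */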
3736 case VEC_DUPLICATE_EXPR:
3737 if (TREE_CODE (lhs_type) != VECTOR_TYPE
3738 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
3739 {
3740 error ("vec_duplicate should be from a scalar to a like vector");
3741 debug_generic_expr (lhs_type);
3742 debug_generic_expr (rhs1_type);
3743 return true;
3744 }
3745 return false;
3803 3746
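Editor's note: VEC_DUPLICATE_EXPR, also new to the verifier, broadcasts a scalar into every lane; the check only requires a vector LHS whose element type matches the scalar operand. Sketch (types and dump notation are illustrative):

    /* x_1 : int,  _2 : vector(4) int -- any lane count, including a
       poly_int count for scalable vectors, is acceptable.  */
    _2 = VEC_DUPLICATE_EXPR <x_1>;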
3804 default: 3747 default:
3805 gcc_unreachable (); 3748 gcc_unreachable ();
3806 } 3749 }
3807 3750
3976 } 3919 }
3977 3920
3978 return false; 3921 return false;
3979 } 3922 }
3980 3923
3924 case POINTER_DIFF_EXPR:
3925 {
3926 if (!POINTER_TYPE_P (rhs1_type)
3927 || !POINTER_TYPE_P (rhs2_type)
3928 /* Because we special-case pointers to void we allow difference
3929 of arbitrary pointers with the same mode. */
3930 || TYPE_MODE (rhs1_type) != TYPE_MODE (rhs2_type)
3931 || TREE_CODE (lhs_type) != INTEGER_TYPE
3932 || TYPE_UNSIGNED (lhs_type)
3933 || TYPE_PRECISION (lhs_type) != TYPE_PRECISION (rhs1_type))
3934 {
3935 error ("type mismatch in pointer diff expression");
3936 debug_generic_stmt (lhs_type);
3937 debug_generic_stmt (rhs1_type);
3938 debug_generic_stmt (rhs2_type);
3939 return true;
3940 }
3941
3942 return false;
3943 }
3944
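Editor's note: POINTER_DIFF_EXPR is the new representation of pointer subtraction, and the case above spells out its contract: both operands are pointers of the same mode, and the result is a signed integer of the same precision. C pointer subtraction therefore gimplifies roughly as:

    /* long *p, *q;   r = p - q;  */
    _1 = p_2(D) - q_3(D);     /* POINTER_DIFF_EXPR; _1 is signed, pointer-sized */
    r_4 = _1 /[ex] 8;         /* byte difference divided by sizeof (long) */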
3981 case TRUTH_ANDIF_EXPR: 3945 case TRUTH_ANDIF_EXPR:
3982 case TRUTH_ORIF_EXPR: 3946 case TRUTH_ORIF_EXPR:
3983 case TRUTH_AND_EXPR: 3947 case TRUTH_AND_EXPR:
3984 case TRUTH_OR_EXPR: 3948 case TRUTH_OR_EXPR:
3985 case TRUTH_XOR_EXPR: 3949 case TRUTH_XOR_EXPR:
4017 && ((!INTEGRAL_TYPE_P (rhs1_type) 3981 && ((!INTEGRAL_TYPE_P (rhs1_type)
4018 && !SCALAR_FLOAT_TYPE_P (rhs1_type)) 3982 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4019 || (!INTEGRAL_TYPE_P (lhs_type) 3983 || (!INTEGRAL_TYPE_P (lhs_type)
4020 && !SCALAR_FLOAT_TYPE_P (lhs_type)))) 3984 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4021 || !useless_type_conversion_p (lhs_type, rhs2_type) 3985 || !useless_type_conversion_p (lhs_type, rhs2_type)
4022 || (GET_MODE_SIZE (element_mode (rhs2_type)) 3986 || maybe_lt (GET_MODE_SIZE (element_mode (rhs2_type)),
4023 < 2 * GET_MODE_SIZE (element_mode (rhs1_type)))) 3987 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4024 { 3988 {
4025 error ("type mismatch in widening sum reduction"); 3989 error ("type mismatch in widening sum reduction");
4026 debug_generic_expr (lhs_type); 3990 debug_generic_expr (lhs_type);
4027 debug_generic_expr (rhs1_type); 3991 debug_generic_expr (rhs1_type);
4028 debug_generic_expr (rhs2_type); 3992 debug_generic_expr (rhs2_type);
4037 case VEC_WIDEN_MULT_ODD_EXPR: 4001 case VEC_WIDEN_MULT_ODD_EXPR:
4038 { 4002 {
4039 if (TREE_CODE (rhs1_type) != VECTOR_TYPE 4003 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4040 || TREE_CODE (lhs_type) != VECTOR_TYPE 4004 || TREE_CODE (lhs_type) != VECTOR_TYPE
4041 || !types_compatible_p (rhs1_type, rhs2_type) 4005 || !types_compatible_p (rhs1_type, rhs2_type)
4042 || (GET_MODE_SIZE (element_mode (lhs_type)) 4006 || maybe_ne (GET_MODE_SIZE (element_mode (lhs_type)),
4043 != 2 * GET_MODE_SIZE (element_mode (rhs1_type)))) 4007 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4044 { 4008 {
4045 error ("type mismatch in vector widening multiplication"); 4009 error ("type mismatch in vector widening multiplication");
4046 debug_generic_expr (lhs_type); 4010 debug_generic_expr (lhs_type);
4047 debug_generic_expr (rhs1_type); 4011 debug_generic_expr (rhs1_type);
4048 debug_generic_expr (rhs2_type); 4012 debug_generic_expr (rhs2_type);
4055 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat 4019 /* ??? We currently use VEC_PACK_TRUNC_EXPR to simply concat
4056 vector boolean types. */ 4020 vector boolean types. */
4057 if (VECTOR_BOOLEAN_TYPE_P (lhs_type) 4021 if (VECTOR_BOOLEAN_TYPE_P (lhs_type)
4058 && VECTOR_BOOLEAN_TYPE_P (rhs1_type) 4022 && VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4059 && types_compatible_p (rhs1_type, rhs2_type) 4023 && types_compatible_p (rhs1_type, rhs2_type)
4060 && (TYPE_VECTOR_SUBPARTS (lhs_type) 4024 && known_eq (TYPE_VECTOR_SUBPARTS (lhs_type),
4061 == 2 * TYPE_VECTOR_SUBPARTS (rhs1_type))) 4025 2 * TYPE_VECTOR_SUBPARTS (rhs1_type)))
4062 return false; 4026 return false;
4063 4027
4064 /* Fallthru. */ 4028 /* Fallthru. */
4065 case VEC_PACK_SAT_EXPR: 4029 case VEC_PACK_SAT_EXPR:
4066 case VEC_PACK_FIX_TRUNC_EXPR: 4030 case VEC_PACK_FIX_TRUNC_EXPR:
4071 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type)) 4035 && SCALAR_FLOAT_TYPE_P (TREE_TYPE (rhs1_type))
4072 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))) 4036 && INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))
4073 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type)) 4037 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4074 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type)))) 4038 == INTEGRAL_TYPE_P (TREE_TYPE (lhs_type))))
4075 || !types_compatible_p (rhs1_type, rhs2_type) 4039 || !types_compatible_p (rhs1_type, rhs2_type)
4076 || (GET_MODE_SIZE (element_mode (rhs1_type)) 4040 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4077 != 2 * GET_MODE_SIZE (element_mode (lhs_type)))) 4041 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4042 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4043 TYPE_VECTOR_SUBPARTS (lhs_type)))
4078 { 4044 {
4079 error ("type mismatch in vector pack expression"); 4045 error ("type mismatch in vector pack expression");
4080 debug_generic_expr (lhs_type); 4046 debug_generic_expr (lhs_type);
4081 debug_generic_expr (rhs1_type); 4047 debug_generic_expr (rhs1_type);
4082 debug_generic_expr (rhs2_type); 4048 debug_generic_expr (rhs2_type);
4083 return true; 4049 return true;
4084 } 4050 }
4085 4051
4086 return false; 4052 return false;
4087 } 4053 }
4054
4055 case VEC_PACK_FLOAT_EXPR:
4056 if (TREE_CODE (rhs1_type) != VECTOR_TYPE
4057 || TREE_CODE (lhs_type) != VECTOR_TYPE
4058 || !INTEGRAL_TYPE_P (TREE_TYPE (rhs1_type))
4059 || !SCALAR_FLOAT_TYPE_P (TREE_TYPE (lhs_type))
4060 || !types_compatible_p (rhs1_type, rhs2_type)
4061 || maybe_ne (GET_MODE_SIZE (element_mode (rhs1_type)),
4062 2 * GET_MODE_SIZE (element_mode (lhs_type)))
4063 || maybe_ne (2 * TYPE_VECTOR_SUBPARTS (rhs1_type),
4064 TYPE_VECTOR_SUBPARTS (lhs_type)))
4065 {
4066 error ("type mismatch in vector pack expression");
4067 debug_generic_expr (lhs_type);
4068 debug_generic_expr (rhs1_type);
4069 debug_generic_expr (rhs2_type);
4070 return true;
4071 }
4072
4073 return false;
4088 4074
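Editor's note: the new VEC_PACK_FLOAT_EXPR case mirrors the existing pack checks: two integer vectors are converted and packed into one floating-point vector whose elements are half the size, so the lane count doubles. One shape the check accepts (sketch):

    /* rhs1, rhs2 : vector(2) long    -- 64-bit integer lanes
       lhs        : vector(4) float   -- 32-bit FP lanes      */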
4089 case MULT_EXPR: 4075 case MULT_EXPR:
4090 case MULT_HIGHPART_EXPR: 4076 case MULT_HIGHPART_EXPR:
4091 case TRUNC_DIV_EXPR: 4077 case TRUNC_DIV_EXPR:
4092 case CEIL_DIV_EXPR: 4078 case CEIL_DIV_EXPR:
4104 case BIT_XOR_EXPR: 4090 case BIT_XOR_EXPR:
4105 case BIT_AND_EXPR: 4091 case BIT_AND_EXPR:
4106 /* Continue with generic binary expression handling. */ 4092 /* Continue with generic binary expression handling. */
4107 break; 4093 break;
4108 4094
4095 case VEC_SERIES_EXPR:
4096 if (!useless_type_conversion_p (rhs1_type, rhs2_type))
4097 {
4098 error ("type mismatch in series expression");
4099 debug_generic_expr (rhs1_type);
4100 debug_generic_expr (rhs2_type);
4101 return true;
4102 }
4103 if (TREE_CODE (lhs_type) != VECTOR_TYPE
4104 || !useless_type_conversion_p (TREE_TYPE (lhs_type), rhs1_type))
4105 {
4106 error ("vector type expected in series expression");
4107 debug_generic_expr (lhs_type);
4108 return true;
4109 }
4110 return false;
4111
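Editor's note: VEC_SERIES_EXPR builds the vector { base, base + step, base + 2*step, ... }, which is why the check above wants two scalars of the same type and a vector LHS over that element type. A conforming statement, in sketch form:

    /* _1, _2 : long;  ivtmp_3 : vector(N) long, possibly variable length.  */
    ivtmp_3 = VEC_SERIES_EXPR <_1, _2>;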
4109 default: 4112 default:
4110 gcc_unreachable (); 4113 gcc_unreachable ();
4111 } 4114 }
4112 4115
4113 if (!useless_type_conversion_p (lhs_type, rhs1_type) 4116 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4173 debug_generic_expr (rhs3_type); 4176 debug_generic_expr (rhs3_type);
4174 return true; 4177 return true;
4175 } 4178 }
4176 break; 4179 break;
4177 4180
4178 case FMA_EXPR:
4179 if (!useless_type_conversion_p (lhs_type, rhs1_type)
4180 || !useless_type_conversion_p (lhs_type, rhs2_type)
4181 || !useless_type_conversion_p (lhs_type, rhs3_type))
4182 {
4183 error ("type mismatch in fused multiply-add expression");
4184 debug_generic_expr (lhs_type);
4185 debug_generic_expr (rhs1_type);
4186 debug_generic_expr (rhs2_type);
4187 debug_generic_expr (rhs3_type);
4188 return true;
4189 }
4190 break;
4191
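Editor's note: the FMA_EXPR case is gone because fused multiply-add is no longer a ternary tree code on this branch; it is emitted as an internal-function call instead, e.g. (illustrative dump excerpt):

    _5 = .FMA (a_1, b_2, c_3);    /* IFN_FMA: a_1 * b_2 + c_3 */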
4192 case VEC_COND_EXPR: 4181 case VEC_COND_EXPR:
4193 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type) 4182 if (!VECTOR_BOOLEAN_TYPE_P (rhs1_type)
4194 || TYPE_VECTOR_SUBPARTS (rhs1_type) 4183 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4195 != TYPE_VECTOR_SUBPARTS (lhs_type)) 4184 TYPE_VECTOR_SUBPARTS (lhs_type)))
4196 { 4185 {
4197 error ("the first argument of a VEC_COND_EXPR must be of a " 4186 error ("the first argument of a VEC_COND_EXPR must be of a "
4198 "boolean vector type of the same number of elements " 4187 "boolean vector type of the same number of elements "
4199 "as the result"); 4188 "as the result");
4200 debug_generic_expr (lhs_type); 4189 debug_generic_expr (lhs_type);
4201 debug_generic_expr (rhs1_type); 4190 debug_generic_expr (rhs1_type);
4202 return true; 4191 return true;
4203 } 4192 }
4204 /* Fallthrough. */ 4193 /* Fallthrough. */
4205 case COND_EXPR: 4194 case COND_EXPR:
4195 if (!is_gimple_val (rhs1)
4196 && verify_gimple_comparison (TREE_TYPE (rhs1),
4197 TREE_OPERAND (rhs1, 0),
4198 TREE_OPERAND (rhs1, 1),
4199 TREE_CODE (rhs1)))
4200 return true;
4206 if (!useless_type_conversion_p (lhs_type, rhs2_type) 4201 if (!useless_type_conversion_p (lhs_type, rhs2_type)
4207 || !useless_type_conversion_p (lhs_type, rhs3_type)) 4202 || !useless_type_conversion_p (lhs_type, rhs3_type))
4208 { 4203 {
4209 error ("type mismatch in conditional expression"); 4204 error ("type mismatch in conditional expression");
4210 debug_generic_expr (lhs_type); 4205 debug_generic_expr (lhs_type);
4236 debug_generic_expr (rhs2_type); 4231 debug_generic_expr (rhs2_type);
4237 debug_generic_expr (rhs3_type); 4232 debug_generic_expr (rhs3_type);
4238 return true; 4233 return true;
4239 } 4234 }
4240 4235
4241 if (TYPE_VECTOR_SUBPARTS (rhs1_type) != TYPE_VECTOR_SUBPARTS (rhs2_type) 4236 if (maybe_ne (TYPE_VECTOR_SUBPARTS (rhs1_type),
4242 || TYPE_VECTOR_SUBPARTS (rhs2_type) 4237 TYPE_VECTOR_SUBPARTS (rhs2_type))
4243 != TYPE_VECTOR_SUBPARTS (rhs3_type) 4238 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs2_type),
4244 || TYPE_VECTOR_SUBPARTS (rhs3_type) 4239 TYPE_VECTOR_SUBPARTS (rhs3_type))
4245 != TYPE_VECTOR_SUBPARTS (lhs_type)) 4240 || maybe_ne (TYPE_VECTOR_SUBPARTS (rhs3_type),
4241 TYPE_VECTOR_SUBPARTS (lhs_type)))
4246 { 4242 {
4247 error ("vectors with different element number found " 4243 error ("vectors with different element number found "
4248 "in vector permute expression"); 4244 "in vector permute expression");
4249 debug_generic_expr (lhs_type); 4245 debug_generic_expr (lhs_type);
4250 debug_generic_expr (rhs1_type); 4246 debug_generic_expr (rhs1_type);
4252 debug_generic_expr (rhs3_type); 4248 debug_generic_expr (rhs3_type);
4253 return true; 4249 return true;
4254 } 4250 }
4255 4251
4256 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE 4252 if (TREE_CODE (TREE_TYPE (rhs3_type)) != INTEGER_TYPE
4257 || GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (rhs3_type))) 4253 || (TREE_CODE (rhs3) != VECTOR_CST
4258 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (rhs1_type)))) 4254 && (GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE
4255 (TREE_TYPE (rhs3_type)))
4256 != GET_MODE_BITSIZE (SCALAR_TYPE_MODE
4257 (TREE_TYPE (rhs1_type))))))
4259 { 4258 {
4260 error ("invalid mask type in vector permute expression"); 4259 error ("invalid mask type in vector permute expression");
4261 debug_generic_expr (lhs_type); 4260 debug_generic_expr (lhs_type);
4262 debug_generic_expr (rhs1_type); 4261 debug_generic_expr (rhs1_type);
4263 debug_generic_expr (rhs2_type); 4262 debug_generic_expr (rhs2_type);
4351 && !SCALAR_FLOAT_TYPE_P (rhs1_type)) 4350 && !SCALAR_FLOAT_TYPE_P (rhs1_type))
4352 || (!INTEGRAL_TYPE_P (lhs_type) 4351 || (!INTEGRAL_TYPE_P (lhs_type)
4353 && !SCALAR_FLOAT_TYPE_P (lhs_type)))) 4352 && !SCALAR_FLOAT_TYPE_P (lhs_type))))
4354 || !types_compatible_p (rhs1_type, rhs2_type) 4353 || !types_compatible_p (rhs1_type, rhs2_type)
4355 || !useless_type_conversion_p (lhs_type, rhs3_type) 4354 || !useless_type_conversion_p (lhs_type, rhs3_type)
4356 || (GET_MODE_SIZE (element_mode (rhs3_type)) 4355 || maybe_lt (GET_MODE_SIZE (element_mode (rhs3_type)),
4357 < 2 * GET_MODE_SIZE (element_mode (rhs1_type)))) 4356 2 * GET_MODE_SIZE (element_mode (rhs1_type))))
4358 { 4357 {
4359 error ("type mismatch in dot product reduction"); 4358 error ("type mismatch in dot product reduction");
4360 debug_generic_expr (lhs_type); 4359 debug_generic_expr (lhs_type);
4361 debug_generic_expr (rhs1_type); 4360 debug_generic_expr (rhs1_type);
4362 debug_generic_expr (rhs2_type); 4361 debug_generic_expr (rhs2_type);
4435 debug_generic_stmt (TREE_TYPE (rhs1)); 4434 debug_generic_stmt (TREE_TYPE (rhs1));
4436 debug_generic_stmt (TREE_TYPE (op)); 4435 debug_generic_stmt (TREE_TYPE (op));
4437 return true; 4436 return true;
4438 } 4437 }
4439 4438
4440 return verify_types_in_gimple_reference (op, true); 4439 return (verify_address (rhs1, true)
4440 || verify_types_in_gimple_reference (op, true));
4441 } 4441 }
4442 4442
4443 /* tcc_reference */ 4443 /* tcc_reference */
4444 case INDIRECT_REF: 4444 case INDIRECT_REF:
4445 error ("INDIRECT_REF in gimple IL"); 4445 error ("INDIRECT_REF in gimple IL");
4520 error ("incorrect type of vector CONSTRUCTOR" 4520 error ("incorrect type of vector CONSTRUCTOR"
4521 " elements"); 4521 " elements");
4522 debug_generic_stmt (rhs1); 4522 debug_generic_stmt (rhs1);
4523 return true; 4523 return true;
4524 } 4524 }
4525 else if (CONSTRUCTOR_NELTS (rhs1) 4525 else if (maybe_ne (CONSTRUCTOR_NELTS (rhs1)
4526 * TYPE_VECTOR_SUBPARTS (elt_t) 4526 * TYPE_VECTOR_SUBPARTS (elt_t),
4527 != TYPE_VECTOR_SUBPARTS (rhs1_type)) 4527 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4528 { 4528 {
4529 error ("incorrect number of vector CONSTRUCTOR" 4529 error ("incorrect number of vector CONSTRUCTOR"
4530 " elements"); 4530 " elements");
4531 debug_generic_stmt (rhs1); 4531 debug_generic_stmt (rhs1);
4532 return true; 4532 return true;
4537 { 4537 {
4538 error ("incorrect type of vector CONSTRUCTOR elements"); 4538 error ("incorrect type of vector CONSTRUCTOR elements");
4539 debug_generic_stmt (rhs1); 4539 debug_generic_stmt (rhs1);
4540 return true; 4540 return true;
4541 } 4541 }
4542 else if (CONSTRUCTOR_NELTS (rhs1) 4542 else if (maybe_gt (CONSTRUCTOR_NELTS (rhs1),
4543 > TYPE_VECTOR_SUBPARTS (rhs1_type)) 4543 TYPE_VECTOR_SUBPARTS (rhs1_type)))
4544 { 4544 {
4545 error ("incorrect number of vector CONSTRUCTOR elements"); 4545 error ("incorrect number of vector CONSTRUCTOR elements");
4546 debug_generic_stmt (rhs1); 4546 debug_generic_stmt (rhs1);
4547 return true; 4547 return true;
4548 } 4548 }
4575 error ("non-vector CONSTRUCTOR with elements"); 4575 error ("non-vector CONSTRUCTOR with elements");
4576 debug_generic_stmt (rhs1); 4576 debug_generic_stmt (rhs1);
4577 return true; 4577 return true;
4578 } 4578 }
4579 return res; 4579 return res;
4580
4581 case ASSERT_EXPR:
4582 /* FIXME. */
4583 rhs1 = fold (ASSERT_EXPR_COND (rhs1));
4584 if (rhs1 == boolean_false_node)
4585 {
4586 error ("ASSERT_EXPR with an always-false condition");
4587 debug_generic_stmt (rhs1);
4588 return true;
4589 }
4590 break;
4591
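Editor's note: ASSERT_EXPR now gets a real check instead of falling through a FIXME: the asserted predicate must not fold to false. These expressions are VRP's way of attaching a known range to a name along a path, e.g. (sketch):

    /* On the true edge of  if (x_2 > 0):  */
    x_3 = ASSERT_EXPR <x_2, x_2 > 0>;
    /* ASSERT_EXPR <x_2, 0 != 0> would now be rejected as always-false.  */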
4580 case OBJ_TYPE_REF: 4592 case OBJ_TYPE_REF:
4581 case ASSERT_EXPR:
4582 case WITH_SIZE_EXPR: 4593 case WITH_SIZE_EXPR:
4583 /* FIXME. */ 4594 /* FIXME. */
4584 return res; 4595 return res;
4585 4596
4586 default:; 4597 default:;
4701 debug_generic_expr (index_type); 4712 debug_generic_expr (index_type);
4702 return true; 4713 return true;
4703 } 4714 }
4704 4715
4705 elt = gimple_switch_label (stmt, 0); 4716 elt = gimple_switch_label (stmt, 0);
4706 if (CASE_LOW (elt) != NULL_TREE || CASE_HIGH (elt) != NULL_TREE) 4717 if (CASE_LOW (elt) != NULL_TREE
4718 || CASE_HIGH (elt) != NULL_TREE
4719 || CASE_CHAIN (elt) != NULL_TREE)
4707 { 4720 {
4708 error ("invalid default case label in switch statement"); 4721 error ("invalid default case label in switch statement");
4709 debug_generic_expr (elt); 4722 debug_generic_expr (elt);
4710 return true; 4723 return true;
4711 } 4724 }
4713 n = gimple_switch_num_labels (stmt); 4726 n = gimple_switch_num_labels (stmt);
4714 for (i = 1; i < n; i++) 4727 for (i = 1; i < n; i++)
4715 { 4728 {
4716 elt = gimple_switch_label (stmt, i); 4729 elt = gimple_switch_label (stmt, i);
4717 4730
4731 if (CASE_CHAIN (elt))
4732 {
4733 error ("invalid CASE_CHAIN");
4734 debug_generic_expr (elt);
4735 return true;
4736 }
4718 if (! CASE_LOW (elt)) 4737 if (! CASE_LOW (elt))
4719 { 4738 {
4720 error ("invalid case label in switch statement"); 4739 error ("invalid case label in switch statement");
4721 debug_generic_expr (elt); 4740 debug_generic_expr (elt);
4722 return true; 4741 return true;
4911 4930
4912 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem, 4931 /* Verify the contents of a GIMPLE_PHI. Returns true if there is a problem,
4913 and false otherwise. */ 4932 and false otherwise. */
4914 4933
4915 static bool 4934 static bool
4916 verify_gimple_phi (gimple *phi) 4935 verify_gimple_phi (gphi *phi)
4917 { 4936 {
4918 bool err = false; 4937 bool err = false;
4919 unsigned i; 4938 unsigned i;
4920 tree phi_result = gimple_phi_result (phi); 4939 tree phi_result = gimple_phi_result (phi);
4921 bool virtual_p; 4940 bool virtual_p;
5068 5087
5069 static bool 5088 static bool
5070 tree_node_can_be_shared (tree t) 5089 tree_node_can_be_shared (tree t)
5071 { 5090 {
5072 if (IS_TYPE_OR_DECL_P (t) 5091 if (IS_TYPE_OR_DECL_P (t)
5073 || is_gimple_min_invariant (t)
5074 || TREE_CODE (t) == SSA_NAME 5092 || TREE_CODE (t) == SSA_NAME
5075 || t == error_mark_node 5093 || TREE_CODE (t) == IDENTIFIER_NODE
5076 || TREE_CODE (t) == IDENTIFIER_NODE) 5094 || TREE_CODE (t) == CASE_LABEL_EXPR
5095 || is_gimple_min_invariant (t))
5077 return true; 5096 return true;
5078 5097
5079 if (TREE_CODE (t) == CASE_LABEL_EXPR) 5098 if (t == error_mark_node)
5080 return true;
5081
5082 if (DECL_P (t))
5083 return true; 5099 return true;
5084 5100
5085 return false; 5101 return false;
5086 } 5102 }
5087 5103
5166 5182
5167 static tree 5183 static tree
5168 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data) 5184 verify_expr_location_1 (tree *tp, int *walk_subtrees, void *data)
5169 { 5185 {
5170 hash_set<tree> *blocks = (hash_set<tree> *) data; 5186 hash_set<tree> *blocks = (hash_set<tree> *) data;
5171 5187 tree t = *tp;
5172 if (VAR_P (*tp) && DECL_HAS_DEBUG_EXPR_P (*tp)) 5188
5173 { 5189 /* ??? This doesn't really belong here but there's no good place to
5174 tree t = DECL_DEBUG_EXPR (*tp); 5190 stick this remainder of old verify_expr. */
5175 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL); 5191 /* ??? This barfs on debug stmts which contain binds to vars with
5192 different function context. */
5193 #if 0
5194 if (VAR_P (t)
5195 || TREE_CODE (t) == PARM_DECL
5196 || TREE_CODE (t) == RESULT_DECL)
5197 {
5198 tree context = decl_function_context (t);
5199 if (context != cfun->decl
5200 && !SCOPE_FILE_SCOPE_P (context)
5201 && !TREE_STATIC (t)
5202 && !DECL_EXTERNAL (t))
5203 {
5204 error ("local declaration from a different function");
5205 return t;
5206 }
5207 }
5208 #endif
5209
5210 if (VAR_P (t) && DECL_HAS_DEBUG_EXPR_P (t))
5211 {
5212 tree x = DECL_DEBUG_EXPR (t);
5213 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5176 if (addr) 5214 if (addr)
5177 return addr; 5215 return addr;
5178 } 5216 }
5179 if ((VAR_P (*tp) 5217 if ((VAR_P (t)
5180 || TREE_CODE (*tp) == PARM_DECL 5218 || TREE_CODE (t) == PARM_DECL
5181 || TREE_CODE (*tp) == RESULT_DECL) 5219 || TREE_CODE (t) == RESULT_DECL)
5182 && DECL_HAS_VALUE_EXPR_P (*tp)) 5220 && DECL_HAS_VALUE_EXPR_P (t))
5183 { 5221 {
5184 tree t = DECL_VALUE_EXPR (*tp); 5222 tree x = DECL_VALUE_EXPR (t);
5185 tree addr = walk_tree (&t, verify_expr_no_block, NULL, NULL); 5223 tree addr = walk_tree (&x, verify_expr_no_block, NULL, NULL);
5186 if (addr) 5224 if (addr)
5187 return addr; 5225 return addr;
5188 } 5226 }
5189 5227
5190 if (!EXPR_P (*tp)) 5228 if (!EXPR_P (t))
5191 { 5229 {
5192 *walk_subtrees = false; 5230 *walk_subtrees = false;
5193 return NULL; 5231 return NULL;
5194 } 5232 }
5195 5233
5196 location_t loc = EXPR_LOCATION (*tp); 5234 location_t loc = EXPR_LOCATION (t);
5197 if (verify_location (blocks, loc)) 5235 if (verify_location (blocks, loc))
5198 return *tp; 5236 return t;
5199 5237
5200 return NULL; 5238 return NULL;
5201 } 5239 }
5202 5240
5203 /* Called via walk_gimple_op. Verify locations of expressions. */ 5241 /* Called via walk_gimple_op. Verify locations of expressions. */
5230 basic_block bb; 5268 basic_block bb;
5231 bool err = false; 5269 bool err = false;
5232 5270
5233 timevar_push (TV_TREE_STMT_VERIFY); 5271 timevar_push (TV_TREE_STMT_VERIFY);
5234 hash_set<void *> visited; 5272 hash_set<void *> visited;
5235 hash_set<gimple *> visited_stmts; 5273 hash_set<gimple *> visited_throwing_stmts;
5236 5274
5237 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */ 5275 /* Collect all BLOCKs referenced by the BLOCK tree of FN. */
5238 hash_set<tree> blocks; 5276 hash_set<tree> blocks;
5239 if (DECL_INITIAL (fn->decl)) 5277 if (DECL_INITIAL (fn->decl))
5240 { 5278 {
5243 } 5281 }
5244 5282
5245 FOR_EACH_BB_FN (bb, fn) 5283 FOR_EACH_BB_FN (bb, fn)
5246 { 5284 {
5247 gimple_stmt_iterator gsi; 5285 gimple_stmt_iterator gsi;
5286 edge_iterator ei;
5287 edge e;
5248 5288
5249 for (gphi_iterator gpi = gsi_start_phis (bb); 5289 for (gphi_iterator gpi = gsi_start_phis (bb);
5250 !gsi_end_p (gpi); 5290 !gsi_end_p (gpi);
5251 gsi_next (&gpi)) 5291 gsi_next (&gpi))
5252 { 5292 {
5253 gphi *phi = gpi.phi (); 5293 gphi *phi = gpi.phi ();
5254 bool err2 = false; 5294 bool err2 = false;
5255 unsigned i; 5295 unsigned i;
5256
5257 visited_stmts.add (phi);
5258 5296
5259 if (gimple_bb (phi) != bb) 5297 if (gimple_bb (phi) != bb)
5260 { 5298 {
5261 error ("gimple_bb (phi) is set to a wrong basic block"); 5299 error ("gimple_bb (phi) is set to a wrong basic block");
5262 err2 = true; 5300 err2 = true;
5309 bool err2 = false; 5347 bool err2 = false;
5310 struct walk_stmt_info wi; 5348 struct walk_stmt_info wi;
5311 tree addr; 5349 tree addr;
5312 int lp_nr; 5350 int lp_nr;
5313 5351
5314 visited_stmts.add (stmt);
5315
5316 if (gimple_bb (stmt) != bb) 5352 if (gimple_bb (stmt) != bb)
5317 { 5353 {
5318 error ("gimple_bb (stmt) is set to a wrong basic block"); 5354 error ("gimple_bb (stmt) is set to a wrong basic block");
5319 err2 = true; 5355 err2 = true;
5320 } 5356 }
5339 { 5375 {
5340 debug_generic_expr (addr); 5376 debug_generic_expr (addr);
5341 err2 |= true; 5377 err2 |= true;
5342 } 5378 }
5343 5379
5344 /* ??? Instead of not checking these stmts at all the walker
5345 should know its context via wi. */
5346 if (!is_gimple_debug (stmt)
5347 && !is_gimple_omp (stmt))
5348 {
5349 memset (&wi, 0, sizeof (wi));
5350 addr = walk_gimple_op (stmt, verify_expr, &wi);
5351 if (addr)
5352 {
5353 debug_generic_expr (addr);
5354 inform (gimple_location (stmt), "in statement");
5355 err2 |= true;
5356 }
5357 }
5358
5359 /* If the statement is marked as part of an EH region, then it is 5380 /* If the statement is marked as part of an EH region, then it is
5360 expected that the statement could throw. Verify that when we 5381 expected that the statement could throw. Verify that when we
5361 have optimizations that simplify statements such that we prove 5382 have optimizations that simplify statements such that we prove
5362 that they cannot throw, that we update other data structures 5383 that they cannot throw, that we update other data structures
5363 to match. */ 5384 to match. */
5364 lp_nr = lookup_stmt_eh_lp (stmt); 5385 lp_nr = lookup_stmt_eh_lp (stmt);
5386 if (lp_nr != 0)
5387 visited_throwing_stmts.add (stmt);
5365 if (lp_nr > 0) 5388 if (lp_nr > 0)
5366 { 5389 {
5367 if (!stmt_could_throw_p (stmt)) 5390 if (!stmt_could_throw_p (cfun, stmt))
5368 { 5391 {
5369 if (verify_nothrow) 5392 if (verify_nothrow)
5370 { 5393 {
5371 error ("statement marked for throw, but doesn%'t"); 5394 error ("statement marked for throw, but doesn%'t");
5372 err2 |= true; 5395 err2 |= true;
5381 5404
5382 if (err2) 5405 if (err2)
5383 debug_gimple_stmt (stmt); 5406 debug_gimple_stmt (stmt);
5384 err |= err2; 5407 err |= err2;
5385 } 5408 }
5386 } 5409
5387 5410 FOR_EACH_EDGE (e, ei, bb->succs)
5411 if (e->goto_locus != UNKNOWN_LOCATION)
5412 err |= verify_location (&blocks, e->goto_locus);
5413 }
5414
5415 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5388 eh_error_found = false; 5416 eh_error_found = false;
5389 hash_map<gimple *, int> *eh_table = get_eh_throw_stmt_table (cfun);
5390 if (eh_table) 5417 if (eh_table)
5391 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node> 5418 eh_table->traverse<hash_set<gimple *> *, verify_eh_throw_stmt_node>
5392 (&visited_stmts); 5419 (&visited_throwing_stmts);
5393 5420
5394 if (err || eh_error_found) 5421 if (err || eh_error_found)
5395 internal_error ("verify_gimple failed"); 5422 internal_error ("verify_gimple failed");
5396 5423
5397 verify_histograms (); 5424 verify_histograms ();
5466 fprintf (stderr, " is not first in a sequence of labels in bb %d", 5493 fprintf (stderr, " is not first in a sequence of labels in bb %d",
5467 bb->index); 5494 bb->index);
5468 err = 1; 5495 err = 1;
5469 } 5496 }
5470 5497
5471 if (label_to_block (label) != bb) 5498 if (label_to_block (cfun, label) != bb)
5472 { 5499 {
5473 error ("label "); 5500 error ("label ");
5474 print_generic_expr (stderr, label); 5501 print_generic_expr (stderr, label);
5475 fprintf (stderr, " to block does not match in bb %d", 5502 fprintf (stderr, " to block does not match in bb %d",
5476 bb->index); 5503 bb->index);
5509 fprintf (stderr, " in the middle of basic block %d", bb->index); 5536 fprintf (stderr, " in the middle of basic block %d", bb->index);
5510 err = 1; 5537 err = 1;
5511 } 5538 }
5512 } 5539 }
5513 5540
5514 gsi = gsi_last_bb (bb); 5541 gsi = gsi_last_nondebug_bb (bb);
5515 if (gsi_end_p (gsi)) 5542 if (gsi_end_p (gsi))
5516 continue; 5543 continue;
5517 5544
5518 stmt = gsi_stmt (gsi); 5545 stmt = gsi_stmt (gsi);
5519 5546
5623 n = gimple_switch_num_labels (switch_stmt); 5650 n = gimple_switch_num_labels (switch_stmt);
5624 5651
5625 /* Mark all the destination basic blocks. */ 5652 /* Mark all the destination basic blocks. */
5626 for (i = 0; i < n; ++i) 5653 for (i = 0; i < n; ++i)
5627 { 5654 {
5628 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i)); 5655 basic_block label_bb = gimple_switch_label_bb (cfun, switch_stmt, i);
5629 basic_block label_bb = label_to_block (lab);
5630 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1); 5656 gcc_assert (!label_bb->aux || label_bb->aux == (void *)1);
5631 label_bb->aux = (void *)1; 5657 label_bb->aux = (void *)1;
5632 } 5658 }
5633 5659
5634 /* Verify that the case labels are sorted. */ 5660 /* Verify that the case labels are sorted. */
5679 } 5705 }
5680 5706
5681 /* Check that we have all of them. */ 5707 /* Check that we have all of them. */
5682 for (i = 0; i < n; ++i) 5708 for (i = 0; i < n; ++i)
5683 { 5709 {
5684 tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i)); 5710 basic_block label_bb = gimple_switch_label_bb (cfun,
5685 basic_block label_bb = label_to_block (lab); 5711 switch_stmt, i);
5686 5712
5687 if (label_bb->aux != (void *)2) 5713 if (label_bb->aux != (void *)2)
5688 { 5714 {
5689 error ("missing edge %i->%i", bb->index, label_bb->index); 5715 error ("missing edge %i->%i", bb->index, label_bb->index);
5690 err = 1; 5716 err = 1;
5851 ret = gimple_try_redirect_by_replacing_jump (e, dest); 5877 ret = gimple_try_redirect_by_replacing_jump (e, dest);
5852 if (ret) 5878 if (ret)
5853 return ret; 5879 return ret;
5854 } 5880 }
5855 5881
5856 gsi = gsi_last_bb (bb); 5882 gsi = gsi_last_nondebug_bb (bb);
5857 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi); 5883 stmt = gsi_end_p (gsi) ? NULL : gsi_stmt (gsi);
5858 5884
5859 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK) 5885 switch (stmt ? gimple_code (stmt) : GIMPLE_ERROR_MARK)
5860 { 5886 {
5861 case GIMPLE_COND: 5887 case GIMPLE_COND:
5904 size_t i, n = gimple_switch_num_labels (switch_stmt); 5930 size_t i, n = gimple_switch_num_labels (switch_stmt);
5905 5931
5906 for (i = 0; i < n; i++) 5932 for (i = 0; i < n; i++)
5907 { 5933 {
5908 tree elt = gimple_switch_label (switch_stmt, i); 5934 tree elt = gimple_switch_label (switch_stmt, i);
5909 if (label_to_block (CASE_LABEL (elt)) == e->dest) 5935 if (label_to_block (cfun, CASE_LABEL (elt)) == e->dest)
5910 CASE_LABEL (elt) = label; 5936 CASE_LABEL (elt) = label;
5911 } 5937 }
5912 } 5938 }
5913 } 5939 }
5914 break; 5940 break;
5920 tree label = NULL; 5946 tree label = NULL;
5921 5947
5922 for (i = 0; i < n; ++i) 5948 for (i = 0; i < n; ++i)
5923 { 5949 {
5924 tree cons = gimple_asm_label_op (asm_stmt, i); 5950 tree cons = gimple_asm_label_op (asm_stmt, i);
5925 if (label_to_block (TREE_VALUE (cons)) == e->dest) 5951 if (label_to_block (cfun, TREE_VALUE (cons)) == e->dest)
5926 { 5952 {
5927 if (!label) 5953 if (!label)
5928 label = gimple_block_label (dest); 5954 label = gimple_block_label (dest);
5929 TREE_VALUE (cons) = label; 5955 TREE_VALUE (cons) = label;
5930 } 5956 }
6076 { 6102 {
6077 /* BB must have no executable statements. */ 6103 /* BB must have no executable statements. */
6078 gimple_stmt_iterator gsi = gsi_after_labels (bb); 6104 gimple_stmt_iterator gsi = gsi_after_labels (bb);
6079 if (phi_nodes (bb)) 6105 if (phi_nodes (bb))
6080 return false; 6106 return false;
6081 if (gsi_end_p (gsi)) 6107 while (!gsi_end_p (gsi))
6082 return true; 6108 {
6083 if (is_gimple_debug (gsi_stmt (gsi))) 6109 gimple *stmt = gsi_stmt (gsi);
6084 gsi_next_nondebug (&gsi); 6110 if (is_gimple_debug (stmt))
6085 return gsi_end_p (gsi); 6111 ;
6112 else if (gimple_code (stmt) == GIMPLE_NOP
6113 || gimple_code (stmt) == GIMPLE_PREDICT)
6114 ;
6115 else
6116 return false;
6117 gsi_next (&gsi);
6118 }
6119 return true;
6086 } 6120 }
6087 6121
6088 6122
6089 /* Split a basic block if it ends with a conditional branch and if the 6123 /* Split a basic block if it ends with a conditional branch and if the
6090 other part of the block is not empty. */ 6124 other part of the block is not empty. */
6304 { 6338 {
6305 unsigned i; 6339 unsigned i;
6306 bool free_region_copy = false, copying_header = false; 6340 bool free_region_copy = false, copying_header = false;
6307 struct loop *loop = entry->dest->loop_father; 6341 struct loop *loop = entry->dest->loop_father;
6308 edge exit_copy; 6342 edge exit_copy;
6309 vec<basic_block> doms; 6343 vec<basic_block> doms = vNULL;
6310 edge redirected; 6344 edge redirected;
6311 int total_freq = 0, entry_freq = 0;
6312 profile_count total_count = profile_count::uninitialized (); 6345 profile_count total_count = profile_count::uninitialized ();
6313 profile_count entry_count = profile_count::uninitialized (); 6346 profile_count entry_count = profile_count::uninitialized ();
6314 6347
6315 if (!can_copy_bbs_p (region, n_region)) 6348 if (!can_copy_bbs_p (region, n_region))
6316 return false; 6349 return false;
6374 /* Fix up corner cases, to avoid division by zero or creation of negative 6407 /* Fix up corner cases, to avoid division by zero or creation of negative
6375 frequencies. */ 6408 frequencies. */
6376 if (entry_count > total_count) 6409 if (entry_count > total_count)
6377 entry_count = total_count; 6410 entry_count = total_count;
6378 } 6411 }
6379 if (!(total_count > 0) || !(entry_count > 0))
6380 {
6381 total_freq = entry->dest->frequency;
6382 entry_freq = EDGE_FREQUENCY (entry);
6383 /* Fix up corner cases, to avoid division by zero or creation of negative
6384 frequencies. */
6385 if (total_freq == 0)
6386 total_freq = 1;
6387 else if (entry_freq > total_freq)
6388 entry_freq = total_freq;
6389 }
6390 6412
6391 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop, 6413 copy_bbs (region, n_region, region_copy, &exit, 1, &exit_copy, loop,
6392 split_edge_bb_loc (entry), update_dominance); 6414 split_edge_bb_loc (entry), update_dominance);
6393 if (total_count > 0 && entry_count > 0) 6415 if (total_count.initialized_p () && entry_count.initialized_p ())
6394 { 6416 {
6395 scale_bbs_frequencies_profile_count (region, n_region, 6417 scale_bbs_frequencies_profile_count (region, n_region,
6396 total_count - entry_count, 6418 total_count - entry_count,
6397 total_count); 6419 total_count);
6398 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count, 6420 scale_bbs_frequencies_profile_count (region_copy, n_region, entry_count,
6399 total_count); 6421 total_count);
6400 }
6401 else
6402 {
6403 scale_bbs_frequencies_int (region, n_region, total_freq - entry_freq,
6404 total_freq);
6405 scale_bbs_frequencies_int (region_copy, n_region, entry_freq, total_freq);
6406 } 6422 }
6407 6423
6408 if (copying_header) 6424 if (copying_header)
6409 { 6425 {
6410 loop->header = exit->dest; 6426 loop->header = exit->dest;
6490 bool free_region_copy = false; 6506 bool free_region_copy = false;
6491 struct loop *loop = exit->dest->loop_father; 6507 struct loop *loop = exit->dest->loop_father;
6492 struct loop *orig_loop = entry->dest->loop_father; 6508 struct loop *orig_loop = entry->dest->loop_father;
6493 basic_block switch_bb, entry_bb, nentry_bb; 6509 basic_block switch_bb, entry_bb, nentry_bb;
6494 vec<basic_block> doms; 6510 vec<basic_block> doms;
6495 int total_freq = 0, exit_freq = 0;
6496 profile_count total_count = profile_count::uninitialized (), 6511 profile_count total_count = profile_count::uninitialized (),
6497 exit_count = profile_count::uninitialized (); 6512 exit_count = profile_count::uninitialized ();
6498 edge exits[2], nexits[2], e; 6513 edge exits[2], nexits[2], e;
6499 gimple_stmt_iterator gsi; 6514 gimple_stmt_iterator gsi;
6500 gimple *cond_stmt; 6515 gimple *cond_stmt;
6535 6550
6536 /* Record blocks outside the region that are dominated by something 6551 /* Record blocks outside the region that are dominated by something
6537 inside. */ 6552 inside. */
6538 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region); 6553 doms = get_dominated_by_region (CDI_DOMINATORS, region, n_region);
6539 6554
6540 if (exit->src->count > 0) 6555 total_count = exit->src->count;
6541 { 6556 exit_count = exit->count ();
6542 total_count = exit->src->count; 6557 /* Fix up corner cases, to avoid division by zero or creation of negative
6543 exit_count = exit->count (); 6558 frequencies. */
6544 /* Fix up corner cases, to avoid division by zero or creation of negative 6559 if (exit_count > total_count)
6545 frequencies. */ 6560 exit_count = total_count;
6546 if (exit_count > total_count)
6547 exit_count = total_count;
6548 }
6549 else
6550 {
6551 total_freq = exit->src->frequency;
6552 exit_freq = EDGE_FREQUENCY (exit);
6553 /* Fix up corner cases, to avoid division by zero or creation of negative
6554 frequencies. */
6555 if (total_freq == 0)
6556 total_freq = 1;
6557 if (exit_freq > total_freq)
6558 exit_freq = total_freq;
6559 }
6560 6561
6561 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop, 6562 copy_bbs (region, n_region, region_copy, exits, 2, nexits, orig_loop,
6562 split_edge_bb_loc (exit), true); 6563 split_edge_bb_loc (exit), true);
6563 if (total_count.initialized_p ()) 6564 if (total_count.initialized_p () && exit_count.initialized_p ())
6564 { 6565 {
6565 scale_bbs_frequencies_profile_count (region, n_region, 6566 scale_bbs_frequencies_profile_count (region, n_region,
6566 total_count - exit_count, 6567 total_count - exit_count,
6567 total_count); 6568 total_count);
6568 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count, 6569 scale_bbs_frequencies_profile_count (region_copy, n_region, exit_count,
6569 total_count); 6570 total_count);
6570 }
6571 else
6572 {
6573 scale_bbs_frequencies_int (region, n_region, total_freq - exit_freq,
6574 total_freq);
6575 scale_bbs_frequencies_int (region_copy, n_region, exit_freq, total_freq);
6576 } 6571 }
6577 6572
6578 /* Create the switch block, and put the exit condition to it. */ 6573 /* Create the switch block, and put the exit condition to it. */
6579 entry_bb = entry->dest; 6574 entry_bb = entry->dest;
6580 nentry_bb = get_bb_copy (entry_bb); 6575 nentry_bb = get_bb_copy (entry_bb);
6772 tree block = TREE_BLOCK (t); 6767 tree block = TREE_BLOCK (t);
6773 if (block == NULL_TREE) 6768 if (block == NULL_TREE)
6774 ; 6769 ;
6775 else if (block == p->orig_block 6770 else if (block == p->orig_block
6776 || p->orig_block == NULL_TREE) 6771 || p->orig_block == NULL_TREE)
6777 TREE_SET_BLOCK (t, p->new_block); 6772 {
6773 /* tree_node_can_be_shared says we can share invariant
6774 addresses but unshare_expr copies them anyways. Make sure
6775 to unshare before adjusting the block in place - we do not
6776 always see a copy here. */
6777 if (TREE_CODE (t) == ADDR_EXPR
6778 && is_gimple_min_invariant (t))
6779 *tp = t = unshare_expr (t);
6780 TREE_SET_BLOCK (t, p->new_block);
6781 }
6778 else if (flag_checking) 6782 else if (flag_checking)
6779 { 6783 {
6780 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block) 6784 while (block && TREE_CODE (block) == BLOCK && block != p->orig_block)
6781 block = BLOCK_SUPERCONTEXT (block); 6785 block = BLOCK_SUPERCONTEXT (block);
6782 gcc_assert (block == p->orig_block); 6786 gcc_assert (block == p->orig_block);
6882 { 6886 {
6883 case GIMPLE_CALL: 6887 case GIMPLE_CALL:
6884 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */ 6888 /* Remap the region numbers for __builtin_eh_{pointer,filter}. */
6885 { 6889 {
6886 tree r, fndecl = gimple_call_fndecl (stmt); 6890 tree r, fndecl = gimple_call_fndecl (stmt);
6887 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) 6891 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
6888 switch (DECL_FUNCTION_CODE (fndecl)) 6892 switch (DECL_FUNCTION_CODE (fndecl))
6889 { 6893 {
6890 case BUILT_IN_EH_COPY_VALUES: 6894 case BUILT_IN_EH_COPY_VALUES:
6891 r = gimple_call_arg (stmt, 1); 6895 r = gimple_call_arg (stmt, 1);
6892 r = move_stmt_eh_region_tree_nr (r, p); 6896 r = move_stmt_eh_region_tree_nr (r, p);
7326 if (TREE_CODE (from) != SSA_NAME) 7330 if (TREE_CODE (from) != SSA_NAME)
7327 return true; 7331 return true;
7328 7332
7329 bitmap_set_bit (release_names, SSA_NAME_VERSION (from)); 7333 bitmap_set_bit (release_names, SSA_NAME_VERSION (from));
7330 return true; 7334 return true;
7335 }
7336
7337 /* Return LOOP_DIST_ALIAS call if present in BB. */
7338
7339 static gimple *
7340 find_loop_dist_alias (basic_block bb)
7341 {
7342 gimple *g = last_stmt (bb);
7343 if (g == NULL || gimple_code (g) != GIMPLE_COND)
7344 return NULL;
7345
7346 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7347 gsi_prev (&gsi);
7348 if (gsi_end_p (gsi))
7349 return NULL;
7350
7351 g = gsi_stmt (gsi);
7352 if (gimple_call_internal_p (g, IFN_LOOP_DIST_ALIAS))
7353 return g;
7354 return NULL;
7355 }
7356
7357 /* Fold loop internal call G like IFN_LOOP_VECTORIZED/IFN_LOOP_DIST_ALIAS
7358 to VALUE and update any immediate uses of its LHS. */
7359
7360 void
7361 fold_loop_internal_call (gimple *g, tree value)
7362 {
7363 tree lhs = gimple_call_lhs (g);
7364 use_operand_p use_p;
7365 imm_use_iterator iter;
7366 gimple *use_stmt;
7367 gimple_stmt_iterator gsi = gsi_for_stmt (g);
7368
7369 update_call_from_tree (&gsi, value);
7370 FOR_EACH_IMM_USE_STMT (use_stmt, iter, lhs)
7371 {
7372 FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
7373 SET_USE (use_p, value);
7374 update_stmt (use_stmt);
7375 }
7331 } 7376 }
7332 7377
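Editor's note: the two new helpers above are used together further down in this patch: find the IFN_LOOP_DIST_ALIAS guard that versioned a loop, then collapse it to one of its arguments so only the wanted copy survives; the vectorizer can use the same folding helper for IFN_LOOP_VECTORIZED. A hedged usage sketch:

    gimple *g = find_loop_dist_alias (bb);
    if (g != NULL)
      /* The second call argument is the guard's fallback value; folding to
         it keeps the non-versioned copy and lets CFG cleanup remove the
         other one.  */
      fold_loop_internal_call (g, gimple_call_arg (g, 1));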
7333 /* Move a single-entry, single-exit region delimited by ENTRY_BB and 7378 /* Move a single-entry, single-exit region delimited by ENTRY_BB and
7334 EXIT_BB to function DEST_CFUN. The whole region is replaced by a 7379 EXIT_BB to function DEST_CFUN. The whole region is replaced by a
7335 single basic block in the original CFG and the new basic block is 7380 single basic block in the original CFG and the new basic block is
7460 /* Initialize an empty loop tree. */ 7505 /* Initialize an empty loop tree. */
7461 struct loops *loops = ggc_cleared_alloc<struct loops> (); 7506 struct loops *loops = ggc_cleared_alloc<struct loops> ();
7462 init_loops_structure (dest_cfun, loops, 1); 7507 init_loops_structure (dest_cfun, loops, 1);
7463 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES; 7508 loops->state = LOOPS_MAY_HAVE_MULTIPLE_LATCHES;
7464 set_loops_for_fn (dest_cfun, loops); 7509 set_loops_for_fn (dest_cfun, loops);
7510
7511 vec<loop_p, va_gc> *larray = get_loops (saved_cfun)->copy ();
7465 7512
7466 /* Move the outlined loop tree part. */ 7513 /* Move the outlined loop tree part. */
7467 num_nodes = bbs.length (); 7514 num_nodes = bbs.length ();
7468 FOR_EACH_VEC_ELT (bbs, i, bb) 7515 FOR_EACH_VEC_ELT (bbs, i, bb)
7469 { 7516 {
7499 if (slot) 7546 if (slot)
7500 l->exits->clear_slot (slot); 7547 l->exits->clear_slot (slot);
7501 } 7548 }
7502 } 7549 }
7503 7550
7504
7505 /* Adjust the number of blocks in the tree root of the outlined part. */ 7551 /* Adjust the number of blocks in the tree root of the outlined part. */
7506 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2; 7552 get_loop (dest_cfun, 0)->num_nodes = bbs.length () + 2;
7507 7553
7508 /* Setup a mapping to be used by move_block_to_fn. */ 7554 /* Setup a mapping to be used by move_block_to_fn. */
7509 loop->aux = current_loops->tree_root; 7555 loop->aux = current_loops->tree_root;
7510 loop0->aux = current_loops->tree_root; 7556 loop0->aux = current_loops->tree_root;
7511 7557
7558 /* Fix up orig_loop_num. If the block referenced in it has been moved
7559 to dest_cfun, update orig_loop_num field, otherwise clear it. */
7560 struct loop *dloop;
7561 signed char *moved_orig_loop_num = NULL;
7562 FOR_EACH_LOOP_FN (dest_cfun, dloop, 0)
7563 if (dloop->orig_loop_num)
7564 {
7565 if (moved_orig_loop_num == NULL)
7566 moved_orig_loop_num
7567 = XCNEWVEC (signed char, vec_safe_length (larray));
7568 if ((*larray)[dloop->orig_loop_num] != NULL
7569 && get_loop (saved_cfun, dloop->orig_loop_num) == NULL)
7570 {
7571 if (moved_orig_loop_num[dloop->orig_loop_num] >= 0
7572 && moved_orig_loop_num[dloop->orig_loop_num] < 2)
7573 moved_orig_loop_num[dloop->orig_loop_num]++;
7574 dloop->orig_loop_num = (*larray)[dloop->orig_loop_num]->num;
7575 }
7576 else
7577 {
7578 moved_orig_loop_num[dloop->orig_loop_num] = -1;
7579 dloop->orig_loop_num = 0;
7580 }
7581 }
7512 pop_cfun (); 7582 pop_cfun ();
7583
7584 if (moved_orig_loop_num)
7585 {
7586 FOR_EACH_VEC_ELT (bbs, i, bb)
7587 {
7588 gimple *g = find_loop_dist_alias (bb);
7589 if (g == NULL)
7590 continue;
7591
7592 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7593 gcc_assert (orig_loop_num
7594 && (unsigned) orig_loop_num < vec_safe_length (larray));
7595 if (moved_orig_loop_num[orig_loop_num] == 2)
7596 {
7597 /* If we have moved both loops with this orig_loop_num into
7598 dest_cfun and the LOOP_DIST_ALIAS call is being moved there
7599 too, update the first argument. */
7600 gcc_assert ((*larray)[dloop->orig_loop_num] != NULL
7601 && (get_loop (saved_cfun, dloop->orig_loop_num)
7602 == NULL));
7603 tree t = build_int_cst (integer_type_node,
7604 (*larray)[dloop->orig_loop_num]->num);
7605 gimple_call_set_arg (g, 0, t);
7606 update_stmt (g);
7607 /* Make sure the following loop will not update it. */
7608 moved_orig_loop_num[orig_loop_num] = 0;
7609 }
7610 else
7611 /* Otherwise at least one of the loops stayed in saved_cfun.
7612 Remove the LOOP_DIST_ALIAS call. */
7613 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7614 }
7615 FOR_EACH_BB_FN (bb, saved_cfun)
7616 {
7617 gimple *g = find_loop_dist_alias (bb);
7618 if (g == NULL)
7619 continue;
7620 int orig_loop_num = tree_to_shwi (gimple_call_arg (g, 0));
7621 gcc_assert (orig_loop_num
7622 && (unsigned) orig_loop_num < vec_safe_length (larray));
7623 if (moved_orig_loop_num[orig_loop_num])
7624 /* LOOP_DIST_ALIAS call remained in saved_cfun, if at least one
7625 of the corresponding loops was moved, remove it. */
7626 fold_loop_internal_call (g, gimple_call_arg (g, 1));
7627 }
7628 XDELETEVEC (moved_orig_loop_num);
7629 }
7630 ggc_free (larray);
7513 7631
7514 /* Move blocks from BBS into DEST_CFUN. */ 7632 /* Move blocks from BBS into DEST_CFUN. */
7515 gcc_assert (bbs.length () >= 2); 7633 gcc_assert (bbs.length () >= 2);
7516 after = dest_cfun->cfg->x_entry_block_ptr; 7634 after = dest_cfun->cfg->x_entry_block_ptr;
7517 hash_map<tree, tree> vars_map; 7635 hash_map<tree, tree> vars_map;
7612 various CFG manipulation function get to the right CFG. 7730 various CFG manipulation function get to the right CFG.
7613 7731
7614 FIXME, this is silly. The CFG ought to become a parameter to 7732 FIXME, this is silly. The CFG ought to become a parameter to
7615 these helpers. */ 7733 these helpers. */
7616 push_cfun (dest_cfun); 7734 push_cfun (dest_cfun);
7617 make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU); 7735 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = entry_bb->count;
7736 make_single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), entry_bb, EDGE_FALLTHRU);
7618 if (exit_bb) 7737 if (exit_bb)
7619 make_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0); 7738 {
7739 make_single_succ_edge (exit_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
7740 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = exit_bb->count;
7741 }
7742 else
7743 EXIT_BLOCK_PTR_FOR_FN (cfun)->count = profile_count::zero ();
7620 pop_cfun (); 7744 pop_cfun ();
7621 7745
7622 /* Back in the original function, the SESE region has disappeared, 7746 /* Back in the original function, the SESE region has disappeared,
7623 create a new basic block in its place. */ 7747 create a new basic block in its place. */
7624 bb = create_empty_bb (entry_pred[0]); 7748 bb = create_empty_bb (entry_pred[0]);
8025 if (loop->any_estimate) 8149 if (loop->any_estimate)
8026 { 8150 {
8027 fprintf (file, ", estimate = "); 8151 fprintf (file, ", estimate = ");
8028 print_decu (loop->nb_iterations_estimate, file); 8152 print_decu (loop->nb_iterations_estimate, file);
8029 } 8153 }
8154 if (loop->unroll)
8155 fprintf (file, ", unroll = %d", loop->unroll);
8030 fprintf (file, ")\n"); 8156 fprintf (file, ")\n");
8031 8157
8032 /* Print loop's body. */ 8158 /* Print loop's body. */
8033 if (verbosity >= 1) 8159 if (verbosity >= 1)
8034 { 8160 {
8163 tree fndecl = NULL_TREE; 8289 tree fndecl = NULL_TREE;
8164 int call_flags = 0; 8290 int call_flags = 0;
8165 8291
8166 /* Eh exception not handled internally terminates execution of the whole 8292 /* Eh exception not handled internally terminates execution of the whole
8167 function. */ 8293 function. */
8168 if (stmt_can_throw_external (t)) 8294 if (stmt_can_throw_external (cfun, t))
8169 return true; 8295 return true;
8170 8296
8171 /* NORETURN and LONGJMP calls already have an edge to exit. 8297 /* NORETURN and LONGJMP calls already have an edge to exit.
8172 CONST and PURE calls do not need one. 8298 CONST and PURE calls do not need one.
8173 We don't currently check for CONST and PURE here, although 8299 We don't currently check for CONST and PURE here, although
8181 call_flags = gimple_call_flags (t); 8307 call_flags = gimple_call_flags (t);
8182 } 8308 }
8183 8309
8184 if (is_gimple_call (t) 8310 if (is_gimple_call (t)
8185 && fndecl 8311 && fndecl
8186 && DECL_BUILT_IN (fndecl) 8312 && fndecl_built_in_p (fndecl)
8187 && (call_flags & ECF_NOTHROW) 8313 && (call_flags & ECF_NOTHROW)
8188 && !(call_flags & ECF_RETURNS_TWICE) 8314 && !(call_flags & ECF_RETURNS_TWICE)
8189 /* fork() doesn't really return twice, but the effect of 8315 /* fork() doesn't really return twice, but the effect of
8190 wrapping it in __gcov_fork() which calls __gcov_flush() 8316 wrapping it in __gcov_fork() which calls __gcov_flush()
8191 and clears the counters before forking has the same 8317 and clears the counters before forking has the same
8192 effect as returning twice. Force a fake edge. */ 8318 effect as returning twice. Force a fake edge. */
8193 && !(DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL 8319 && !fndecl_built_in_p (fndecl, BUILT_IN_FORK))
8194 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FORK))
8195 return false; 8320 return false;
8196 8321
8197 if (is_gimple_call (t)) 8322 if (is_gimple_call (t))
8198 { 8323 {
8199 edge_iterator ei; 8324 edge_iterator ei;
8480 bool changed = false; 8605 bool changed = false;
8481 edge e; 8606 edge e;
8482 edge_iterator ei; 8607 edge_iterator ei;
8483 gimple *stmt = last_stmt (bb); 8608 gimple *stmt = last_stmt (bb);
8484 8609
8485 if (stmt && stmt_can_throw_internal (stmt)) 8610 if (stmt && stmt_can_throw_internal (cfun, stmt))
8486 return false; 8611 return false;
8487 8612
8488 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) 8613 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
8489 { 8614 {
8490 if (e->flags & EDGE_EH) 8615 if (e->flags & EDGE_EH)
8689 += estimate_num_insns (gsi_stmt (i), 8814 += estimate_num_insns (gsi_stmt (i),
8690 &eni_time_weights) * bb->count.to_gcov_type (); 8815 &eni_time_weights) * bb->count.to_gcov_type ();
8691 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED) 8816 else if (profile_status_for_fn (cfun) == PROFILE_GUESSED)
8692 record->time[after_pass] 8817 record->time[after_pass]
8693 += estimate_num_insns (gsi_stmt (i), 8818 += estimate_num_insns (gsi_stmt (i),
8694 &eni_time_weights) * bb->frequency; 8819 &eni_time_weights) * bb->count.to_frequency (cfun);
8695 } 8820 }
8696 } 8821 }
8697 8822
8698 struct cfg_hooks gimple_cfg_hooks = { 8823 struct cfg_hooks gimple_cfg_hooks = {
8699 "gimple", 8824 "gimple",
8841 /* Create conditionally executed block. */ 8966 /* Create conditionally executed block. */
8842 new_bb = create_empty_bb (bb); 8967 new_bb = create_empty_bb (bb);
8843 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE); 8968 edge e = make_edge (bb, new_bb, EDGE_TRUE_VALUE);
8844 e->probability = prob; 8969 e->probability = prob;
8845 new_bb->count = e->count (); 8970 new_bb->count = e->count ();
8846 new_bb->frequency = prob.apply (bb->frequency);
8847 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU); 8971 make_single_succ_edge (new_bb, fall->dest, EDGE_FALLTHRU);
8848 8972
8849 /* Fix edge for split bb. */ 8973 /* Fix edge for split bb. */
8850 fall->flags = EDGE_FALSE_VALUE; 8974 fall->flags = EDGE_FALSE_VALUE;
8851 fall->probability -= e->probability; 8975 fall->probability -= e->probability;
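A sketch of the control flow the hunk above produces, given as a comment rather than code: the original fallthrough edge is reused as the false edge, and the new block runs only when the condition is true.

/*
              bb
       true /    \ false   (former fallthrough edge, reused)
           v      v
        new_bb    |
            \     |
   fallthru  \    |
              v   v
            fall->dest
*/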
8873 { 8997 {
8874 tree ret; 8998 tree ret;
8875 location_t loc = gimple_location (gsi_stmt (*gsi)); 8999 location_t loc = gimple_location (gsi_stmt (*gsi));
8876 9000
8877 ret = fold_build3_loc (loc, code, type, a, b, c); 9001 ret = fold_build3_loc (loc, code, type, a, b, c);
8878 STRIP_NOPS (ret);
8879
8880 return force_gimple_operand_gsi (gsi, ret, true, NULL, true, 9002 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8881 GSI_SAME_STMT); 9003 GSI_SAME_STMT);
8882 } 9004 }
8883 9005
8884 /* Build a binary operation and gimplify it. Emit code before GSI. 9006 /* Build a binary operation and gimplify it. Emit code before GSI.
8889 tree type, tree a, tree b) 9011 tree type, tree a, tree b)
8890 { 9012 {
8891 tree ret; 9013 tree ret;
8892 9014
8893 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b); 9015 ret = fold_build2_loc (gimple_location (gsi_stmt (*gsi)), code, type, a, b);
8894 STRIP_NOPS (ret);
8895
8896 return force_gimple_operand_gsi (gsi, ret, true, NULL, true, 9016 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8897 GSI_SAME_STMT); 9017 GSI_SAME_STMT);
8898 } 9018 }
8899 9019
8900 /* Build a unary operation and gimplify it. Emit code before GSI. 9020 /* Build a unary operation and gimplify it. Emit code before GSI.
8905 tree a) 9025 tree a)
8906 { 9026 {
8907 tree ret; 9027 tree ret;
8908 9028
8909 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a); 9029 ret = fold_build1_loc (gimple_location (gsi_stmt (*gsi)), code, type, a);
8910 STRIP_NOPS (ret);
8911
8912 return force_gimple_operand_gsi (gsi, ret, true, NULL, true, 9030 return force_gimple_operand_gsi (gsi, ret, true, NULL, true,
8913 GSI_SAME_STMT); 9031 GSI_SAME_STMT);
8914 } 9032 }
8915 9033
8916 9034
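A hedged usage sketch for the helpers above: gimplify_build2 folds the operation at the current statement's location and force-gimplifies the result, inserting any generated statements before GSI; with this change the folded tree is handed to force_gimple_operand_gsi without the NOP-stripping step. The wrapper below and its name are hypothetical.

/* Hypothetical helper inside a GIMPLE pass: emit a + b before GSI and
   return the gimplified result.  Only gimplify_build2 is real here.  */
static tree
emit_sum_before (gimple_stmt_iterator *gsi, tree a, tree b)
{
  return gimplify_build2 (gsi, PLUS_EXPR, TREE_TYPE (a), a, b);
}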
9014 tree *lhs, tree *rhs) 9132 tree *lhs, tree *rhs)
9015 { 9133 {
9016 tree type = TREE_TYPE (index); 9134 tree type = TREE_TYPE (index);
9017 tree utype = unsigned_type_for (type); 9135 tree utype = unsigned_type_for (type);
9018 9136
9019 low = fold_convert (type, low); 9137 low = fold_convert (utype, low);
9020 high = fold_convert (type, high); 9138 high = fold_convert (utype, high);
9021 9139
9022 tree tmp = make_ssa_name (type); 9140 gimple_seq seq = NULL;
9023 gassign *sub1 9141 index = gimple_convert (&seq, utype, index);
9024 = gimple_build_assign (tmp, MINUS_EXPR, index, low); 9142 *lhs = gimple_build (&seq, MINUS_EXPR, utype, index, low);
9025 9143 *rhs = const_binop (MINUS_EXPR, utype, high, low);
9026 *lhs = make_ssa_name (utype); 9144
9027 gassign *a = gimple_build_assign (*lhs, NOP_EXPR, tmp);
9028
9029 *rhs = fold_build2 (MINUS_EXPR, utype, high, low);
9030 gimple_stmt_iterator gsi = gsi_last_bb (bb); 9145 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9031 gsi_insert_before (&gsi, sub1, GSI_SAME_STMT); 9146 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
9032 gsi_insert_before (&gsi, a, GSI_SAME_STMT); 9147 }
9033 } 9148
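The rewritten hunk builds the classic unsigned range check: once index, low and high are in the unsigned type, the two-sided test low <= index && index <= high collapses to the single comparison (index - low) <= (high - low). A hedged, plain-C illustration of the same trick (not GCC code):

/* Illustrative only: one unsigned compare replaces two signed ones.  */
int
is_lower_alpha (int c)
{
  return (unsigned) (c - 'a') <= (unsigned) ('z' - 'a');
}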
9149 /* Return the basic block that belongs to label numbered INDEX
9150 of a switch statement. */
9151
9152 basic_block
9153 gimple_switch_label_bb (function *ifun, gswitch *gs, unsigned index)
9154 {
9155 return label_to_block (ifun, CASE_LABEL (gimple_switch_label (gs, index)));
9156 }
9157
9158 /* Return the default basic block of a switch statement. */
9159
9160 basic_block
9161 gimple_switch_default_bb (function *ifun, gswitch *gs)
9162 {
9163 return gimple_switch_label_bb (ifun, gs, 0);
9164 }
9165
9166 /* Return the edge that belongs to label numbered INDEX
9167 of a switch statement. */
9168
9169 edge
9170 gimple_switch_edge (function *ifun, gswitch *gs, unsigned index)
9171 {
9172 return find_edge (gimple_bb (gs), gimple_switch_label_bb (ifun, gs, index));
9173 }
9174
9175 /* Return the default edge of a switch statement. */
9176
9177 edge
9178 gimple_switch_default_edge (function *ifun, gswitch *gs)
9179 {
9180 return gimple_switch_edge (ifun, gs, 0);
9181 }
9182
9034 9183
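A hedged usage sketch for the new accessors: label index 0 is the default, so walks over the case labels typically start at 1. The walker below and its body are hypothetical; gimple_switch_num_labels and the accessors defined above are existing interfaces.

/* Hypothetical walker over a GIMPLE switch using the accessors above.  */
static void
visit_switch_targets (function *fun, gswitch *sw)
{
  basic_block default_bb = gimple_switch_default_bb (fun, sw);
  for (unsigned i = 1; i < gimple_switch_num_labels (sw); ++i)
    {
      basic_block case_bb = gimple_switch_label_bb (fun, sw, i);
      edge case_edge = gimple_switch_edge (fun, sw, i);
      /* ... act on case_bb / case_edge; default_bb covers the rest.  */
      (void) case_bb;
      (void) case_edge;
    }
  (void) default_bb;
}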
9035 /* Emit return warnings. */ 9184 /* Emit return warnings. */
9036 9185
9037 namespace { 9186 namespace {
9038 9187
9082 { 9231 {
9083 last = last_stmt (e->src); 9232 last = last_stmt (e->src);
9084 if ((gimple_code (last) == GIMPLE_RETURN 9233 if ((gimple_code (last) == GIMPLE_RETURN
9085 || gimple_call_builtin_p (last, BUILT_IN_RETURN)) 9234 || gimple_call_builtin_p (last, BUILT_IN_RETURN))
9086 && location == UNKNOWN_LOCATION 9235 && location == UNKNOWN_LOCATION
9087 && (location = gimple_location (last)) != UNKNOWN_LOCATION 9236 && ((location = LOCATION_LOCUS (gimple_location (last)))
9237 != UNKNOWN_LOCATION)
9088 && !optimize) 9238 && !optimize)
9089 break; 9239 break;
9090 /* When optimizing, replace return stmts in noreturn functions 9240 /* When optimizing, replace return stmts in noreturn functions
9091 with __builtin_unreachable () call. */ 9241 with __builtin_unreachable () call. */
9092 if (optimize && gimple_code (last) == GIMPLE_RETURN) 9242 if (optimize && gimple_code (last) == GIMPLE_RETURN)
9106 warning_at (location, 0, "%<noreturn%> function does return"); 9256 warning_at (location, 0, "%<noreturn%> function does return");
9107 } 9257 }
9108 9258
9109 /* If we see "return;" in some basic block, then we do reach the end 9259 /* If we see "return;" in some basic block, then we do reach the end
9110 without returning a value. */ 9260 without returning a value. */
9111 else if (warn_return_type 9261 else if (warn_return_type > 0
9112 && !TREE_NO_WARNING (fun->decl) 9262 && !TREE_NO_WARNING (fun->decl)
9113 && EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (fun)->preds) > 0
9114 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl)))) 9263 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fun->decl))))
9115 { 9264 {
9116 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds) 9265 FOR_EACH_EDGE (e, ei, EXIT_BLOCK_PTR_FOR_FN (fun)->preds)
9117 { 9266 {
9118 gimple *last = last_stmt (e->src); 9267 gimple *last = last_stmt (e->src);
9120 if (return_stmt 9269 if (return_stmt
9121 && gimple_return_retval (return_stmt) == NULL 9270 && gimple_return_retval (return_stmt) == NULL
9122 && !gimple_no_warning_p (last)) 9271 && !gimple_no_warning_p (last))
9123 { 9272 {
9124 location = gimple_location (last); 9273 location = gimple_location (last);
9125 if (location == UNKNOWN_LOCATION) 9274 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9126 location = fun->function_end_locus; 9275 location = fun->function_end_locus;
9127 warning_at (location, OPT_Wreturn_type, "control reaches end of non-void function"); 9276 warning_at (location, OPT_Wreturn_type,
9277 "control reaches end of non-void function");
9128 TREE_NO_WARNING (fun->decl) = 1; 9278 TREE_NO_WARNING (fun->decl) = 1;
9129 break; 9279 break;
9130 } 9280 }
9131 } 9281 }
9282 /* The C++ FE turns fallthrough from the end of non-void function
9282 /* The C++ FE turns fallthrough from the end of a non-void function
9283 into __builtin_unreachable () call with BUILTINS_LOCATION.
9284 Recognize those too. */
9285 basic_block bb;
9286 if (!TREE_NO_WARNING (fun->decl))
9287 FOR_EACH_BB_FN (bb, fun)
9288 if (EDGE_COUNT (bb->succs) == 0)
9289 {
9290 gimple *last = last_stmt (bb);
9291 const enum built_in_function ubsan_missing_ret
9292 = BUILT_IN_UBSAN_HANDLE_MISSING_RETURN;
9293 if (last
9294 && ((LOCATION_LOCUS (gimple_location (last))
9295 == BUILTINS_LOCATION
9296 && gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE))
9297 || gimple_call_builtin_p (last, ubsan_missing_ret)))
9298 {
9299 gimple_stmt_iterator gsi = gsi_for_stmt (last);
9300 gsi_prev_nondebug (&gsi);
9301 gimple *prev = gsi_stmt (gsi);
9302 if (prev == NULL)
9303 location = UNKNOWN_LOCATION;
9304 else
9305 location = gimple_location (prev);
9306 if (LOCATION_LOCUS (location) == UNKNOWN_LOCATION)
9307 location = fun->function_end_locus;
9308 warning_at (location, OPT_Wreturn_type,
9309 "control reaches end of non-void function");
9310 TREE_NO_WARNING (fun->decl) = 1;
9311 break;
9312 }
9313 }
9132 } 9314 }
9133 return 0; 9315 return 0;
9134 } 9316 }
9135 9317
9136 } // anon namespace 9318 } // anon namespace
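A hedged example of source this pass warns about; with the block added above, the warning is also issued when the C++ front end has already replaced the fall-off-the-end with a __builtin_unreachable () call at BUILTINS_LOCATION, or with the ubsan missing-return handler. The function below is hypothetical input, not GCC code.

/* Compiled with -Wreturn-type, this yields
   "control reaches end of non-void function" for the path where x <= 0.  */
int
pick (int x)
{
  if (x > 0)
    return x;
  /* no return on this path */
}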
9262 gimple_stmt_iterator gsi; 9444 gimple_stmt_iterator gsi;
9263 int todo = 0; 9445 int todo = 0;
9264 cgraph_node *node = cgraph_node::get (current_function_decl); 9446 cgraph_node *node = cgraph_node::get (current_function_decl);
9265 profile_count num = node->count; 9447 profile_count num = node->count;
9266 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count; 9448 profile_count den = ENTRY_BLOCK_PTR_FOR_FN (cfun)->count;
9267 bool scale = num.initialized_p () 9449 bool scale = num.initialized_p () && !(num == den);
9268 && (den > 0 || num == profile_count::zero ())
9269 && !(num == den);
9270 9450
9271 if (scale) 9451 if (scale)
9272 { 9452 {
9453 profile_count::adjust_for_ipa_scaling (&num, &den);
9273 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count; 9454 ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = node->count;
9274 EXIT_BLOCK_PTR_FOR_FN (cfun)->count 9455 EXIT_BLOCK_PTR_FOR_FN (cfun)->count
9275 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den); 9456 = EXIT_BLOCK_PTR_FOR_FN (cfun)->count.apply_scale (num, den);
9276 } 9457 }
9277 9458
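A hedged sketch of the scaling arithmetic applied above, using plain integers rather than the real profile_count type: after adjust_for_ipa_scaling normalizes the pair, every count in the body is effectively multiplied by num/den.

/* Illustrative only: not profile_count semantics, just the num/den ratio
   being applied to a block count.  */
static long
scaled_count (long count, long num, long den)
{
  return den != 0 ? count * num / den : count;
}
/* e.g. scaled_count (40, 50, 100) == 20 */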
9366 gimple_stmt_iterator gsi = gsi_last_bb (bb); 9547 gimple_stmt_iterator gsi = gsi_last_bb (bb);
9367 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT); 9548 gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
9368 if (!cfun->after_inlining) 9549 if (!cfun->after_inlining)
9369 { 9550 {
9370 gcall *call_stmt = dyn_cast <gcall *> (stmt); 9551 gcall *call_stmt = dyn_cast <gcall *> (stmt);
9371 int freq
9372 = compute_call_stmt_bb_frequency (current_function_decl,
9373 bb);
9374 node->create_edge (cgraph_node::get_create (fndecl), 9552 node->create_edge (cgraph_node::get_create (fndecl),
9375 call_stmt, bb->count, freq); 9553 call_stmt, bb->count);
9376 } 9554 }
9377 } 9555 }
9378 } 9556 }
9379 } 9557 }
9380 if (scale) 9558 if (scale)