comparison gcc/tree-ssa-pre.c @ 63:b7f97abdc517 gcc-4.6-20100522

update gcc from gcc-4.5.0 to gcc-4.6
author ryoma <e075725@ie.u-ryukyu.ac.jp>
date Mon, 24 May 2010 12:47:05 +0900
parents 77e2b8dfacca
children f6334be47118
comparison of 56:3c8a44c06a95 with 63:b7f97abdc517
1 /* SSA-PRE for trees. 1 /* SSA-PRE for trees.
2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 2 Copyright (C) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
3 Free Software Foundation, Inc. 3 Free Software Foundation, Inc.
4 Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher 4 Contributed by Daniel Berlin <dan@dberlin.org> and Steven Bosscher
5 <stevenb@suse.de> 5 <stevenb@suse.de>
6 6
7 This file is part of GCC. 7 This file is part of GCC.
22 22
23 #include "config.h" 23 #include "config.h"
24 #include "system.h" 24 #include "system.h"
25 #include "coretypes.h" 25 #include "coretypes.h"
26 #include "tm.h" 26 #include "tm.h"
27 #include "ggc.h"
28 #include "tree.h" 27 #include "tree.h"
29 #include "basic-block.h" 28 #include "basic-block.h"
30 #include "diagnostic.h" 29 #include "diagnostic.h"
30 #include "tree-pretty-print.h"
31 #include "gimple-pretty-print.h"
31 #include "tree-inline.h" 32 #include "tree-inline.h"
32 #include "tree-flow.h" 33 #include "tree-flow.h"
33 #include "gimple.h" 34 #include "gimple.h"
34 #include "tree-dump.h" 35 #include "tree-dump.h"
35 #include "timevar.h" 36 #include "timevar.h"
36 #include "fibheap.h" 37 #include "fibheap.h"
37 #include "hashtab.h" 38 #include "hashtab.h"
38 #include "tree-iterator.h" 39 #include "tree-iterator.h"
39 #include "real.h"
40 #include "alloc-pool.h" 40 #include "alloc-pool.h"
41 #include "obstack.h" 41 #include "obstack.h"
42 #include "tree-pass.h" 42 #include "tree-pass.h"
43 #include "flags.h" 43 #include "flags.h"
44 #include "bitmap.h" 44 #include "bitmap.h"
202 return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2)); 202 return vn_nary_op_eq (PRE_EXPR_NARY (e1), PRE_EXPR_NARY (e2));
203 case REFERENCE: 203 case REFERENCE:
204 return vn_reference_eq (PRE_EXPR_REFERENCE (e1), 204 return vn_reference_eq (PRE_EXPR_REFERENCE (e1),
205 PRE_EXPR_REFERENCE (e2)); 205 PRE_EXPR_REFERENCE (e2));
206 default: 206 default:
207 abort(); 207 gcc_unreachable ();
208 } 208 }
209 } 209 }
210 210
211 static hashval_t 211 static hashval_t
212 pre_expr_hash (const void *p1) 212 pre_expr_hash (const void *p1)
215 switch (e->kind) 215 switch (e->kind)
216 { 216 {
217 case CONSTANT: 217 case CONSTANT:
218 return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e)); 218 return vn_hash_constant_with_type (PRE_EXPR_CONSTANT (e));
219 case NAME: 219 case NAME:
220 return iterative_hash_hashval_t (SSA_NAME_VERSION (PRE_EXPR_NAME (e)), 0); 220 return SSA_NAME_VERSION (PRE_EXPR_NAME (e));
221 case NARY: 221 case NARY:
222 return PRE_EXPR_NARY (e)->hashcode; 222 return PRE_EXPR_NARY (e)->hashcode;
223 case REFERENCE: 223 case REFERENCE:
224 return PRE_EXPR_REFERENCE (e)->hashcode; 224 return PRE_EXPR_REFERENCE (e)->hashcode;
225 default: 225 default:
226 abort (); 226 gcc_unreachable ();
227 } 227 }
228 } 228 }
229 229
230 230
231 /* Next global expression id number. */ 231 /* Next global expression id number. */
234 /* Mapping from expression to id number we can use in bitmap sets. */ 234 /* Mapping from expression to id number we can use in bitmap sets. */
235 DEF_VEC_P (pre_expr); 235 DEF_VEC_P (pre_expr);
236 DEF_VEC_ALLOC_P (pre_expr, heap); 236 DEF_VEC_ALLOC_P (pre_expr, heap);
237 static VEC(pre_expr, heap) *expressions; 237 static VEC(pre_expr, heap) *expressions;
238 static htab_t expression_to_id; 238 static htab_t expression_to_id;
239 static VEC(unsigned, heap) *name_to_id;
239 240
240 /* Allocate an expression id for EXPR. */ 241 /* Allocate an expression id for EXPR. */
241 242
242 static inline unsigned int 243 static inline unsigned int
243 alloc_expression_id (pre_expr expr) 244 alloc_expression_id (pre_expr expr)
245 void **slot; 246 void **slot;
246 /* Make sure we won't overflow. */ 247 /* Make sure we won't overflow. */
247 gcc_assert (next_expression_id + 1 > next_expression_id); 248 gcc_assert (next_expression_id + 1 > next_expression_id);
248 expr->id = next_expression_id++; 249 expr->id = next_expression_id++;
249 VEC_safe_push (pre_expr, heap, expressions, expr); 250 VEC_safe_push (pre_expr, heap, expressions, expr);
250 slot = htab_find_slot (expression_to_id, expr, INSERT); 251 if (expr->kind == NAME)
251 gcc_assert (!*slot); 252 {
252 *slot = expr; 253 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
254 /* VEC_safe_grow_cleared allocates no headroom. Avoid frequent
255 re-allocations by using VEC_reserve upfront. There is no
256 VEC_quick_grow_cleared unfortunately. */
257 VEC_reserve (unsigned, heap, name_to_id, num_ssa_names);
258 VEC_safe_grow_cleared (unsigned, heap, name_to_id, num_ssa_names);
259 gcc_assert (VEC_index (unsigned, name_to_id, version) == 0);
260 VEC_replace (unsigned, name_to_id, version, expr->id);
261 }
262 else
263 {
264 slot = htab_find_slot (expression_to_id, expr, INSERT);
265 gcc_assert (!*slot);
266 *slot = expr;
267 }
253 return next_expression_id - 1; 268 return next_expression_id - 1;
254 } 269 }
255 270
256 /* Return the expression id for tree EXPR. */ 271 /* Return the expression id for tree EXPR. */
257 272
264 static inline unsigned int 279 static inline unsigned int
265 lookup_expression_id (const pre_expr expr) 280 lookup_expression_id (const pre_expr expr)
266 { 281 {
267 void **slot; 282 void **slot;
268 283
269 slot = htab_find_slot (expression_to_id, expr, NO_INSERT); 284 if (expr->kind == NAME)
270 if (!slot) 285 {
271 return 0; 286 unsigned version = SSA_NAME_VERSION (PRE_EXPR_NAME (expr));
272 return ((pre_expr)*slot)->id; 287 if (VEC_length (unsigned, name_to_id) <= version)
288 return 0;
289 return VEC_index (unsigned, name_to_id, version);
290 }
291 else
292 {
293 slot = htab_find_slot (expression_to_id, expr, NO_INSERT);
294 if (!slot)
295 return 0;
296 return ((pre_expr)*slot)->id;
297 }
273 } 298 }
274 299
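The two hunks above replace the expression_to_id hash table lookup for NAME expressions with a direct name_to_id vector, so finding the id of an SSA name becomes a plain index by SSA_NAME_VERSION instead of a hash probe. Below is a minimal standalone sketch of that fast path, assuming plain C arrays in place of GCC's VEC and htab types; the toy expr struct and helper names are illustrative only. NARY and REFERENCE expressions keep using the generic hash table, as the diff shows.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Toy stand-in for a NAME pre_expr: just an SSA version and an id.  */
struct expr { unsigned version; unsigned id; };

static unsigned *name_to_id;       /* indexed by SSA version, 0 = no id */
static unsigned name_to_id_len;
static unsigned next_expression_id = 1;

/* Grow the version-indexed table so VERSION is addressable, cleared.  */
static void
grow_name_to_id (unsigned version)
{
  if (version < name_to_id_len)
    return;
  unsigned newlen = version + 1;
  name_to_id = realloc (name_to_id, newlen * sizeof *name_to_id);
  memset (name_to_id + name_to_id_len, 0,
          (newlen - name_to_id_len) * sizeof *name_to_id);
  name_to_id_len = newlen;
}

/* O(1) lookup: no hashing, just index by the SSA version.  */
static unsigned
lookup_name_id (unsigned version)
{
  return version < name_to_id_len ? name_to_id[version] : 0;
}

/* Allocate an id and record it under the name's version.  */
static unsigned
alloc_name_id (struct expr *e)
{
  e->id = next_expression_id++;
  grow_name_to_id (e->version);
  name_to_id[e->version] = e->id;
  return e->id;
}

int
main (void)
{
  struct expr a = { 7, 0 };
  alloc_name_id (&a);
  printf ("id for version 7: %u\n", lookup_name_id (7));
  printf ("id for version 3: %u (unknown)\n", lookup_name_id (3));
  free (name_to_id);
  return 0;
}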
275 /* Return the existing expression id for EXPR, or create one if one 300 /* Return the existing expression id for EXPR, or create one if one
276 does not exist yet. */ 301 does not exist yet. */
277 302
306 /* Given an SSA_NAME NAME, get or create a pre_expr to represent it. */ 331 /* Given an SSA_NAME NAME, get or create a pre_expr to represent it. */
307 332
308 static pre_expr 333 static pre_expr
309 get_or_alloc_expr_for_name (tree name) 334 get_or_alloc_expr_for_name (tree name)
310 { 335 {
311 pre_expr result = (pre_expr) pool_alloc (pre_expr_pool); 336 struct pre_expr_d expr;
337 pre_expr result;
312 unsigned int result_id; 338 unsigned int result_id;
313 339
340 expr.kind = NAME;
341 expr.id = 0;
342 PRE_EXPR_NAME (&expr) = name;
343 result_id = lookup_expression_id (&expr);
344 if (result_id != 0)
345 return expression_for_id (result_id);
346
347 result = (pre_expr) pool_alloc (pre_expr_pool);
314 result->kind = NAME; 348 result->kind = NAME;
315 result->id = 0;
316 PRE_EXPR_NAME (result) = name; 349 PRE_EXPR_NAME (result) = name;
317 result_id = lookup_expression_id (result); 350 alloc_expression_id (result);
318 if (result_id != 0)
319 {
320 pool_free (pre_expr_pool, result);
321 result = expression_for_id (result_id);
322 return result;
323 }
324 get_or_alloc_expression_id (result);
325 return result; 351 return result;
326 } 352 }
327 353
328 static bool in_fre = false; 354 static bool in_fre = false;
329 355
380 406
381 /* A cache for value_dies_in_block_x. */ 407 /* A cache for value_dies_in_block_x. */
382 bitmap expr_dies; 408 bitmap expr_dies;
383 409
384 /* True if we have visited this block during ANTIC calculation. */ 410 /* True if we have visited this block during ANTIC calculation. */
385 unsigned int visited:1; 411 unsigned int visited : 1;
386 412
387 /* True if we have deferred processing this block during ANTIC 413 /* True if we have deferred processing this block during ANTIC
388 calculation until its successor is processed. */ 414 calculation until its successor is processed. */
389 unsigned int deferred : 1; 415 unsigned int deferred : 1;
416
417 /* True when the block contains a call that might not return. */
418 unsigned int contains_may_not_return_call : 1;
390 } *bb_value_sets_t; 419 } *bb_value_sets_t;
391 420
392 #define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen 421 #define EXP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->exp_gen
393 #define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen 422 #define PHI_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->phi_gen
394 #define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen 423 #define TMP_GEN(BB) ((bb_value_sets_t) ((BB)->aux))->tmp_gen
397 #define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in 426 #define PA_IN(BB) ((bb_value_sets_t) ((BB)->aux))->pa_in
398 #define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets 427 #define NEW_SETS(BB) ((bb_value_sets_t) ((BB)->aux))->new_sets
399 #define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies 428 #define EXPR_DIES(BB) ((bb_value_sets_t) ((BB)->aux))->expr_dies
400 #define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited 429 #define BB_VISITED(BB) ((bb_value_sets_t) ((BB)->aux))->visited
401 #define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred 430 #define BB_DEFERRED(BB) ((bb_value_sets_t) ((BB)->aux))->deferred
431 #define BB_MAY_NOTRETURN(BB) ((bb_value_sets_t) ((BB)->aux))->contains_may_not_return_call
402 432
403 433
404 /* Basic block list in postorder. */ 434 /* Basic block list in postorder. */
405 static int *postorder; 435 static int *postorder;
406 436
430 static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr); 460 static void bitmap_value_insert_into_set (bitmap_set_t, pre_expr);
431 static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr); 461 static void bitmap_value_replace_in_set (bitmap_set_t, pre_expr);
432 static void bitmap_set_copy (bitmap_set_t, bitmap_set_t); 462 static void bitmap_set_copy (bitmap_set_t, bitmap_set_t);
433 static bool bitmap_set_contains_value (bitmap_set_t, unsigned int); 463 static bool bitmap_set_contains_value (bitmap_set_t, unsigned int);
434 static void bitmap_insert_into_set (bitmap_set_t, pre_expr); 464 static void bitmap_insert_into_set (bitmap_set_t, pre_expr);
435 static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr, bool); 465 static void bitmap_insert_into_set_1 (bitmap_set_t, pre_expr,
466 unsigned int, bool);
436 static bitmap_set_t bitmap_set_new (void); 467 static bitmap_set_t bitmap_set_new (void);
437 static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *, 468 static tree create_expression_by_pieces (basic_block, pre_expr, gimple_seq *,
438 gimple, tree); 469 gimple, tree);
439 static tree find_or_generate_expression (basic_block, pre_expr, gimple_seq *, 470 static tree find_or_generate_expression (basic_block, pre_expr, gimple_seq *,
440 gimple); 471 gimple);
574 { 605 {
575 set = bitmap_set_new (); 606 set = bitmap_set_new ();
576 VEC_replace (bitmap_set_t, value_expressions, v, set); 607 VEC_replace (bitmap_set_t, value_expressions, v, set);
577 } 608 }
578 609
579 bitmap_insert_into_set_1 (set, e, true); 610 bitmap_insert_into_set_1 (set, e, v, true);
580 } 611 }
581 612
582 /* Create a new bitmap set and return it. */ 613 /* Create a new bitmap set and return it. */
583 614
584 static bitmap_set_t 615 static bitmap_set_t
632 } 663 }
633 } 664 }
634 665
635 static void 666 static void
636 bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr, 667 bitmap_insert_into_set_1 (bitmap_set_t set, pre_expr expr,
637 bool allow_constants) 668 unsigned int val, bool allow_constants)
638 { 669 {
639 unsigned int val = get_expr_value_id (expr);
640 if (allow_constants || !value_id_constant_p (val)) 670 if (allow_constants || !value_id_constant_p (val))
641 { 671 {
642 /* We specifically expect this and only this function to be able to 672 /* We specifically expect this and only this function to be able to
643 insert constants into a set. */ 673 insert constants into a set. */
644 bitmap_set_bit (set->values, val); 674 bitmap_set_bit (set->values, val);
649 /* Insert an expression EXPR into a bitmapped set. */ 679 /* Insert an expression EXPR into a bitmapped set. */
650 680
651 static void 681 static void
652 bitmap_insert_into_set (bitmap_set_t set, pre_expr expr) 682 bitmap_insert_into_set (bitmap_set_t set, pre_expr expr)
653 { 683 {
654 bitmap_insert_into_set_1 (set, expr, false); 684 bitmap_insert_into_set_1 (set, expr, get_expr_value_id (expr), false);
655 } 685 }
656 686
657 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */ 687 /* Copy a bitmapped set ORIG, into bitmapped set DEST. */
658 688
659 static void 689 static void
678 static VEC(pre_expr, heap) * 708 static VEC(pre_expr, heap) *
679 sorted_array_from_bitmap_set (bitmap_set_t set) 709 sorted_array_from_bitmap_set (bitmap_set_t set)
680 { 710 {
681 unsigned int i, j; 711 unsigned int i, j;
682 bitmap_iterator bi, bj; 712 bitmap_iterator bi, bj;
683 VEC(pre_expr, heap) *result = NULL; 713 VEC(pre_expr, heap) *result;
714
715 /* Pre-allocate roughly enough space for the array. */
716 result = VEC_alloc (pre_expr, heap, bitmap_count_bits (set->values));
684 717
685 FOR_EACH_VALUE_ID_IN_SET (set, i, bi) 718 FOR_EACH_VALUE_ID_IN_SET (set, i, bi)
686 { 719 {
687 /* The number of expressions having a given value is usually 720 /* The number of expressions having a given value is usually
688 relatively small. Thus, rather than making a vector of all 721 relatively small. Thus, rather than making a vector of all
857 static void 890 static void
858 bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr) 891 bitmap_value_insert_into_set (bitmap_set_t set, pre_expr expr)
859 { 892 {
860 unsigned int val = get_expr_value_id (expr); 893 unsigned int val = get_expr_value_id (expr);
861 894
895 #ifdef ENABLE_CHECKING
896 gcc_assert (expr->id == get_or_alloc_expression_id (expr));
897 #endif
898
899 /* Constant values are always considered to be part of the set. */
862 if (value_id_constant_p (val)) 900 if (value_id_constant_p (val))
863 return; 901 return;
864 902
865 if (!bitmap_set_contains_value (set, val)) 903 /* If the value membership changed, add the expression. */
866 bitmap_insert_into_set (set, expr); 904 if (bitmap_set_bit (set->values, val))
905 bitmap_set_bit (set->expressions, expr->id);
867 } 906 }
868 907
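The rewritten bitmap_value_insert_into_set relies on bitmap_set_bit reporting whether the bit actually changed, so the separate bitmap_set_contains_value probe of the old code is gone. A minimal sketch of that idiom with an ordinary machine-word bitset follows; it uses plain C, not GCC's sparse bitmaps.

#include <stdio.h>
#include <stdbool.h>
#include <stdint.h>

/* Set bit N in a 64-bit set and return true iff it was not set before;
   GCC's bitmap_set_bit has the same "did anything change" contract.  */
static bool
set_bit (uint64_t *set, unsigned n)
{
  uint64_t mask = (uint64_t) 1 << n;
  bool changed = (*set & mask) == 0;
  *set |= mask;
  return changed;
}

int
main (void)
{
  uint64_t values = 0, exprs = 0;
  unsigned val = 5;

  /* Add the expression only when its value was not yet in the set,
     mirroring the new bitmap_value_insert_into_set.  */
  if (set_bit (&values, val))
    set_bit (&exprs, 12);

  /* A second expression with the same value is not added.  */
  if (set_bit (&values, val))
    set_bit (&exprs, 13);

  printf ("expression bits: %#llx\n", (unsigned long long) exprs);
  return 0;
}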
869 /* Print out EXPR to outfile. */ 908 /* Print out EXPR to outfile. */
870 909
871 static void 910 static void
1017 static pre_expr 1056 static pre_expr
1018 get_or_alloc_expr_for_constant (tree constant) 1057 get_or_alloc_expr_for_constant (tree constant)
1019 { 1058 {
1020 unsigned int result_id; 1059 unsigned int result_id;
1021 unsigned int value_id; 1060 unsigned int value_id;
1022 pre_expr newexpr = (pre_expr) pool_alloc (pre_expr_pool); 1061 struct pre_expr_d expr;
1062 pre_expr newexpr;
1063
1064 expr.kind = CONSTANT;
1065 PRE_EXPR_CONSTANT (&expr) = constant;
1066 result_id = lookup_expression_id (&expr);
1067 if (result_id != 0)
1068 return expression_for_id (result_id);
1069
1070 newexpr = (pre_expr) pool_alloc (pre_expr_pool);
1023 newexpr->kind = CONSTANT; 1071 newexpr->kind = CONSTANT;
1024 PRE_EXPR_CONSTANT (newexpr) = constant; 1072 PRE_EXPR_CONSTANT (newexpr) = constant;
1025 result_id = lookup_expression_id (newexpr); 1073 alloc_expression_id (newexpr);
1026 if (result_id != 0)
1027 {
1028 pool_free (pre_expr_pool, newexpr);
1029 newexpr = expression_for_id (result_id);
1030 return newexpr;
1031 }
1032 value_id = get_or_alloc_constant_value_id (constant); 1074 value_id = get_or_alloc_constant_value_id (constant);
1033 get_or_alloc_expression_id (newexpr);
1034 add_to_value (value_id, newexpr); 1075 add_to_value (value_id, newexpr);
1035 return newexpr; 1076 return newexpr;
1036 } 1077 }
1037 1078
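Both get_or_alloc_expr_for_name and get_or_alloc_expr_for_constant now probe the id tables with a stack-allocated key and touch the allocation pool only on a miss, instead of pool-allocating first and freeing again when the expression already exists. A hedged sketch of that ordering in plain C is shown below; the interning table and malloc stand in for GCC's expression_to_id and alloc-pool and are not its API.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Toy interning table standing in for expression_to_id.  */
#define MAX_EXPRS 16
static char *interned[MAX_EXPRS];
static unsigned n_interned;
static unsigned n_allocs;          /* counts simulated pool allocations */

static int
lookup (const char *key)
{
  for (unsigned i = 0; i < n_interned; i++)
    if (strcmp (interned[i], key) == 0)
      return (int) i;
  return -1;
}

/* Probe with the caller's key first; allocate only when not present.  */
static const char *
get_or_alloc (const char *key)
{
  int id = lookup (key);           /* cheap probe, no allocation */
  if (id >= 0)
    return interned[id];

  char *copy = malloc (strlen (key) + 1);   /* the "pool_alloc" step */
  strcpy (copy, key);
  n_allocs++;
  interned[n_interned++] = copy;
  return copy;
}

int
main (void)
{
  get_or_alloc ("x_1");
  get_or_alloc ("x_1");            /* hit: no second allocation */
  get_or_alloc ("y_2");
  printf ("allocations: %u (for 3 requests)\n", n_allocs);
  return 0;
}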
1038 /* Given a value id V, find the actual tree representing the constant 1079 /* Given a value id V, find the actual tree representing the constant
1188 } 1229 }
1189 } 1230 }
1190 case REFERENCE: 1231 case REFERENCE:
1191 { 1232 {
1192 vn_reference_t ref = PRE_EXPR_REFERENCE (e); 1233 vn_reference_t ref = PRE_EXPR_REFERENCE (e);
1193 VEC (vn_reference_op_s, heap) *operands = ref->operands; 1234 tree folded;
1194 vn_reference_op_t op; 1235 if ((folded = fully_constant_vn_reference_p (ref)))
1195 1236 return get_or_alloc_expr_for_constant (folded);
1196 /* Try to simplify the translated expression if it is 1237 return e;
1197 a call to a builtin function with at most two arguments. */ 1238 }
1198 op = VEC_index (vn_reference_op_s, operands, 0);
1199 if (op->opcode == CALL_EXPR
1200 && TREE_CODE (op->op0) == ADDR_EXPR
1201 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1202 && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
1203 && VEC_length (vn_reference_op_s, operands) >= 2
1204 && VEC_length (vn_reference_op_s, operands) <= 3)
1205 {
1206 vn_reference_op_t arg0, arg1 = NULL;
1207 bool anyconst = false;
1208 arg0 = VEC_index (vn_reference_op_s, operands, 1);
1209 if (VEC_length (vn_reference_op_s, operands) > 2)
1210 arg1 = VEC_index (vn_reference_op_s, operands, 2);
1211 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1212 || (arg0->opcode == ADDR_EXPR
1213 && is_gimple_min_invariant (arg0->op0)))
1214 anyconst = true;
1215 if (arg1
1216 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1217 || (arg1->opcode == ADDR_EXPR
1218 && is_gimple_min_invariant (arg1->op0))))
1219 anyconst = true;
1220 if (anyconst)
1221 {
1222 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1223 arg1 ? 2 : 1,
1224 arg0->op0,
1225 arg1 ? arg1->op0 : NULL);
1226 if (folded
1227 && TREE_CODE (folded) == NOP_EXPR)
1228 folded = TREE_OPERAND (folded, 0);
1229 if (folded
1230 && is_gimple_min_invariant (folded))
1231 return get_or_alloc_expr_for_constant (folded);
1232 }
1233 }
1234 return e;
1235 }
1236 default: 1239 default:
1237 return e; 1240 return e;
1238 } 1241 }
1239 return e; 1242 return e;
1240 } 1243 }
1402 1405
1403 /* Build and insert the assignment of the end result to the temporary 1406 /* Build and insert the assignment of the end result to the temporary
1404 that we will return. */ 1407 that we will return. */
1405 if (!pretemp || exprtype != TREE_TYPE (pretemp)) 1408 if (!pretemp || exprtype != TREE_TYPE (pretemp))
1406 { 1409 {
1407 pretemp = create_tmp_var (exprtype, "pretmp"); 1410 pretemp = create_tmp_reg (exprtype, "pretmp");
1408 get_var_ann (pretemp); 1411 get_var_ann (pretemp);
1409 } 1412 }
1410 1413
1411 name = make_ssa_name (pretemp, gimple_build_nop ()); 1414 name = make_ssa_name (pretemp, gimple_build_nop ());
1412 VN_INFO_GET (name)->value_id = value_id; 1415 VN_INFO_GET (name)->value_id = value_id;
1428 return name; 1431 return name;
1429 } 1432 }
1430 1433
1431 1434
1432 1435
1436 static pre_expr
1437 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1438 basic_block pred, basic_block phiblock);
1433 1439
1434 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of 1440 /* Translate EXPR using phis in PHIBLOCK, so that it has the values of
1435 the phis in PRED. Return NULL if we can't find a leader for each part 1441 the phis in PRED. Return NULL if we can't find a leader for each part
1436 of the translated expression. */ 1442 of the translated expression. */
1437 1443
1438 static pre_expr 1444 static pre_expr
1439 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2, 1445 phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1440 basic_block pred, basic_block phiblock) 1446 basic_block pred, basic_block phiblock)
1441 { 1447 {
1442 pre_expr oldexpr = expr;
1443 pre_expr phitrans;
1444
1445 if (!expr)
1446 return NULL;
1447
1448 if (value_id_constant_p (get_expr_value_id (expr)))
1449 return expr;
1450
1451 phitrans = phi_trans_lookup (expr, pred);
1452 if (phitrans)
1453 return phitrans;
1454
1455 switch (expr->kind) 1448 switch (expr->kind)
1456 { 1449 {
1457 /* Constants contain no values that need translation. */
1458 case CONSTANT:
1459 return expr;
1460
1461 case NARY: 1450 case NARY:
1462 { 1451 {
1463 unsigned int i; 1452 unsigned int i;
1464 bool changed = false; 1453 bool changed = false;
1465 vn_nary_op_t nary = PRE_EXPR_NARY (expr); 1454 vn_nary_op_t nary = PRE_EXPR_NARY (expr);
1493 } 1482 }
1494 } 1483 }
1495 if (changed) 1484 if (changed)
1496 { 1485 {
1497 pre_expr constant; 1486 pre_expr constant;
1487 unsigned int new_val_id;
1498 1488
1499 tree result = vn_nary_op_lookup_pieces (newnary.length, 1489 tree result = vn_nary_op_lookup_pieces (newnary.length,
1500 newnary.opcode, 1490 newnary.opcode,
1501 newnary.type, 1491 newnary.type,
1502 newnary.op[0], 1492 newnary.op[0],
1503 newnary.op[1], 1493 newnary.op[1],
1504 newnary.op[2], 1494 newnary.op[2],
1505 newnary.op[3], 1495 newnary.op[3],
1506 &nary); 1496 &nary);
1507 unsigned int new_val_id; 1497 if (result && is_gimple_min_invariant (result))
1498 return get_or_alloc_expr_for_constant (result);
1508 1499
1509 expr = (pre_expr) pool_alloc (pre_expr_pool); 1500 expr = (pre_expr) pool_alloc (pre_expr_pool);
1510 expr->kind = NARY; 1501 expr->kind = NARY;
1511 expr->id = 0; 1502 expr->id = 0;
1512 if (result && is_gimple_min_invariant (result))
1513 return get_or_alloc_expr_for_constant (result);
1514
1515
1516 if (nary) 1503 if (nary)
1517 { 1504 {
1518 PRE_EXPR_NARY (expr) = nary; 1505 PRE_EXPR_NARY (expr) = nary;
1519 constant = fully_constant_expression (expr); 1506 constant = fully_constant_expression (expr);
1520 if (constant != expr) 1507 if (constant != expr)
1543 return constant; 1530 return constant;
1544 get_or_alloc_expression_id (expr); 1531 get_or_alloc_expression_id (expr);
1545 } 1532 }
1546 add_to_value (new_val_id, expr); 1533 add_to_value (new_val_id, expr);
1547 } 1534 }
1548 phi_trans_add (oldexpr, expr, pred);
1549 return expr; 1535 return expr;
1550 } 1536 }
1551 break; 1537 break;
1552 1538
1553 case REFERENCE: 1539 case REFERENCE:
1676 1662
1677 tree result = vn_reference_lookup_pieces (newvuse, ref->set, 1663 tree result = vn_reference_lookup_pieces (newvuse, ref->set,
1678 ref->type, 1664 ref->type,
1679 newoperands, 1665 newoperands,
1680 &newref, true); 1666 &newref, true);
1681 if (newref) 1667 if (result)
1682 VEC_free (vn_reference_op_s, heap, newoperands); 1668 VEC_free (vn_reference_op_s, heap, newoperands);
1683 1669
1684 if (result && is_gimple_min_invariant (result)) 1670 if (result && is_gimple_min_invariant (result))
1685 { 1671 {
1686 gcc_assert (!newoperands); 1672 gcc_assert (!newoperands);
1724 get_or_alloc_expression_id (expr); 1710 get_or_alloc_expression_id (expr);
1725 } 1711 }
1726 add_to_value (new_val_id, expr); 1712 add_to_value (new_val_id, expr);
1727 } 1713 }
1728 VEC_free (vn_reference_op_s, heap, newoperands); 1714 VEC_free (vn_reference_op_s, heap, newoperands);
1729 phi_trans_add (oldexpr, expr, pred);
1730 return expr; 1715 return expr;
1731 } 1716 }
1732 break; 1717 break;
1733 1718
1734 case NAME: 1719 case NAME:
1770 default: 1755 default:
1771 gcc_unreachable (); 1756 gcc_unreachable ();
1772 } 1757 }
1773 } 1758 }
1774 1759
1760 /* Wrapper around phi_translate_1 providing caching functionality. */
1761
1762 static pre_expr
1763 phi_translate (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
1764 basic_block pred, basic_block phiblock)
1765 {
1766 pre_expr phitrans;
1767
1768 if (!expr)
1769 return NULL;
1770
1771 /* Constants contain no values that need translation. */
1772 if (expr->kind == CONSTANT)
1773 return expr;
1774
1775 if (value_id_constant_p (get_expr_value_id (expr)))
1776 return expr;
1777
1778 if (expr->kind != NAME)
1779 {
1780 phitrans = phi_trans_lookup (expr, pred);
1781 if (phitrans)
1782 return phitrans;
1783 }
1784
1785 /* Translate. */
1786 phitrans = phi_translate_1 (expr, set1, set2, pred, phiblock);
1787
1788 /* Don't add empty translations to the cache. Neither add
1789 translations of NAMEs as those are cheap to translate. */
1790 if (phitrans
1791 && expr->kind != NAME)
1792 phi_trans_add (expr, phitrans, pred);
1793
1794 return phitrans;
1795 }
1796
1797
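phi_translate is now split into the phi_translate_1 worker and a thin caching wrapper: constants pass straight through, NAME translations are recomputed because they are cheap, and everything else is looked up in and added to the translation cache. The following standalone sketch shows that memoization split, assuming a toy pointer-keyed cache rather than GCC's phi_trans_lookup/phi_trans_add.

#include <stdio.h>

enum kind { CONSTANT, NAME, NARY };
struct expr { enum kind kind; int value; };

#define CACHE_SIZE 64
static struct { const struct expr *key; int result; } cache[CACHE_SIZE];
static unsigned cache_len;
static unsigned worker_calls;

/* The "real" translation work; counts calls so caching is visible.  */
static int
translate_1 (const struct expr *e)
{
  worker_calls++;
  return e->value * 2;             /* arbitrary stand-in computation */
}

/* Wrapper: constants pass through, NAMEs are cheap enough to redo,
   everything else is looked up in and added to the cache.  */
static int
translate (const struct expr *e)
{
  if (e->kind == CONSTANT)
    return e->value;

  if (e->kind != NAME)
    for (unsigned i = 0; i < cache_len; i++)
      if (cache[i].key == e)
        return cache[i].result;

  int result = translate_1 (e);

  if (e->kind != NAME && cache_len < CACHE_SIZE)
    {
      cache[cache_len].key = e;
      cache[cache_len].result = result;
      cache_len++;
    }
  return result;
}

int
main (void)
{
  struct expr nary = { NARY, 21 };
  translate (&nary);
  translate (&nary);               /* second call served from the cache */
  printf ("worker calls: %u\n", worker_calls);
  return 0;
}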
1775 /* For each expression in SET, translate the values through phi nodes 1798 /* For each expression in SET, translate the values through phi nodes
1776 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting 1799 in PHIBLOCK using edge PHIBLOCK->PRED, and store the resulting
1777 expressions in DEST. */ 1800 expressions in DEST. */
1778 1801
1779 static void 1802 static void
1782 { 1805 {
1783 VEC (pre_expr, heap) *exprs; 1806 VEC (pre_expr, heap) *exprs;
1784 pre_expr expr; 1807 pre_expr expr;
1785 int i; 1808 int i;
1786 1809
1787 if (!phi_nodes (phiblock)) 1810 if (gimple_seq_empty_p (phi_nodes (phiblock)))
1788 { 1811 {
1789 bitmap_set_copy (dest, set); 1812 bitmap_set_copy (dest, set);
1790 return; 1813 return;
1791 } 1814 }
1792 1815
1793 exprs = sorted_array_from_bitmap_set (set); 1816 exprs = sorted_array_from_bitmap_set (set);
1794 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++) 1817 for (i = 0; VEC_iterate (pre_expr, exprs, i, expr); i++)
1795 { 1818 {
1796 pre_expr translated; 1819 pre_expr translated;
1797 translated = phi_translate (expr, set, NULL, pred, phiblock); 1820 translated = phi_translate (expr, set, NULL, pred, phiblock);
1798 1821 if (!translated)
1799 /* Don't add empty translations to the cache */ 1822 continue;
1800 if (translated) 1823
1801 phi_trans_add (expr, translated, pred); 1824 /* We might end up with multiple expressions from SET being
1802 1825 translated to the same value. In this case we do not want
1803 if (translated != NULL) 1826 to retain the NARY or REFERENCE expression but prefer a NAME
1827 which would be the leader. */
1828 if (translated->kind == NAME)
1829 bitmap_value_replace_in_set (dest, translated);
1830 else
1804 bitmap_value_insert_into_set (dest, translated); 1831 bitmap_value_insert_into_set (dest, translated);
1805 } 1832 }
1806 VEC_free (pre_expr, heap, exprs); 1833 VEC_free (pre_expr, heap, exprs);
1807 } 1834 }
1808 1835
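phi_translate_set now drops failed translations immediately and, as its new comment explains, prefers to retain a NAME rather than a NARY or REFERENCE when several expressions from SET translate to the same value. Below is a deliberately simplified model of that preference in plain C; it is not the actual bitmap_value_replace_in_set semantics, only the "a NAME wins as the kept representative" idea.

#include <stdio.h>
#include <string.h>
#include <stdbool.h>

/* One representative expression per value id.  */
#define MAX_VALUES 8
struct rep { char text[16]; bool is_name; bool present; };
static struct rep reps[MAX_VALUES];

/* Record TEXT for VAL, preferring NAMEs as the retained representative
   when several translated expressions share a value.  */
static void
record (int val, const char *text, bool is_name)
{
  if (!reps[val].present || (is_name && !reps[val].is_name))
    {
      strcpy (reps[val].text, text);
      reps[val].is_name = is_name;
      reps[val].present = true;
    }
}

int
main (void)
{
  record (4, "a_2 + b_3", false);  /* a NARY arrives first             */
  record (4, "t_7", true);         /* a NAME with the same value wins  */
  printf ("kept for value 4: %s\n", reps[4].text);
  return 0;
}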
2030 if (!union_contains_value (set1, set2, 2057 if (!union_contains_value (set1, set2,
2031 get_expr_value_id (&temp))) 2058 get_expr_value_id (&temp)))
2032 return false; 2059 return false;
2033 } 2060 }
2034 } 2061 }
2062 /* If the NARY may trap make sure the block does not contain
2063 a possible exit point.
2064 ??? This is overly conservative if we translate AVAIL_OUT
2065 as the available expression might be after the exit point. */
2066 if (BB_MAY_NOTRETURN (block)
2067 && vn_nary_may_trap (nary))
2068 return false;
2035 return true; 2069 return true;
2036 } 2070 }
2037 break; 2071 break;
2038 case REFERENCE: 2072 case REFERENCE:
2039 { 2073 {
2221 changed = true; 2255 changed = true;
2222 VEC_free (basic_block, heap, worklist); 2256 VEC_free (basic_block, heap, worklist);
2223 goto maybe_dump_sets; 2257 goto maybe_dump_sets;
2224 } 2258 }
2225 2259
2226 if (phi_nodes (first)) 2260 if (!gimple_seq_empty_p (phi_nodes (first)))
2227 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first); 2261 phi_translate_set (ANTIC_OUT, ANTIC_IN (first), block, first);
2228 else 2262 else
2229 bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first)); 2263 bitmap_set_copy (ANTIC_OUT, ANTIC_IN (first));
2230 2264
2231 for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++) 2265 for (i = 0; VEC_iterate (basic_block, worklist, i, bprime); i++)
2232 { 2266 {
2233 if (phi_nodes (bprime)) 2267 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2234 { 2268 {
2235 bitmap_set_t tmp = bitmap_set_new (); 2269 bitmap_set_t tmp = bitmap_set_new ();
2236 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime); 2270 phi_translate_set (tmp, ANTIC_IN (bprime), block, bprime);
2237 bitmap_set_and (ANTIC_OUT, tmp); 2271 bitmap_set_and (ANTIC_OUT, tmp);
2238 bitmap_set_free (tmp); 2272 bitmap_set_free (tmp);
2378 bitmap_iterator bi; 2412 bitmap_iterator bi;
2379 2413
2380 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi) 2414 FOR_EACH_EXPR_ID_IN_SET (ANTIC_IN (bprime), i, bi)
2381 bitmap_value_insert_into_set (PA_OUT, 2415 bitmap_value_insert_into_set (PA_OUT,
2382 expression_for_id (i)); 2416 expression_for_id (i));
2383 if (phi_nodes (bprime)) 2417 if (!gimple_seq_empty_p (phi_nodes (bprime)))
2384 { 2418 {
2385 bitmap_set_t pa_in = bitmap_set_new (); 2419 bitmap_set_t pa_in = bitmap_set_new ();
2386 phi_translate_set (pa_in, PA_IN (bprime), block, bprime); 2420 phi_translate_set (pa_in, PA_IN (bprime), block, bprime);
2387 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi) 2421 FOR_EACH_EXPR_ID_IN_SET (pa_in, i, bi)
2388 bitmap_value_insert_into_set (PA_OUT, 2422 bitmap_value_insert_into_set (PA_OUT,
2467 } 2501 }
2468 } 2502 }
2469 2503
2470 BB_VISITED (block) = 0; 2504 BB_VISITED (block) = 0;
2471 BB_DEFERRED (block) = 0; 2505 BB_DEFERRED (block) = 0;
2506
2472 /* While we are here, give empty ANTIC_IN sets to each block. */ 2507 /* While we are here, give empty ANTIC_IN sets to each block. */
2473 ANTIC_IN (block) = bitmap_set_new (); 2508 ANTIC_IN (block) = bitmap_set_new ();
2474 PA_IN (block) = bitmap_set_new (); 2509 PA_IN (block) = bitmap_set_new ();
2475 } 2510 }
2476 2511
2485 { 2520 {
2486 if (dump_file && (dump_flags & TDF_DETAILS)) 2521 if (dump_file && (dump_flags & TDF_DETAILS))
2487 fprintf (dump_file, "Starting iteration %d\n", num_iterations); 2522 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2488 num_iterations++; 2523 num_iterations++;
2489 changed = false; 2524 changed = false;
2490 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++) 2525 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1; i >= 0; i--)
2491 { 2526 {
2492 if (TEST_BIT (changed_blocks, postorder[i])) 2527 if (TEST_BIT (changed_blocks, postorder[i]))
2493 { 2528 {
2494 basic_block block = BASIC_BLOCK (postorder[i]); 2529 basic_block block = BASIC_BLOCK (postorder[i]);
2495 changed |= compute_antic_aux (block, 2530 changed |= compute_antic_aux (block,
2516 { 2551 {
2517 if (dump_file && (dump_flags & TDF_DETAILS)) 2552 if (dump_file && (dump_flags & TDF_DETAILS))
2518 fprintf (dump_file, "Starting iteration %d\n", num_iterations); 2553 fprintf (dump_file, "Starting iteration %d\n", num_iterations);
2519 num_iterations++; 2554 num_iterations++;
2520 changed = false; 2555 changed = false;
2521 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++) 2556 for (i = n_basic_blocks - NUM_FIXED_BLOCKS - 1 ; i >= 0; i--)
2522 { 2557 {
2523 if (TEST_BIT (changed_blocks, postorder[i])) 2558 if (TEST_BIT (changed_blocks, postorder[i]))
2524 { 2559 {
2525 basic_block block = BASIC_BLOCK (postorder[i]); 2560 basic_block block = BASIC_BLOCK (postorder[i]);
2526 changed 2561 changed
2571 2606
2572 2607
2573 /* Inserted expressions are placed onto this worklist, which is used 2608 /* Inserted expressions are placed onto this worklist, which is used
2574 for performing quick dead code elimination of insertions we made 2609 for performing quick dead code elimination of insertions we made
2575 that didn't turn out to be necessary. */ 2610 that didn't turn out to be necessary. */
2576 static VEC(gimple,heap) *inserted_exprs; 2611 static bitmap inserted_exprs;
2577 static bitmap inserted_phi_names;
2578 2612
2579 /* Pool allocated fake store expressions are placed onto this 2613 /* Pool allocated fake store expressions are placed onto this
2580 worklist, which, after performing dead code elimination, is walked 2614 worklist, which, after performing dead code elimination, is walked
2581 to see which expressions need to be put into GC'able memory */ 2615 to see which expressions need to be put into GC'able memory */
2582 static VEC(gimple, heap) *need_creation; 2616 static VEC(gimple, heap) *need_creation;
2594 ++*operand; 2628 ++*operand;
2595 switch (currop->opcode) 2629 switch (currop->opcode)
2596 { 2630 {
2597 case CALL_EXPR: 2631 case CALL_EXPR:
2598 { 2632 {
2599 tree folded, sc = currop->op1; 2633 tree folded, sc = NULL_TREE;
2600 unsigned int nargs = 0; 2634 unsigned int nargs = 0;
2601 tree *args = XNEWVEC (tree, VEC_length (vn_reference_op_s, 2635 tree fn, *args;
2602 ref->operands) - 1); 2636 if (TREE_CODE (currop->op0) == FUNCTION_DECL)
2637 fn = currop->op0;
2638 else
2639 {
2640 pre_expr op0 = get_or_alloc_expr_for (currop->op0);
2641 fn = find_or_generate_expression (block, op0, stmts, domstmt);
2642 if (!fn)
2643 return NULL_TREE;
2644 }
2645 if (currop->op1)
2646 {
2647 pre_expr scexpr = get_or_alloc_expr_for (currop->op1);
2648 sc = find_or_generate_expression (block, scexpr, stmts, domstmt);
2649 if (!sc)
2650 return NULL_TREE;
2651 }
2652 args = XNEWVEC (tree, VEC_length (vn_reference_op_s,
2653 ref->operands) - 1);
2603 while (*operand < VEC_length (vn_reference_op_s, ref->operands)) 2654 while (*operand < VEC_length (vn_reference_op_s, ref->operands))
2604 { 2655 {
2605 args[nargs] = create_component_ref_by_pieces_1 (block, ref, 2656 args[nargs] = create_component_ref_by_pieces_1 (block, ref,
2606 operand, stmts, 2657 operand, stmts,
2607 domstmt); 2658 domstmt);
2659 if (!args[nargs])
2660 {
2661 free (args);
2662 return NULL_TREE;
2663 }
2608 nargs++; 2664 nargs++;
2609 } 2665 }
2610 folded = build_call_array (currop->type, 2666 folded = build_call_array (currop->type,
2611 TREE_CODE (currop->op0) == FUNCTION_DECL 2667 (TREE_CODE (fn) == FUNCTION_DECL
2612 ? build_fold_addr_expr (currop->op0) 2668 ? build_fold_addr_expr (fn) : fn),
2613 : currop->op0,
2614 nargs, args); 2669 nargs, args);
2615 free (args); 2670 free (args);
2616 if (sc) 2671 if (sc)
2617 { 2672 CALL_EXPR_STATIC_CHAIN (folded) = sc;
2618 pre_expr scexpr = get_or_alloc_expr_for (sc);
2619 sc = find_or_generate_expression (block, scexpr, stmts, domstmt);
2620 if (!sc)
2621 return NULL_TREE;
2622 CALL_EXPR_STATIC_CHAIN (folded) = sc;
2623 }
2624 return folded; 2673 return folded;
2625 } 2674 }
2626 break; 2675 break;
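The CALL_EXPR case above now regenerates the callee when it is not a FUNCTION_DECL, materializes the static chain up front, and gives up cleanly, freeing the partially built argument array, as soon as any operand cannot be regenerated. A hedged sketch of that bail-out pattern in plain C follows; the generate helper and string operands are stand-ins, not GCC trees, and the per-operand frees are an artifact of the malloc stand-in.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Stand-in for find_or_generate_expression: fails for negative ids.  */
static char *
generate (int id)
{
  if (id < 0)
    return NULL;
  char *s = malloc (16);
  snprintf (s, 16, "op_%d", id);
  return s;
}

/* Build every operand up front; on any failure free what was already
   built and report failure to the caller.  */
static char **
build_args (const int *ids, int n)
{
  char **args = malloc (n * sizeof *args);
  for (int i = 0; i < n; i++)
    {
      args[i] = generate (ids[i]);
      if (!args[i])
        {
          for (int j = 0; j < i; j++)
            free (args[j]);
          free (args);
          return NULL;
        }
    }
  return args;
}

int
main (void)
{
  int good[] = { 1, 2 }, bad[] = { 1, -1 };
  char **ok = build_args (good, 2);

  printf ("good: %s\n", ok ? "built" : "failed");
  printf ("bad:  %s\n", build_args (bad, 2) ? "built" : "failed");

  if (ok)
    {
      free (ok[0]);
      free (ok[1]);
      free (ok);
    }
  return 0;
}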
2627 case TARGET_MEM_REF: 2676 case TARGET_MEM_REF:
2628 { 2677 {
2742 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt); 2791 genop1 = find_or_generate_expression (block, op1expr, stmts, domstmt);
2743 if (!genop1) 2792 if (!genop1)
2744 return NULL_TREE; 2793 return NULL_TREE;
2745 if (genop2) 2794 if (genop2)
2746 { 2795 {
2747 op2expr = get_or_alloc_expr_for (genop2); 2796 /* Drop zero minimum index. */
2748 genop2 = find_or_generate_expression (block, op2expr, stmts, 2797 if (tree_int_cst_equal (genop2, integer_zero_node))
2749 domstmt); 2798 genop2 = NULL_TREE;
2750 if (!genop2) 2799 else
2751 return NULL_TREE; 2800 {
2801 op2expr = get_or_alloc_expr_for (genop2);
2802 genop2 = find_or_generate_expression (block, op2expr, stmts,
2803 domstmt);
2804 if (!genop2)
2805 return NULL_TREE;
2806 }
2752 } 2807 }
2753 if (genop3) 2808 if (genop3)
2754 { 2809 {
2755 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0)); 2810 tree elmt_type = TREE_TYPE (TREE_TYPE (genop0));
2756 genop3 = size_binop (EXACT_DIV_EXPR, genop3, 2811 /* We can't always put a size in units of the element alignment
2757 size_int (TYPE_ALIGN_UNIT (elmt_type))); 2812 here as the element alignment may be not visible. See
2758 op3expr = get_or_alloc_expr_for (genop3); 2813 PR43783. Simply drop the element size for constant
2759 genop3 = find_or_generate_expression (block, op3expr, stmts, 2814 sizes. */
2760 domstmt); 2815 if (tree_int_cst_equal (genop3, TYPE_SIZE_UNIT (elmt_type)))
2761 if (!genop3) 2816 genop3 = NULL_TREE;
2762 return NULL_TREE; 2817 else
2818 {
2819 genop3 = size_binop (EXACT_DIV_EXPR, genop3,
2820 size_int (TYPE_ALIGN_UNIT (elmt_type)));
2821 op3expr = get_or_alloc_expr_for (genop3);
2822 genop3 = find_or_generate_expression (block, op3expr, stmts,
2823 domstmt);
2824 if (!genop3)
2825 return NULL_TREE;
2826 }
2763 } 2827 }
2764 return build4 (currop->opcode, currop->type, genop0, genop1, 2828 return build4 (currop->opcode, currop->type, genop0, genop1,
2765 genop2, genop3); 2829 genop2, genop3);
2766 } 2830 }
2767 case COMPONENT_REF: 2831 case COMPONENT_REF:
2956 stmts, domstmt); 3020 stmts, domstmt);
2957 tree genop2 = find_or_generate_expression (block, op2, 3021 tree genop2 = find_or_generate_expression (block, op2,
2958 stmts, domstmt); 3022 stmts, domstmt);
2959 if (!genop1 || !genop2) 3023 if (!genop1 || !genop2)
2960 return NULL_TREE; 3024 return NULL_TREE;
2961 genop1 = fold_convert (TREE_TYPE (nary->op[0]),
2962 genop1);
2963 /* Ensure op2 is a sizetype for POINTER_PLUS_EXPR. It 3025 /* Ensure op2 is a sizetype for POINTER_PLUS_EXPR. It
2964 may be a constant with the wrong type. */ 3026 may be a constant with the wrong type. */
2965 if (nary->opcode == POINTER_PLUS_EXPR) 3027 if (nary->opcode == POINTER_PLUS_EXPR)
2966 genop2 = fold_convert (sizetype, genop2); 3028 {
3029 genop1 = fold_convert (nary->type, genop1);
3030 genop2 = fold_convert (sizetype, genop2);
3031 }
2967 else 3032 else
2968 genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2); 3033 {
3034 genop1 = fold_convert (TREE_TYPE (nary->op[0]), genop1);
3035 genop2 = fold_convert (TREE_TYPE (nary->op[1]), genop2);
3036 }
2969 3037
2970 folded = fold_build2 (nary->opcode, nary->type, 3038 folded = fold_build2 (nary->opcode, nary->type,
2971 genop1, genop2); 3039 genop1, genop2);
2972 } 3040 }
2973 break; 3041 break;
3012 { 3080 {
3013 gimple stmt = gsi_stmt (gsi); 3081 gimple stmt = gsi_stmt (gsi);
3014 tree forcedname = gimple_get_lhs (stmt); 3082 tree forcedname = gimple_get_lhs (stmt);
3015 pre_expr nameexpr; 3083 pre_expr nameexpr;
3016 3084
3017 VEC_safe_push (gimple, heap, inserted_exprs, stmt);
3018 if (TREE_CODE (forcedname) == SSA_NAME) 3085 if (TREE_CODE (forcedname) == SSA_NAME)
3019 { 3086 {
3087 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (forcedname));
3020 VN_INFO_GET (forcedname)->valnum = forcedname; 3088 VN_INFO_GET (forcedname)->valnum = forcedname;
3021 VN_INFO (forcedname)->value_id = get_next_value_id (); 3089 VN_INFO (forcedname)->value_id = get_next_value_id ();
3022 nameexpr = get_or_alloc_expr_for_name (forcedname); 3090 nameexpr = get_or_alloc_expr_for_name (forcedname);
3023 add_to_value (VN_INFO (forcedname)->value_id, nameexpr); 3091 add_to_value (VN_INFO (forcedname)->value_id, nameexpr);
3024 if (!in_fre) 3092 if (!in_fre)
3032 3100
3033 /* Build and insert the assignment of the end result to the temporary 3101 /* Build and insert the assignment of the end result to the temporary
3034 that we will return. */ 3102 that we will return. */
3035 if (!pretemp || exprtype != TREE_TYPE (pretemp)) 3103 if (!pretemp || exprtype != TREE_TYPE (pretemp))
3036 { 3104 {
3037 pretemp = create_tmp_var (exprtype, "pretmp"); 3105 pretemp = create_tmp_reg (exprtype, "pretmp");
3038 get_var_ann (pretemp); 3106 get_var_ann (pretemp);
3039 } 3107 }
3040 3108
3041 temp = pretemp; 3109 temp = pretemp;
3042 add_referenced_var (temp); 3110 add_referenced_var (temp);
3043
3044 if (TREE_CODE (exprtype) == COMPLEX_TYPE
3045 || TREE_CODE (exprtype) == VECTOR_TYPE)
3046 DECL_GIMPLE_REG_P (temp) = 1;
3047 3111
3048 newstmt = gimple_build_assign (temp, folded); 3112 newstmt = gimple_build_assign (temp, folded);
3049 name = make_ssa_name (temp, newstmt); 3113 name = make_ssa_name (temp, newstmt);
3050 gimple_assign_set_lhs (newstmt, name); 3114 gimple_assign_set_lhs (newstmt, name);
3051 gimple_set_plf (newstmt, NECESSARY, false); 3115 gimple_set_plf (newstmt, NECESSARY, false);
3052 3116
3053 gimple_seq_add_stmt (stmts, newstmt); 3117 gimple_seq_add_stmt (stmts, newstmt);
3054 VEC_safe_push (gimple, heap, inserted_exprs, newstmt); 3118 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (name));
3055 3119
3056 /* All the symbols in NEWEXPR should be put into SSA form. */ 3120 /* All the symbols in NEWEXPR should be put into SSA form. */
3057 mark_symbols_for_renaming (newstmt); 3121 mark_symbols_for_renaming (newstmt);
3058 3122
3059 /* Add a value number to the temporary. 3123 /* Add a value number to the temporary.
3183 { 3247 {
3184 if (dump_file && (dump_flags & TDF_DETAILS)) 3248 if (dump_file && (dump_flags & TDF_DETAILS))
3185 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n"); 3249 fprintf (dump_file, "Skipping insertion of phi for partial redundancy: Looks like an induction variable\n");
3186 nophi = true; 3250 nophi = true;
3187 } 3251 }
3188 }
3189
3190 /* Make sure we are not inserting trapping expressions. */
3191 FOR_EACH_EDGE (pred, ei, block->preds)
3192 {
3193 bprime = pred->src;
3194 eprime = avail[bprime->index];
3195 if (eprime->kind == NARY
3196 && vn_nary_may_trap (PRE_EXPR_NARY (eprime)))
3197 return false;
3198 } 3252 }
3199 3253
3200 /* Make the necessary insertions. */ 3254 /* Make the necessary insertions. */
3201 FOR_EACH_EDGE (pred, ei, block->preds) 3255 FOR_EACH_EDGE (pred, ei, block->preds)
3202 { 3256 {
3242 gimple_stmt_iterator gsi; 3296 gimple_stmt_iterator gsi;
3243 gsi = gsi_start (stmts); 3297 gsi = gsi_start (stmts);
3244 for (; !gsi_end_p (gsi); gsi_next (&gsi)) 3298 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3245 { 3299 {
3246 gimple stmt = gsi_stmt (gsi); 3300 gimple stmt = gsi_stmt (gsi);
3247 VEC_safe_push (gimple, heap, inserted_exprs, stmt); 3301 tree lhs = gimple_get_lhs (stmt);
3302 if (TREE_CODE (lhs) == SSA_NAME)
3303 bitmap_set_bit (inserted_exprs,
3304 SSA_NAME_VERSION (lhs));
3248 gimple_set_plf (stmt, NECESSARY, false); 3305 gimple_set_plf (stmt, NECESSARY, false);
3249 } 3306 }
3250 gsi_insert_seq_on_edge (pred, stmts); 3307 gsi_insert_seq_on_edge (pred, stmts);
3251 } 3308 }
3252 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr); 3309 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3281 gimple_stmt_iterator gsi; 3338 gimple_stmt_iterator gsi;
3282 gsi = gsi_start (stmts); 3339 gsi = gsi_start (stmts);
3283 for (; !gsi_end_p (gsi); gsi_next (&gsi)) 3340 for (; !gsi_end_p (gsi); gsi_next (&gsi))
3284 { 3341 {
3285 gimple stmt = gsi_stmt (gsi); 3342 gimple stmt = gsi_stmt (gsi);
3286 VEC_safe_push (gimple, heap, inserted_exprs, stmt); 3343 tree lhs = gimple_get_lhs (stmt);
3344 if (TREE_CODE (lhs) == SSA_NAME)
3345 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
3287 gimple_set_plf (stmt, NECESSARY, false); 3346 gimple_set_plf (stmt, NECESSARY, false);
3288 } 3347 }
3289 gsi_insert_seq_on_edge (pred, stmts); 3348 gsi_insert_seq_on_edge (pred, stmts);
3290 } 3349 }
3291 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr); 3350 avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
3317 phi = create_phi_node (temp, block); 3376 phi = create_phi_node (temp, block);
3318 3377
3319 gimple_set_plf (phi, NECESSARY, false); 3378 gimple_set_plf (phi, NECESSARY, false);
3320 VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi); 3379 VN_INFO_GET (gimple_phi_result (phi))->valnum = gimple_phi_result (phi);
3321 VN_INFO (gimple_phi_result (phi))->value_id = val; 3380 VN_INFO (gimple_phi_result (phi))->value_id = val;
3322 VEC_safe_push (gimple, heap, inserted_exprs, phi); 3381 bitmap_set_bit (inserted_exprs, SSA_NAME_VERSION (gimple_phi_result (phi)));
3323 bitmap_set_bit (inserted_phi_names,
3324 SSA_NAME_VERSION (gimple_phi_result (phi)));
3325 FOR_EACH_EDGE (pred, ei, block->preds) 3382 FOR_EACH_EDGE (pred, ei, block->preds)
3326 { 3383 {
3327 pre_expr ae = avail[pred->src->index]; 3384 pre_expr ae = avail[pred->src->index];
3328 gcc_assert (get_expr_type (ae) == type 3385 gcc_assert (get_expr_type (ae) == type
3329 || useless_type_conversion_p (type, get_expr_type (ae))); 3386 || useless_type_conversion_p (type, get_expr_type (ae)));
3802 3859
3803 /* Generate values for PHI nodes. */ 3860 /* Generate values for PHI nodes. */
3804 for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi)) 3861 for (gsi = gsi_start_phis (block); !gsi_end_p (gsi); gsi_next (&gsi))
3805 make_values_for_phi (gsi_stmt (gsi), block); 3862 make_values_for_phi (gsi_stmt (gsi), block);
3806 3863
3864 BB_MAY_NOTRETURN (block) = 0;
3865
3807 /* Now compute value numbers and populate value sets with all 3866 /* Now compute value numbers and populate value sets with all
3808 the expressions computed in BLOCK. */ 3867 the expressions computed in BLOCK. */
3809 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi)) 3868 for (gsi = gsi_start_bb (block); !gsi_end_p (gsi); gsi_next (&gsi))
3810 { 3869 {
3811 ssa_op_iter iter; 3870 ssa_op_iter iter;
3812 tree op; 3871 tree op;
3813 3872
3814 stmt = gsi_stmt (gsi); 3873 stmt = gsi_stmt (gsi);
3815 gimple_set_uid (stmt, stmt_uid++); 3874 gimple_set_uid (stmt, stmt_uid++);
3875
3876 /* Cache whether the basic-block has any non-visible side-effect
3877 or control flow.
3878 If this isn't a call or it is the last stmt in the
3879 basic-block then the CFG represents things correctly. */
3880 if (is_gimple_call (stmt)
3881 && !stmt_ends_bb_p (stmt))
3882 {
3883 /* Non-looping const functions always return normally.
3884 Otherwise the call might not return or have side-effects
3885 that forbids hoisting possibly trapping expressions
3886 before it. */
3887 int flags = gimple_call_flags (stmt);
3888 if (!(flags & ECF_CONST)
3889 || (flags & ECF_LOOPING_CONST_OR_PURE))
3890 BB_MAY_NOTRETURN (block) = 1;
3891 }
3816 3892
3817 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF) 3893 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
3818 { 3894 {
3819 pre_expr e = get_or_alloc_expr_for_name (op); 3895 pre_expr e = get_or_alloc_expr_for_name (op);
3820 3896
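compute_avail now records, per basic block, whether it contains a call that might not return (any call that is not const, or that is a looping const/pure call); the BB_MAY_NOTRETURN / vn_nary_may_trap hunk earlier in this diff then refuses to treat trapping NARYs as available in such blocks. A standalone sketch of computing the per-block flag from call flags is given below; the stmt struct and the locally defined ECF_ bits are assumptions for illustration, not gimple.

#include <stdio.h>
#include <stdbool.h>

/* Locally defined stand-ins for GCC's call flags.  */
#define ECF_CONST                 (1 << 0)
#define ECF_LOOPING_CONST_OR_PURE (1 << 1)

struct stmt { bool is_call; int flags; };

/* A block "may not return" if it has a call that is either not const
   or is a looping const/pure call; hoisting a trapping expression
   above such a call would be unsafe.  */
static bool
block_may_not_return (const struct stmt *stmts, int n)
{
  for (int i = 0; i < n; i++)
    if (stmts[i].is_call)
      {
        int flags = stmts[i].flags;
        if (!(flags & ECF_CONST) || (flags & ECF_LOOPING_CONST_OR_PURE))
          return true;
      }
  return false;
}

int
main (void)
{
  struct stmt const_call[] = { { true, ECF_CONST }, { false, 0 } };
  struct stmt plain_call[] = { { true, 0 } };

  printf ("const-call block may not return: %d\n",
          block_may_not_return (const_call, 2));
  printf ("plain-call block may not return: %d\n",
          block_may_not_return (plain_call, 1));
  return 0;
}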
4233 4309
4234 /* We want to perform redundant PHI elimination. Do so by 4310 /* We want to perform redundant PHI elimination. Do so by
4235 replacing the PHI with a single copy if possible. 4311 replacing the PHI with a single copy if possible.
4236 Do not touch inserted, single-argument or virtual PHIs. */ 4312 Do not touch inserted, single-argument or virtual PHIs. */
4237 if (gimple_phi_num_args (phi) == 1 4313 if (gimple_phi_num_args (phi) == 1
4238 || !is_gimple_reg (res) 4314 || !is_gimple_reg (res))
4239 || bitmap_bit_p (inserted_phi_names, SSA_NAME_VERSION (res)))
4240 { 4315 {
4241 gsi_next (&gsi); 4316 gsi_next (&gsi);
4242 continue; 4317 continue;
4243 } 4318 }
4244 4319
4270 fprintf (dump_file, "\n"); 4345 fprintf (dump_file, "\n");
4271 } 4346 }
4272 4347
4273 remove_phi_node (&gsi, false); 4348 remove_phi_node (&gsi, false);
4274 4349
4350 if (!bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res))
4351 && TREE_CODE (sprime) == SSA_NAME)
4352 gimple_set_plf (SSA_NAME_DEF_STMT (sprime), NECESSARY, true);
4353
4275 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime))) 4354 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
4276 sprime = fold_convert (TREE_TYPE (res), sprime); 4355 sprime = fold_convert (TREE_TYPE (res), sprime);
4277 stmt = gimple_build_assign (res, sprime); 4356 stmt = gimple_build_assign (res, sprime);
4278 SSA_NAME_DEF_STMT (res) = stmt; 4357 SSA_NAME_DEF_STMT (res) = stmt;
4279 if (TREE_CODE (sprime) == SSA_NAME) 4358 gimple_set_plf (stmt, NECESSARY, gimple_plf (phi, NECESSARY));
4280 gimple_set_plf (SSA_NAME_DEF_STMT (sprime), 4359
4281 NECESSARY, true);
4282 gsi2 = gsi_after_labels (b); 4360 gsi2 = gsi_after_labels (b);
4283 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT); 4361 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
4284 /* Queue the copy for eventual removal. */ 4362 /* Queue the copy for eventual removal. */
4285 VEC_safe_push (gimple, heap, to_remove, stmt); 4363 VEC_safe_push (gimple, heap, to_remove, stmt);
4286 pre_stats.eliminations++; 4364 /* If we inserted this PHI node ourself, it's not an elimination. */
4365 if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
4366 pre_stats.phis--;
4367 else
4368 pre_stats.eliminations++;
4287 } 4369 }
4288 } 4370 }
4289 4371
4290 /* We cannot remove stmts during BB walk, especially not release SSA 4372 /* We cannot remove stmts during BB walk, especially not release SSA
4291 names there as this confuses the VN machinery. The stmts ending 4373 names there as this confuses the VN machinery. The stmts ending
4292 up in to_remove are either stores or simple copies. */ 4374 up in to_remove are either stores or simple copies. */
4293 for (i = 0; VEC_iterate (gimple, to_remove, i, stmt); ++i) 4375 for (i = 0; VEC_iterate (gimple, to_remove, i, stmt); ++i)
4294 { 4376 {
4295 tree lhs = gimple_assign_lhs (stmt); 4377 tree lhs = gimple_assign_lhs (stmt);
4378 tree rhs = gimple_assign_rhs1 (stmt);
4296 use_operand_p use_p; 4379 use_operand_p use_p;
4297 gimple use_stmt; 4380 gimple use_stmt;
4298 4381
4299 /* If there is a single use only, propagate the equivalency 4382 /* If there is a single use only, propagate the equivalency
4300 instead of keeping the copy. */ 4383 instead of keeping the copy. */
4301 if (TREE_CODE (lhs) == SSA_NAME 4384 if (TREE_CODE (lhs) == SSA_NAME
4385 && TREE_CODE (rhs) == SSA_NAME
4302 && single_imm_use (lhs, &use_p, &use_stmt) 4386 && single_imm_use (lhs, &use_p, &use_stmt)
4303 && may_propagate_copy (USE_FROM_PTR (use_p), 4387 && may_propagate_copy (USE_FROM_PTR (use_p), rhs))
4304 gimple_assign_rhs1 (stmt)))
4305 { 4388 {
4306 SET_USE (use_p, gimple_assign_rhs1 (stmt)); 4389 SET_USE (use_p, rhs);
4307 update_stmt (use_stmt); 4390 update_stmt (use_stmt);
4391 if (bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (lhs))
4392 && TREE_CODE (rhs) == SSA_NAME)
4393 gimple_set_plf (SSA_NAME_DEF_STMT (rhs), NECESSARY, true);
4308 } 4394 }
4309 4395
4310 /* If this is a store or a now unused copy, remove it. */ 4396 /* If this is a store or a now unused copy, remove it. */
4311 if (TREE_CODE (lhs) != SSA_NAME 4397 if (TREE_CODE (lhs) != SSA_NAME
4312 || has_zero_uses (lhs)) 4398 || has_zero_uses (lhs))
4313 { 4399 {
4314 gsi = gsi_for_stmt (stmt); 4400 gsi = gsi_for_stmt (stmt);
4315 unlink_stmt_vdef (stmt); 4401 unlink_stmt_vdef (stmt);
4316 gsi_remove (&gsi, true); 4402 gsi_remove (&gsi, true);
4403 if (TREE_CODE (lhs) == SSA_NAME)
4404 bitmap_clear_bit (inserted_exprs, SSA_NAME_VERSION (lhs));
4317 release_defs (stmt); 4405 release_defs (stmt);
4318 } 4406 }
4319 } 4407 }
4320 VEC_free (gimple, heap, to_remove); 4408 VEC_free (gimple, heap, to_remove);
4321 4409
4357 pass removes any insertions we made that weren't actually used. */ 4445 pass removes any insertions we made that weren't actually used. */
4358 4446
4359 static void 4447 static void
4360 remove_dead_inserted_code (void) 4448 remove_dead_inserted_code (void)
4361 { 4449 {
4362 VEC(gimple,heap) *worklist = NULL; 4450 bitmap worklist;
4363 int i; 4451 unsigned i;
4452 bitmap_iterator bi;
4364 gimple t; 4453 gimple t;
4365 4454
4366 worklist = VEC_alloc (gimple, heap, VEC_length (gimple, inserted_exprs)); 4455 worklist = BITMAP_ALLOC (NULL);
4367 for (i = 0; VEC_iterate (gimple, inserted_exprs, i, t); i++) 4456 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4368 { 4457 {
4458 t = SSA_NAME_DEF_STMT (ssa_name (i));
4369 if (gimple_plf (t, NECESSARY)) 4459 if (gimple_plf (t, NECESSARY))
4370 VEC_quick_push (gimple, worklist, t); 4460 bitmap_set_bit (worklist, i);
4371 } 4461 }
4372 while (VEC_length (gimple, worklist) > 0) 4462 while (!bitmap_empty_p (worklist))
4373 { 4463 {
4374 t = VEC_pop (gimple, worklist); 4464 i = bitmap_first_set_bit (worklist);
4465 bitmap_clear_bit (worklist, i);
4466 t = SSA_NAME_DEF_STMT (ssa_name (i));
4375 4467
4376 /* PHI nodes are somewhat special in that each PHI alternative has 4468 /* PHI nodes are somewhat special in that each PHI alternative has
4377 data and control dependencies. All the statements feeding the 4469 data and control dependencies. All the statements feeding the
4378 PHI node's arguments are always necessary. */ 4470 PHI node's arguments are always necessary. */
4379 if (gimple_code (t) == GIMPLE_PHI) 4471 if (gimple_code (t) == GIMPLE_PHI)
4380 { 4472 {
4381 unsigned k; 4473 unsigned k;
4382 4474
4383 VEC_reserve (gimple, heap, worklist, gimple_phi_num_args (t));
4384 for (k = 0; k < gimple_phi_num_args (t); k++) 4475 for (k = 0; k < gimple_phi_num_args (t); k++)
4385 { 4476 {
4386 tree arg = PHI_ARG_DEF (t, k); 4477 tree arg = PHI_ARG_DEF (t, k);
4387 if (TREE_CODE (arg) == SSA_NAME) 4478 if (TREE_CODE (arg) == SSA_NAME)
4388 { 4479 {
4389 gimple n = mark_operand_necessary (arg); 4480 gimple n = mark_operand_necessary (arg);
4390 if (n) 4481 if (n)
4391 VEC_quick_push (gimple, worklist, n); 4482 bitmap_set_bit (worklist, SSA_NAME_VERSION (arg));
4392 } 4483 }
4393 } 4484 }
4394 } 4485 }
4395 else 4486 else
4396 { 4487 {
4407 4498
4408 FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES) 4499 FOR_EACH_SSA_TREE_OPERAND (use, t, iter, SSA_OP_ALL_USES)
4409 { 4500 {
4410 gimple n = mark_operand_necessary (use); 4501 gimple n = mark_operand_necessary (use);
4411 if (n) 4502 if (n)
4412 VEC_safe_push (gimple, heap, worklist, n); 4503 bitmap_set_bit (worklist, SSA_NAME_VERSION (use));
4413 } 4504 }
4414 } 4505 }
4415 } 4506 }
4416 4507
4417 for (i = 0; VEC_iterate (gimple, inserted_exprs, i, t); i++) 4508 EXECUTE_IF_SET_IN_BITMAP (inserted_exprs, 0, i, bi)
4418 { 4509 {
4510 t = SSA_NAME_DEF_STMT (ssa_name (i));
4419 if (!gimple_plf (t, NECESSARY)) 4511 if (!gimple_plf (t, NECESSARY))
4420 { 4512 {
4421 gimple_stmt_iterator gsi; 4513 gimple_stmt_iterator gsi;
4422 4514
4423 if (dump_file && (dump_flags & TDF_DETAILS)) 4515 if (dump_file && (dump_flags & TDF_DETAILS))
4434 gsi_remove (&gsi, true); 4526 gsi_remove (&gsi, true);
4435 release_defs (t); 4527 release_defs (t);
4436 } 4528 }
4437 } 4529 }
4438 } 4530 }
4439 VEC_free (gimple, heap, worklist); 4531 BITMAP_FREE (worklist);
4440 } 4532 }
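remove_dead_inserted_code now keeps its worklist as a bitmap of SSA versions, repeatedly popping the lowest set bit and recovering the defining statement with SSA_NAME_DEF_STMT, instead of pushing gimple pointers onto a VEC. A hedged sketch of that bitmap-as-worklist loop in plain C follows, with a single 64-bit word standing in for GCC's bitmaps.

#include <stdio.h>
#include <stdint.h>

/* Pop the index of the lowest set bit, like the bitmap_first_set_bit /
   bitmap_clear_bit pair in the loop above.  */
static int
pop_lowest (uint64_t *work)
{
  if (*work == 0)
    return -1;
  int i = __builtin_ctzll (*work);
  *work &= *work - 1;              /* clear that bit */
  return i;
}

int
main (void)
{
  /* Pretend SSA versions 3, 9 and 20 were marked necessary.  */
  uint64_t worklist = (1ull << 3) | (1ull << 9) | (1ull << 20);
  int v;

  while ((v = pop_lowest (&worklist)) >= 0)
    {
      /* Here the pass would take SSA_NAME_DEF_STMT (ssa_name (v)),
         walk its operands and possibly set more worklist bits.  */
      printf ("processing SSA version %d\n", v);
    }
  return 0;
}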
4533
4534 /* Compute a reverse post-order in *POST_ORDER. If INCLUDE_ENTRY_EXIT is
4535 true, then ENTRY_BLOCK and EXIT_BLOCK are included. Returns
4536 the number of visited blocks. */
4537
4538 static int
4539 my_rev_post_order_compute (int *post_order, bool include_entry_exit)
4540 {
4541 edge_iterator *stack;
4542 int sp;
4543 int post_order_num = 0;
4544 sbitmap visited;
4545
4546 if (include_entry_exit)
4547 post_order[post_order_num++] = EXIT_BLOCK;
4548
4549 /* Allocate stack for back-tracking up CFG. */
4550 stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
4551 sp = 0;
4552
4553 /* Allocate bitmap to track nodes that have been visited. */
4554 visited = sbitmap_alloc (last_basic_block);
4555
4556 /* None of the nodes in the CFG have been visited yet. */
4557 sbitmap_zero (visited);
4558
4559 /* Push the last edge on to the stack. */
4560 stack[sp++] = ei_start (EXIT_BLOCK_PTR->preds);
4561
4562 while (sp)
4563 {
4564 edge_iterator ei;
4565 basic_block src;
4566 basic_block dest;
4567
4568 /* Look at the edge on the top of the stack. */
4569 ei = stack[sp - 1];
4570 src = ei_edge (ei)->src;
4571 dest = ei_edge (ei)->dest;
4572
4573 /* Check if the edge destination has been visited yet. */
4574 if (src != ENTRY_BLOCK_PTR && ! TEST_BIT (visited, src->index))
4575 {
4576 /* Mark that we have visited the destination. */
4577 SET_BIT (visited, src->index);
4578
4579 if (EDGE_COUNT (src->preds) > 0)
4580 /* Since the SRC node has been visited for the first
4581 time, check its predecessors. */
4582 stack[sp++] = ei_start (src->preds);
4583 else
4584 post_order[post_order_num++] = src->index;
4585 }
4586 else
4587 {
4588 if (ei_one_before_end_p (ei) && dest != EXIT_BLOCK_PTR)
4589 post_order[post_order_num++] = dest->index;
4590
4591 if (!ei_one_before_end_p (ei))
4592 ei_next (&stack[sp - 1]);
4593 else
4594 sp--;
4595 }
4596 }
4597
4598 if (include_entry_exit)
4599 post_order[post_order_num++] = ENTRY_BLOCK;
4600
4601 free (stack);
4602 sbitmap_free (visited);
4603 return post_order_num;
4604 }
4605
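my_rev_post_order_compute walks predecessor edges from the exit block and numbers a block only once the walk through its predecessors has finished. The compact standalone sketch below performs the same traversal on a toy predecessor-list graph, using recursion instead of the explicit edge-iterator stack, and it emits the exit block unconditionally where the GCC version does so only when include_entry_exit is set.

#include <stdio.h>
#include <stdbool.h>

/* Tiny CFG given by predecessor lists (-1 terminated), mirroring the
   ->preds walk above.  Block 3 plays the exit block.  */
#define N 4
static const int preds[N][N] = {
  { -1 },          /* block 0: no predecessors        */
  { 0, -1 },       /* block 1: predecessor 0          */
  { 0, 1, -1 },    /* block 2: predecessors 0 and 1   */
  { 2, -1 },       /* block 3: exit, predecessor 2    */
};

static bool visited[N];
static int order[N], order_len;

/* A block is emitted only after all of its predecessors have been
   fully processed.  */
static void
walk (int bb)
{
  visited[bb] = true;
  for (int i = 0; preds[bb][i] >= 0; i++)
    if (!visited[preds[bb][i]])
      walk (preds[bb][i]);
  order[order_len++] = bb;
}

int
main (void)
{
  walk (3);                        /* start at the exit block */
  for (int i = 0; i < order_len; i++)
    printf ("%d ", order[i]);
  printf ("\n");                   /* prints: 0 1 2 3 */
  return 0;
}

compute_antic consumes this array from the last entry down to the first (see the reversed loops earlier in the diff), so blocks nearer the exit are processed first, which fits the backward ANTIC dataflow problem.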
4441 4606
4442 /* Initialize data structures used by PRE. */ 4607 /* Initialize data structures used by PRE. */
4443 4608
4444 static void 4609 static void
4445 init_pre (bool do_fre) 4610 init_pre (bool do_fre)
4450 expressions = NULL; 4615 expressions = NULL;
4451 VEC_safe_push (pre_expr, heap, expressions, NULL); 4616 VEC_safe_push (pre_expr, heap, expressions, NULL);
4452 value_expressions = VEC_alloc (bitmap_set_t, heap, get_max_value_id () + 1); 4617 value_expressions = VEC_alloc (bitmap_set_t, heap, get_max_value_id () + 1);
4453 VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions, 4618 VEC_safe_grow_cleared (bitmap_set_t, heap, value_expressions,
4454 get_max_value_id() + 1); 4619 get_max_value_id() + 1);
4620 name_to_id = NULL;
4455 4621
4456 in_fre = do_fre; 4622 in_fre = do_fre;
4457 4623
4458 inserted_exprs = NULL; 4624 inserted_exprs = BITMAP_ALLOC (NULL);
4459 need_creation = NULL; 4625 need_creation = NULL;
4460 pretemp = NULL_TREE; 4626 pretemp = NULL_TREE;
4461 storetemp = NULL_TREE; 4627 storetemp = NULL_TREE;
4462 prephitemp = NULL_TREE; 4628 prephitemp = NULL_TREE;
4463 4629
4464 connect_infinite_loops_to_exit (); 4630 connect_infinite_loops_to_exit ();
4465 memset (&pre_stats, 0, sizeof (pre_stats)); 4631 memset (&pre_stats, 0, sizeof (pre_stats));
4466 4632
4467 4633
4468 postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS); 4634 postorder = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
4469 post_order_compute (postorder, false, false); 4635 my_rev_post_order_compute (postorder, false);
4470 4636
4471 FOR_ALL_BB (bb) 4637 FOR_ALL_BB (bb)
4472 bb->aux = XCNEWVEC (struct bb_bitmap_sets, 1); 4638 bb->aux = XCNEWVEC (struct bb_bitmap_sets, 1);
4473 4639
4474 calculate_dominance_info (CDI_POST_DOMINATORS); 4640 calculate_dominance_info (CDI_POST_DOMINATORS);
4475 calculate_dominance_info (CDI_DOMINATORS); 4641 calculate_dominance_info (CDI_DOMINATORS);
4476 4642
4477 bitmap_obstack_initialize (&grand_bitmap_obstack); 4643 bitmap_obstack_initialize (&grand_bitmap_obstack);
4478 inserted_phi_names = BITMAP_ALLOC (&grand_bitmap_obstack);
4479 phi_translate_table = htab_create (5110, expr_pred_trans_hash, 4644 phi_translate_table = htab_create (5110, expr_pred_trans_hash,
4480 expr_pred_trans_eq, free); 4645 expr_pred_trans_eq, free);
4481 expression_to_id = htab_create (num_ssa_names * 3, 4646 expression_to_id = htab_create (num_ssa_names * 3,
4482 pre_expr_hash, 4647 pre_expr_hash,
4483 pre_expr_eq, NULL); 4648 pre_expr_eq, NULL);
4504 { 4669 {
4505 basic_block bb; 4670 basic_block bb;
4506 4671
4507 free (postorder); 4672 free (postorder);
4508 VEC_free (bitmap_set_t, heap, value_expressions); 4673 VEC_free (bitmap_set_t, heap, value_expressions);
4509 VEC_free (gimple, heap, inserted_exprs); 4674 BITMAP_FREE (inserted_exprs);
4510 VEC_free (gimple, heap, need_creation); 4675 VEC_free (gimple, heap, need_creation);
4511 bitmap_obstack_release (&grand_bitmap_obstack); 4676 bitmap_obstack_release (&grand_bitmap_obstack);
4512 free_alloc_pool (bitmap_set_pool); 4677 free_alloc_pool (bitmap_set_pool);
4513 free_alloc_pool (pre_expr_pool); 4678 free_alloc_pool (pre_expr_pool);
4514 htab_delete (phi_translate_table); 4679 htab_delete (phi_translate_table);
4515 htab_delete (expression_to_id); 4680 htab_delete (expression_to_id);
4681 VEC_free (unsigned, heap, name_to_id);
4516 4682
4517 FOR_ALL_BB (bb) 4683 FOR_ALL_BB (bb)
4518 { 4684 {
4519 free (bb->aux); 4685 free (bb->aux);
4520 bb->aux = NULL; 4686 bb->aux = NULL;
4550 loop_optimizer_init (LOOPS_NORMAL); 4716 loop_optimizer_init (LOOPS_NORMAL);
4551 4717
4552 if (!run_scc_vn (do_fre)) 4718 if (!run_scc_vn (do_fre))
4553 { 4719 {
4554 if (!do_fre) 4720 if (!do_fre)
4555 { 4721 loop_optimizer_finalize ();
4556 remove_dead_inserted_code ();
4557 loop_optimizer_finalize ();
4558 }
4559 4722
4560 return 0; 4723 return 0;
4561 } 4724 }
4725
4562 init_pre (do_fre); 4726 init_pre (do_fre);
4563 scev_initialize (); 4727 scev_initialize ();
4564
4565 4728
4566 /* Collect and value number expressions computed in each basic block. */ 4729 /* Collect and value number expressions computed in each basic block. */
4567 compute_avail (); 4730 compute_avail ();
4568 4731
4569 if (dump_file && (dump_flags & TDF_DETAILS)) 4732 if (dump_file && (dump_flags & TDF_DETAILS))
4588 { 4751 {
4589 compute_antic (); 4752 compute_antic ();
4590 insert (); 4753 insert ();
4591 } 4754 }
4592 4755
4756 /* Make sure to remove fake edges before committing our inserts.
4757 This makes sure we don't end up with extra critical edges that
4758 we would need to split. */
4759 remove_fake_exit_edges ();
4760 gsi_commit_edge_inserts ();
4761
4593 /* Remove all the redundant expressions. */ 4762 /* Remove all the redundant expressions. */
4594 todo |= eliminate (); 4763 todo |= eliminate ();
4595 4764
4596 statistics_counter_event (cfun, "Insertions", pre_stats.insertions); 4765 statistics_counter_event (cfun, "Insertions", pre_stats.insertions);
4597 statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert); 4766 statistics_counter_event (cfun, "PA inserted", pre_stats.pa_insert);
4598 statistics_counter_event (cfun, "New PHIs", pre_stats.phis); 4767 statistics_counter_event (cfun, "New PHIs", pre_stats.phis);
4599 statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations); 4768 statistics_counter_event (cfun, "Eliminated", pre_stats.eliminations);
4600 statistics_counter_event (cfun, "Constified", pre_stats.constified); 4769 statistics_counter_event (cfun, "Constified", pre_stats.constified);
4601
4602 /* Make sure to remove fake edges before committing our inserts.
4603 This makes sure we don't end up with extra critical edges that
4604 we would need to split. */
4605 remove_fake_exit_edges ();
4606 gsi_commit_edge_inserts ();
4607 4770
4608 clear_expression_ids (); 4771 clear_expression_ids ();
4609 free_scc_vn (); 4772 free_scc_vn ();
4610 if (!do_fre) 4773 if (!do_fre)
4611 remove_dead_inserted_code (); 4774 remove_dead_inserted_code ();