comparison gcc/tree-ssa-reassoc.c @ 131:84e7813d76e9

description: gcc-8.2
author:      mir3636
date:        Thu, 25 Oct 2018 07:37:49 +0900
parents:     111:04ced10e8804
children:    1830386684a0
comparing:   111:04ced10e8804 with 131:84e7813d76e9
@@ -1,7 +1,7 @@
 /* Reassociation for trees.
-   Copyright (C) 2005-2017 Free Software Foundation, Inc.
+   Copyright (C) 2005-2018 Free Software Foundation, Inc.
    Contributed by Daniel Berlin <dan@dberlin.org>
 
 This file is part of GCC.
 
 GCC is free software; you can redistribute it and/or modify
@@ -230,11 +230,11 @@
 bool
 reassoc_remove_stmt (gimple_stmt_iterator *gsi)
 {
   gimple *stmt = gsi_stmt (*gsi);
 
-  if (!MAY_HAVE_DEBUG_STMTS || gimple_code (stmt) == GIMPLE_PHI)
+  if (!MAY_HAVE_DEBUG_BIND_STMTS || gimple_code (stmt) == GIMPLE_PHI)
     return gsi_remove (gsi, true);
 
   gimple_stmt_iterator prev = *gsi;
   gsi_prev (&prev);
   unsigned uid = gimple_uid (stmt);
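
(The guard now tests MAY_HAVE_DEBUG_BIND_STMTS because GCC's debug statements were split into bind statements and begin-stmt markers; only debug binds have uses that reassoc_remove_stmt needs to preserve when deleting a statement.)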
@@ -468,23 +468,31 @@
 }
 
 
 /* We want integer ones to end up last no matter what, since they are
    the ones we can do the most with.  */
-#define INTEGER_CONST_TYPE 1 << 3
+#define INTEGER_CONST_TYPE 1 << 4
+#define FLOAT_ONE_CONST_TYPE 1 << 3
 #define FLOAT_CONST_TYPE 1 << 2
 #define OTHER_CONST_TYPE 1 << 1
 
 /* Classify an invariant tree into integer, float, or other, so that
    we can sort them to be near other constants of the same type.  */
 static inline int
 constant_type (tree t)
 {
   if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
     return INTEGER_CONST_TYPE;
   else if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (t)))
-    return FLOAT_CONST_TYPE;
+    {
+      /* Sort -1.0 and 1.0 constants last, while in some cases
+         const_binop can't optimize some inexact operations, multiplication
+         by -1.0 or 1.0 can be always merged with others.  */
+      if (real_onep (t) || real_minus_onep (t))
+        return FLOAT_ONE_CONST_TYPE;
+      return FLOAT_CONST_TYPE;
+    }
   else
     return OTHER_CONST_TYPE;
 }
 
 /* qsort comparison function to sort operand entries PA and PB by rank
@@ -502,11 +510,11 @@
      to fold when added/multiplied/whatever are put next to each
      other.  Since all constants have rank 0, order them by type.  */
   if (oea->rank == 0)
     {
       if (constant_type (oeb->op) != constant_type (oea->op))
-        return constant_type (oeb->op) - constant_type (oea->op);
+        return constant_type (oea->op) - constant_type (oeb->op);
       else
         /* To make sorting result stable, we use unique IDs to determine
            order.  */
         return oeb->id > oea->id ? 1 : -1;
     }
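
Net effect of the two hunks above: constants sort ascending by classification bit, so integer constants (the largest bit) land last, immediately preceded by +/-1.0 floats, then the remaining floats. A minimal standalone sketch of the resulting order, in plain C with illustrative names (not GCC code):

    #include <stdio.h>
    #include <stdlib.h>

    /* Re-creation of the classification bits above, for illustration.  */
    #define INTEGER_CONST_TYPE   (1 << 4)
    #define FLOAT_ONE_CONST_TYPE (1 << 3)
    #define FLOAT_CONST_TYPE     (1 << 2)
    #define OTHER_CONST_TYPE     (1 << 1)

    struct oe { const char *name; int type; };

    /* Mirrors the corrected comparison: a - b sorts ascending, so entries
       with larger classification bits end up last.  */
    static int
    cmp (const void *pa, const void *pb)
    {
      const struct oe *a = pa, *b = pb;
      return a->type - b->type;
    }

    int
    main (void)
    {
      struct oe v[] = {
        { "42",  INTEGER_CONST_TYPE },
        { "1.0", FLOAT_ONE_CONST_TYPE },
        { "2.5", FLOAT_CONST_TYPE },
        { "&x",  OTHER_CONST_TYPE },
      };
      qsort (v, 4, sizeof *v, cmp);
      for (int i = 0; i < 4; i++)
        printf ("%s ", v[i].name);  /* prints: &x 2.5 1.0 42 */
      printf ("\n");
      return 0;
    }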
@@ -541,11 +549,11 @@
         return 1;
       if (!bbb)
         return -1;
       /* If neither is, compare bb_rank.  */
       if (bb_rank[bbb->index] != bb_rank[bba->index])
-        return bb_rank[bbb->index] - bb_rank[bba->index];
+        return (bb_rank[bbb->index] >> 16) - (bb_rank[bba->index] >> 16);
     }
 
   bool da = reassoc_stmt_dominates_stmt_p (stmta, stmtb);
   bool db = reassoc_stmt_dominates_stmt_p (stmtb, stmta);
   if (da != db)
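
bb_rank values are created as ++rank << 16 (see the setup loop at the bottom of this diff), and the element type is wider than the comparator's int return. Subtracting the raw values and letting the difference truncate to int can drop exactly the bits that distinguish two blocks; shifting first compares the ranks themselves. A hedged sketch of the failure mode, assuming 64-bit long and 32-bit int (plain C, not GCC code):

    #include <stdio.h>

    int
    main (void)
    {
      long ra = 1L << 16;      /* block rank 1, stored as 1 << 16 */
      long rb = 65537L << 16;  /* block rank 65537 */

      /* Raw subtraction: 65536 << 16 == 2^32 truncates to 0, so the
         comparator would wrongly report the two blocks as equal.  */
      int bad = (int) (rb - ra);

      /* Shifted subtraction compares the ranks themselves: 65536.  */
      int good = (int) ((rb >> 16) - (ra >> 16));

      printf ("bad=%d good=%d\n", bad, good);
      return 0;
    }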
@@ -608,11 +616,11 @@
   if (is_gimple_assign (stmt)
       && gimple_assign_rhs_code (stmt) == code
       && has_single_use (gimple_assign_lhs (stmt)))
     {
       tree rhs1 = gimple_assign_rhs1 (stmt);
-      tree rhs2 = gimple_assign_rhs1 (stmt);
+      tree rhs2 = gimple_assign_rhs2 (stmt);
       if (TREE_CODE (rhs1) == SSA_NAME
           && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1))
         return false;
       if (rhs2
           && TREE_CODE (rhs2) == SSA_NAME
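
(The replaced line was a copy-paste slip: rhs2 re-read operand 1, so the abnormal-PHI check that follows never inspected the statement's real second operand.)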
@@ -1596,11 +1604,11 @@
 
   if (dump_file && (dump_flags & TDF_DETAILS))
     {
       fprintf (dump_file, "searching for un-distribute opportunities ");
       print_generic_expr (dump_file,
-                          (*ops)[bitmap_first_set_bit (candidates)]->op, 0);
+                          (*ops)[bitmap_first_set_bit (candidates)]->op, TDF_NONE);
       fprintf (dump_file, " %d\n", nr_candidates);
     }
 
   /* Build linearized sub-operand lists and the counting table.  */
   cvec.create (0);
@@ -2158,12 +2166,17 @@
         case SSA_NAME:
           exp = arg0;
           continue;
         CASE_CONVERT:
           if (is_bool)
-            goto do_default;
-          if (TYPE_PRECISION (TREE_TYPE (arg0)) == 1)
+            {
+              if ((TYPE_PRECISION (exp_type) == 1
+                   || TREE_CODE (exp_type) == BOOLEAN_TYPE)
+                  && TYPE_PRECISION (TREE_TYPE (arg0)) > 1)
+                return;
+            }
+          else if (TYPE_PRECISION (TREE_TYPE (arg0)) == 1)
             {
               if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
                 is_bool = true;
               else
                 return;
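
The extra guard covers conversions landing in a boolean-like type: narrowing a wider value to a 1-bit or BOOLEAN_TYPE result is not a plain subrange of the source value, so the range walk must give up instead of peeling the cast. A small C illustration of why such narrowing is special (illustrative only, not the regression testcase):

    #include <stdbool.h>
    #include <stdio.h>

    int
    main (void)
    {
      int x = 2;
      bool b = x;              /* boolean conversion tests x != 0: b == 1 */
      unsigned bit0 = x & 1u;  /* a 1-bit integer keeps only bit 0: 0 */
      printf ("%d %u\n", b, bit0);  /* prints: 1 0 */
      return 0;
    }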
@@ -3025,11 +3038,12 @@
    a >= 0 && a <= b into (unsigned) a <= (unsigned) b  */
 
 static bool
 optimize_range_tests_var_bound (enum tree_code opcode, int first, int length,
                                 vec<operand_entry *> *ops,
-                                struct range_entry *ranges)
+                                struct range_entry *ranges,
+                                basic_block first_bb)
 {
   int i;
   bool any_changes = false;
   hash_map<tree, int> *map = NULL;
 
@@ -3131,10 +3145,80 @@
           gcc_unreachable ();
         }
 
       int *idx = map->get (rhs1);
       if (idx == NULL)
         continue;
 
+      /* maybe_optimize_range_tests allows statements without side-effects
+         in the basic blocks as long as they are consumed in the same bb.
+         Make sure rhs2's def stmt is not among them, otherwise we can't
+         use safely get_nonzero_bits on it.  E.g. in:
+          # RANGE [-83, 1] NONZERO 173
+          # k_32 = PHI <k_47(13), k_12(9)>
+         ...
+          if (k_32 >= 0)
+            goto <bb 5>; [26.46%]
+          else
+            goto <bb 9>; [73.54%]
+
+          <bb 5> [local count: 140323371]:
+          # RANGE [0, 1] NONZERO 1
+          _5 = (int) k_32;
+          # RANGE [0, 4] NONZERO 4
+          _21 = _5 << 2;
+          # RANGE [0, 4] NONZERO 4
+          iftmp.0_44 = (char) _21;
+          if (k_32 < iftmp.0_44)
+            goto <bb 6>; [84.48%]
+          else
+            goto <bb 9>; [15.52%]
+         the ranges on _5/_21/iftmp.0_44 are flow sensitive, assume that
+         k_32 >= 0.  If we'd optimize k_32 >= 0 to true and k_32 < iftmp.0_44
+         to (unsigned) k_32 < (unsigned) iftmp.0_44, then we would execute
+         those stmts even for negative k_32 and the value ranges would be no
+         longer guaranteed and so the optimization would be invalid.  */
+      while (opcode == ERROR_MARK)
+        {
+          gimple *g = SSA_NAME_DEF_STMT (rhs2);
+          basic_block bb2 = gimple_bb (g);
+          if (bb2
+              && bb2 != first_bb
+              && dominated_by_p (CDI_DOMINATORS, bb2, first_bb))
+            {
+              /* As an exception, handle a few common cases.  */
+              if (gimple_assign_cast_p (g)
+                  && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (g))))
+                {
+                  tree op0 = gimple_assign_rhs1 (g);
+                  if (TYPE_UNSIGNED (TREE_TYPE (op0))
+                      && (TYPE_PRECISION (TREE_TYPE (rhs2))
+                          > TYPE_PRECISION (TREE_TYPE (op0))))
+                    /* Zero-extension is always ok.  */
+                    break;
+                  else if (TYPE_PRECISION (TREE_TYPE (rhs2))
+                           == TYPE_PRECISION (TREE_TYPE (op0))
+                           && TREE_CODE (op0) == SSA_NAME)
+                    {
+                      /* Cast from signed to unsigned or vice versa.  Retry
+                         with the op0 as new rhs2.  */
+                      rhs2 = op0;
+                      continue;
+                    }
+                }
+              else if (is_gimple_assign (g)
+                       && gimple_assign_rhs_code (g) == BIT_AND_EXPR
+                       && TREE_CODE (gimple_assign_rhs2 (g)) == INTEGER_CST
+                       && !wi::neg_p (wi::to_wide (gimple_assign_rhs2 (g))))
+                /* Masking with INTEGER_CST with MSB clear is always ok
+                   too.  */
+                break;
+              rhs2 = NULL_TREE;
+            }
+          break;
+        }
+      if (rhs2 == NULL_TREE)
+        continue;
+
       wide_int nz = get_nonzero_bits (rhs2);
       if (wi::neg_p (nz))
         continue;
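
The while (opcode == ERROR_MARK) construct above is a restartable block rather than a true loop: every path through the body ends in break, and continue is only used to re-run the checks after peeling one cast off rhs2. The same control-flow idiom in a standalone form (plain C, illustrative):

    #include <stdio.h>

    int
    main (void)
    {
      unsigned v = 48;
      while (1)
        {
          if ((v & 1) == 0)
            {
              v >>= 1;   /* peel one factor of two and retry the checks */
              continue;
            }
          break;         /* checks passed: leave after a single pass */
        }
      printf ("odd part: %u\n", v);  /* prints: odd part: 3 */
      return 0;
    }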
@@ -3188,14 +3272,17 @@
       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
       gimple *g = gimple_build_assign (make_ssa_name (utype), NOP_EXPR, rhs1);
       gimple_set_uid (g, uid);
       rhs1 = gimple_assign_lhs (g);
       gsi_insert_before (&gsi, g, GSI_SAME_STMT);
-      g = gimple_build_assign (make_ssa_name (utype), NOP_EXPR, rhs2);
-      gimple_set_uid (g, uid);
-      rhs2 = gimple_assign_lhs (g);
-      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
+      if (!useless_type_conversion_p (utype, TREE_TYPE (rhs2)))
+        {
+          g = gimple_build_assign (make_ssa_name (utype), NOP_EXPR, rhs2);
+          gimple_set_uid (g, uid);
+          rhs2 = gimple_assign_lhs (g);
+          gsi_insert_before (&gsi, g, GSI_SAME_STMT);
+        }
       if (tree_swap_operands_p (rhs1, rhs2))
         {
           std::swap (rhs1, rhs2);
           ccode = swap_tree_comparison (ccode);
         }
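
(The useless_type_conversion_p guard pairs with the cast peeling earlier in the function: when rhs2 has been replaced by the operand of a signedness-only cast, it can already carry the unsigned type utype, and emitting another NOP_EXPR copy would add a pointless statement.)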
@@ -3259,15 +3346,16 @@
    operation between all the operands is OPCODE.
    If OPCODE is ERROR_MARK, optimize_range_tests is called from within
    maybe_optimize_range_tests for inter-bb range optimization.
    In that case if oe->op is NULL, oe->id is bb->index whose
    GIMPLE_COND is && or ||ed into the test, and oe->rank says
-   the actual opcode.  */
+   the actual opcode.
+   FIRST_BB is the first basic block if OPCODE is ERROR_MARK.  */
 
 static bool
 optimize_range_tests (enum tree_code opcode,
-                      vec<operand_entry *> *ops)
+                      vec<operand_entry *> *ops, basic_block first_bb)
 {
   unsigned int length = ops->length (), i, j, first;
   operand_entry *oe;
   struct range_entry *ranges;
   bool any_changes = false;
@@ -3343,11 +3431,11 @@
   any_changes |= optimize_range_tests_to_bit_test (opcode, first, length,
                                                    ops, ranges);
   any_changes |= optimize_range_tests_cmp_bitwise (opcode, first, length,
                                                    ops, ranges);
   any_changes |= optimize_range_tests_var_bound (opcode, first, length, ops,
-                                                 ranges);
+                                                 ranges, first_bb);
 
   if (any_changes && opcode != ERROR_MARK)
     {
       j = 0;
       FOR_EACH_VEC_ELT (*ops, i, oe)
@@ -3612,11 +3700,11 @@
   stmt = last_stmt (bb);
   if (stmt == NULL
       || (gimple_code (stmt) != GIMPLE_COND
           && (backward || !final_range_test_p (stmt)))
       || gimple_visited_p (stmt)
-      || stmt_could_throw_p (stmt)
+      || stmt_could_throw_p (cfun, stmt)
       || *other_bb == bb)
     return false;
   is_cond = gimple_code (stmt) == GIMPLE_COND;
   if (is_cond)
     {
@@ -3868,11 +3956,11 @@
   else if (final_range_test_p (stmt))
     other_bb = single_succ (first_bb);
   else
     return cfg_cleanup_needed;
 
-  if (stmt_could_throw_p (stmt))
+  if (stmt_could_throw_p (cfun, stmt))
     return cfg_cleanup_needed;
 
   /* As relative ordering of post-dominator sons isn't fixed,
      maybe_optimize_range_tests can be called first on any
      bb in the range we want to optimize.  So, start searching
@@ -4090,11 +4178,11 @@
       bbinfo.safe_push (bb_ent);
       if (bb == first_bb)
         break;
     }
   if (ops.length () > 1)
-    any_changes = optimize_range_tests (ERROR_MARK, &ops);
+    any_changes = optimize_range_tests (ERROR_MARK, &ops, first_bb);
   if (any_changes)
     {
       unsigned int idx, max_idx = 0;
       /* update_ops relies on has_single_use predicates returning the
          same values as it did during get_ops earlier.  Additionally it
@@ -5019,18 +5107,18 @@
 
   if (TREE_CODE (binlhs) == SSA_NAME)
     {
       binlhsdef = SSA_NAME_DEF_STMT (binlhs);
       binlhsisreassoc = (is_reassociable_op (binlhsdef, rhscode, loop)
-                         && !stmt_could_throw_p (binlhsdef));
+                         && !stmt_could_throw_p (cfun, binlhsdef));
     }
 
   if (TREE_CODE (binrhs) == SSA_NAME)
     {
       binrhsdef = SSA_NAME_DEF_STMT (binrhs);
       binrhsisreassoc = (is_reassociable_op (binrhsdef, rhscode, loop)
-                         && !stmt_could_throw_p (binrhsdef));
+                         && !stmt_could_throw_p (cfun, binrhsdef));
     }
 
   /* If the LHS is not reassociable, but the RHS is, we need to swap
      them.  If neither is reassociable, there is nothing we can do, so
      just put them in the ops vector.  If the LHS is reassociable,
@@ -5623,10 +5711,11 @@
     {
       tree arg0, arg1;
       switch (gimple_call_combined_fn (old_call))
         {
         CASE_CFN_COPYSIGN:
+        CASE_CFN_COPYSIGN_FN:
           arg0 = gimple_call_arg (old_call, 0);
           arg1 = gimple_call_arg (old_call, 1);
           /* The first argument of copysign must be a constant,
              otherwise there's nothing to do.  */
           if (TREE_CODE (arg0) == REAL_CST)
@@ -5760,11 +5849,11 @@
   for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
     {
       stmt = gsi_stmt (gsi);
 
       if (is_gimple_assign (stmt)
-          && !stmt_could_throw_p (stmt))
+          && !stmt_could_throw_p (cfun, stmt))
         {
           tree lhs, rhs1, rhs2;
           enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
 
           /* If this is not a gimple binary expression, there is
@@ -5844,11 +5933,11 @@
           if (rhs_code == BIT_IOR_EXPR || rhs_code == BIT_AND_EXPR)
             {
               if (is_vector)
                 optimize_vec_cond_expr (rhs_code, &ops);
               else
-                optimize_range_tests (rhs_code, &ops);
+                optimize_range_tests (rhs_code, &ops, NULL);
             }
 
           if (rhs_code == MULT_EXPR && !is_vector)
             {
               attempt_builtin_copysign (&ops);
@@ -6128,11 +6217,11 @@
       insert_operand_rank (name, ++rank);
     }
 
   /* Set up rank for each BB  */
   for (i = 0; i < n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS; i++)
     bb_rank[bbs[i]] = ++rank << 16;
 
   free (bbs);
   calculate_dominance_info (CDI_POST_DOMINATORS);
   plus_negates = vNULL;
 }