comparison of gcc/cfgexpand.c @ 63:b7f97abdc517 (gcc-4.6-20100522)

update gcc from gcc-4.5.0 to gcc-4.6

author:   ryoma <e075725@ie.u-ryukyu.ac.jp>
date:     Mon, 24 May 2010 12:47:05 +0900
parents:  77e2b8dfacca
children: f6334be47118
@@ -1,7 +1,7 @@
 /* A pass for lowering trees to RTL.
-   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
+   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
    Free Software Foundation, Inc.
 
 This file is part of GCC.
 
 GCC is free software; you can redistribute it and/or modify
@@ -34,18 +34,21 @@
 #include "tree-dump.h"
 #include "tree-pass.h"
 #include "except.h"
 #include "flags.h"
 #include "diagnostic.h"
+#include "tree-pretty-print.h"
+#include "gimple-pretty-print.h"
 #include "toplev.h"
 #include "debug.h"
 #include "params.h"
 #include "tree-inline.h"
 #include "value-prof.h"
 #include "target.h"
 #include "ssaexpand.h"
-
+#include "bitmap.h"
+#include "sbitmap.h"
 
 /* This variable holds information helping the rewriting of SSA trees
    into RTL.  */
 struct ssaexpand SA;
 
@@ -75,61 +78,27 @@
                             gimple_assign_rhs1 (stmt));
   else if (grhs_class == GIMPLE_SINGLE_RHS)
     {
       t = gimple_assign_rhs1 (stmt);
       /* Avoid modifying this tree in place below.  */
-      if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
-          && gimple_location (stmt) != EXPR_LOCATION (t))
+      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
+           && gimple_location (stmt) != EXPR_LOCATION (t))
+          || (gimple_block (stmt)
+              && currently_expanding_to_rtl
+              && EXPR_P (t)
+              && gimple_block (stmt) != TREE_BLOCK (t)))
         t = copy_node (t);
     }
   else
     gcc_unreachable ();
 
   if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
     SET_EXPR_LOCATION (t, gimple_location (stmt));
+  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
+    TREE_BLOCK (t) = gimple_block (stmt);
 
   return t;
 }
-
-
-/* Verify that there is exactly single jump instruction since last and attach
-   REG_BR_PROB note specifying probability.
-   ??? We really ought to pass the probability down to RTL expanders and let it
-   re-distribute it when the conditional expands into multiple conditionals.
-   This is however difficult to do.  */
-void
-add_reg_br_prob_note (rtx last, int probability)
-{
-  if (profile_status == PROFILE_ABSENT)
-    return;
-  for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
-    if (JUMP_P (last))
-      {
-        /* It is common to emit condjump-around-jump sequence when we don't know
-           how to reverse the conditional.  Special case this.  */
-        if (!any_condjump_p (last)
-            || !JUMP_P (NEXT_INSN (last))
-            || !simplejump_p (NEXT_INSN (last))
-            || !NEXT_INSN (NEXT_INSN (last))
-            || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
-            || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
-            || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
-            || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
-          goto failed;
-        gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
-        add_reg_note (last, REG_BR_PROB,
-                      GEN_INT (REG_BR_PROB_BASE - probability));
-        return;
-      }
-  if (!last || !JUMP_P (last) || !any_condjump_p (last))
-    goto failed;
-  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
-  add_reg_note (last, REG_BR_PROB, GEN_INT (probability));
-  return;
- failed:
-  if (dump_file)
-    fprintf (dump_file, "Failed to add probability note\n");
-}
 
 
 #ifndef STACK_ALIGNMENT_NEEDED
 #define STACK_ALIGNMENT_NEEDED 1
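The hunk above does two things: gimple_assign_rhs_to_tree now also copies a shared tree before retargeting its TREE_BLOCK, and add_reg_br_prob_note is removed entirely (probabilities are now passed straight to the RTL expanders, see the expand_gimple_cond hunks further down, which is what the removed function's ??? comment asked for). A minimal sketch of the copy-before-mutate pattern the widened guard enforces, using a hypothetical node type rather than GCC's tree:

    #include <stdlib.h>

    struct node { const void *block; int code; };

    /* Copy a possibly shared node before rewriting its block pointer,
       so other statements referencing the original still see the old
       block.  malloc/abort stand in for copy_node here.  */
    static struct node *
    set_block (struct node *t, const void *new_block)
    {
      if (t->block != new_block)
        {
          struct node *copy = malloc (sizeof *copy);
          if (copy == NULL)
            abort ();
          *copy = *t;
          t = copy;
        }
      t->block = new_block;
      return t;
    }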
@@ -532,23 +501,23 @@
          points-to sets later, so use GGC alloc.  */
       part = BITMAP_GGC_ALLOC ();
       for (j = i; j != EOC; j = stack_vars[j].next)
         {
           tree decl = stack_vars[j].decl;
-          unsigned int uid = DECL_UID (decl);
+          unsigned int uid = DECL_PT_UID (decl);
           /* We should never end up partitioning SSA names (though they
              may end up on the stack).  Neither should we allocate stack
              space to something that is unused and thus unreferenced.  */
           gcc_assert (DECL_P (decl)
-                      && referenced_var_lookup (uid));
+                      && referenced_var_lookup (DECL_UID (decl)));
           bitmap_set_bit (part, uid);
           *((bitmap *) pointer_map_insert (decls_to_partitions,
                                            (void *)(size_t) uid)) = part;
           *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                          decl)) = name;
         }
 
       /* Make the SSA name point to all partition members.  */
       pi = get_ptr_info (name);
-      pt_solution_set (&pi->pt, part);
+      pt_solution_set (&pi->pt, part, false, false);
     }
 
@@ -555,3 +524,3 @@
   /* Make all points-to sets that contain one member of a partition
      contain all members of the partition.  */
   if (decls_to_partitions)
@@ -571,12 +540,10 @@
           add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                          visited, temp);
         }
 
       add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
-                                     decls_to_partitions, visited, temp);
-      add_partitioned_vars_to_ptset (&cfun->gimple_df->callused,
                                      decls_to_partitions, visited, temp);
 
       pointer_set_destroy (visited);
       pointer_map_destroy (decls_to_partitions);
       BITMAP_FREE (temp);
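For orientation: the code above runs when several locals with non-overlapping lifetimes have been folded into one stack partition, so any pointer into the slot must be treated as possibly pointing to every member (hence the switch to DECL_PT_UID and the extended pt_solution_set call). A small, hypothetical example of source that can produce such a partition at -O2:

    extern int consume (int *);

    int
    f (int k)
    {
      if (k)
        {
          int a[64];           /* a and b have disjoint lifetimes, so   */
          return consume (a);  /* the expander may give them a single   */
        }                      /* partition; pointers to either must    */
      else                     /* then be assumed to reach both.        */
        {
          int b[64];
          return consume (b);
        }
    }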
@@ -1009,10 +976,18 @@
   else if (use_register_for_decl (var))
     {
       if (really_expand)
         expand_one_register_var (origvar);
     }
+  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
+    {
+      if (really_expand)
+        {
+          error ("size of variable %q+D is too large", var);
+          expand_one_error_var (var);
+        }
+    }
   else if (defer_stack_allocation (var, toplevel))
     add_stack_var (origvar);
   else
     {
       if (really_expand)
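The new host_integerp (DECL_SIZE_UNIT (var), 1) test makes expansion reject variables whose size in bytes does not fit an unsigned host-wide integer, reporting an error instead of misbehaving later. Front ends catch most oversized objects themselves, so treat this as a backstop; a hedged, hypothetical illustration of input in that spirit on an ILP32 target:

    /* Hypothetical: each member is representable, but the enclosing
       object's DECL_SIZE_UNIT overflows a 32-bit size, so expand would
       report "size of variable 'big' is too large" (when the front end
       has not already rejected the type).  */
    struct huge { char a[2000000000u]; char b[2000000000u]; char c[2000000000u]; };

    void
    g (void)
    {
      volatile struct huge big;
      big.a[0] = 1;
    }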
@@ -1313,10 +1288,11 @@
 
 static void
 expand_used_vars (void)
 {
   tree t, next, outer_block = DECL_INITIAL (current_function_decl);
+  tree maybe_local_decls = NULL_TREE;
   unsigned i;
 
   /* Compute the phase of the stack frame for this function.  */
   {
     int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
@@ -1361,12 +1337,11 @@
 
       /* Expanded above already.  */
       if (is_gimple_reg (var))
         {
           TREE_USED (var) = 0;
-          ggc_free (t);
-          continue;
+          goto next;
         }
       /* We didn't set a block for static or extern because it's hard
          to tell the difference between a global variable (re)declared
          in a local scope, and one that's really declared there to
          begin with.  And it doesn't really matter much, since we're
@@ -1383,24 +1358,33 @@
       /* Finally, mark all variables on the list as used.  We'll use
          this in a moment when we expand those associated with scopes.  */
       TREE_USED (var) = 1;
 
       if (expand_now)
-        {
-          expand_one_var (var, true, true);
-          if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
-            {
-              rtx rtl = DECL_RTL_IF_SET (var);
-
-              /* Keep artificial non-ignored vars in cfun->local_decls
-                 chain until instantiate_decls.  */
-              if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
-                {
-                  TREE_CHAIN (t) = cfun->local_decls;
-                  cfun->local_decls = t;
-                  continue;
-                }
-            }
-        }
+        expand_one_var (var, true, true);
+
+    next:
+      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
+        {
+          rtx rtl = DECL_RTL_IF_SET (var);
+
+          /* Keep artificial non-ignored vars in cfun->local_decls
+             chain until instantiate_decls.  */
+          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
+            {
+              TREE_CHAIN (t) = cfun->local_decls;
+              cfun->local_decls = t;
+              continue;
+            }
+          else if (rtl == NULL_RTX)
+            {
+              /* If rtl isn't set yet, which can happen e.g. with
+                 -fstack-protector, retry before returning from this
+                 function.  */
+              TREE_CHAIN (t) = maybe_local_decls;
+              maybe_local_decls = t;
+              continue;
+            }
+        }
 
       ggc_free (t);
     }
@@ -1454,10 +1438,32 @@
         }
 
       expand_stack_vars (NULL);
 
       fini_vars_expansion ();
     }
+
+  /* If there were any artificial non-ignored vars without rtl
+     found earlier, see if deferred stack allocation hasn't assigned
+     rtl to them.  */
+  for (t = maybe_local_decls; t; t = next)
+    {
+      tree var = TREE_VALUE (t);
+      rtx rtl = DECL_RTL_IF_SET (var);
+
+      next = TREE_CHAIN (t);
+
+      /* Keep artificial non-ignored vars in cfun->local_decls
+         chain until instantiate_decls.  */
+      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
+        {
+          TREE_CHAIN (t) = cfun->local_decls;
+          cfun->local_decls = t;
+          continue;
+        }
+
+      ggc_free (t);
+    }
 
   /* If the target requires that FRAME_OFFSET be aligned, do it.  */
   if (STACK_ALIGNMENT_NEEDED)
     {
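The added loop is the second half of a two-pass scheme: decls that had no RTL on the first sweep were parked on maybe_local_decls and are re-examined once deferred stack allocation has run. A generic sketch of that idiom, with a hypothetical cell type standing in for the TREE_CHAIN lists:

    struct cell { struct cell *next; int ready; };

    static struct cell *kept;

    /* Pass 1 parks unready items on a retry list; after the deferred
       work has run, pass 2 revisits them and simply drops whatever is
       still unready, as expand_used_vars does with ggc_free.  */
    static void
    two_pass (struct cell *list, void (*deferred_work) (void))
    {
      struct cell *retry = 0, *c, *next;

      for (c = list; c; c = next)
        {
          next = c->next;
          if (c->ready)
            { c->next = kept; kept = c; }
          else
            { c->next = retry; retry = c; }
        }

      deferred_work ();  /* may flip ->ready on parked cells */

      for (c = retry; c; c = next)
        {
          next = c->next;
          if (c->ready)
            { c->next = kept; kept = c; }
        }
    }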
@@ -1653,12 +1659,12 @@
 
   /* We can either have a pure conditional jump with one fallthru edge or
      two-way jump that needs to be decomposed into two basic blocks.  */
   if (false_edge->dest == bb->next_bb)
     {
-      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest));
-      add_reg_br_prob_note (last, true_edge->probability);
+      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
+                true_edge->probability);
       maybe_dump_rtl_for_gimple_stmt (stmt, last);
       if (true_edge->goto_locus)
         {
           set_curr_insn_source_location (true_edge->goto_locus);
           set_curr_insn_block (true_edge->goto_block);
@@ -1669,12 +1675,12 @@
       maybe_cleanup_end_of_block (false_edge, last);
       return NULL;
     }
   if (true_edge->dest == bb->next_bb)
     {
-      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest));
-      add_reg_br_prob_note (last, false_edge->probability);
+      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
+                   false_edge->probability);
       maybe_dump_rtl_for_gimple_stmt (stmt, last);
       if (false_edge->goto_locus)
         {
           set_curr_insn_source_location (false_edge->goto_locus);
           set_curr_insn_block (false_edge->goto_block);
@@ -1684,12 +1690,12 @@
   true_edge->flags |= EDGE_FALLTHRU;
   maybe_cleanup_end_of_block (true_edge, last);
   return NULL;
     }
 
-  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest));
-  add_reg_br_prob_note (last, true_edge->probability);
+  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
+            true_edge->probability);
   last = get_last_insn ();
   if (false_edge->goto_locus)
     {
       set_curr_insn_source_location (false_edge->goto_locus);
       set_curr_insn_block (false_edge->goto_block);
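All three call sites now forward the edge probability into jumpif_1/jumpifnot_1 instead of patching a REG_BR_PROB note onto the last jump after the fact. At the source level such probabilities come from profile feedback or hints; for example:

    extern void cold_path (void);

    void
    h (int x)
    {
      /* The 0 hint biases true_edge->probability, which
         expand_gimple_cond now hands straight to jumpif_1.  */
      if (__builtin_expect (x == 0, 0))
        cold_path ();
    }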
@@ -1736,19 +1742,35 @@
 expand_call_stmt (gimple stmt)
 {
   tree exp;
   tree lhs = gimple_call_lhs (stmt);
   size_t i;
+  bool builtin_p;
+  tree decl;
 
   exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
 
   CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
+  decl = gimple_call_fndecl (stmt);
+  builtin_p = decl && DECL_BUILT_IN (decl);
+
   TREE_TYPE (exp) = gimple_call_return_type (stmt);
   CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);
 
   for (i = 0; i < gimple_call_num_args (stmt); i++)
-    CALL_EXPR_ARG (exp, i) = gimple_call_arg (stmt, i);
+    {
+      tree arg = gimple_call_arg (stmt, i);
+      gimple def;
+      /* TER addresses into arguments of builtin functions so we have a
+         chance to infer more correct alignment information.  See PR39954.  */
+      if (builtin_p
+          && TREE_CODE (arg) == SSA_NAME
+          && (def = get_gimple_for_ssa_name (arg))
+          && gimple_assign_rhs_code (def) == ADDR_EXPR)
+        arg = gimple_assign_rhs1 (def);
+      CALL_EXPR_ARG (exp, i) = arg;
+    }
 
   if (gimple_has_side_effects (stmt))
     TREE_SIDE_EFFECTS (exp) = 1;
 
   if (gimple_call_nothrow_p (stmt))
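The new argument loop lets expansion look through a TERed SSA name to its defining ADDR_EXPR, so a builtin sees the address of the underlying object and can exploit its alignment (PR39954). A sketch of code that benefits, assuming the declared 16-byte alignment lets memcpy expand to wider moves:

    #include <string.h>

    static char buf[32] __attribute__ ((aligned (16)));

    void
    fill (const char *src)
    {
      /* The destination argument is really &buf; recovering the
         ADDR_EXPR tells expand that it is 16-byte aligned.  */
      memcpy (buf, src, 32);
    }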
@@ -2213,10 +2235,11 @@
     {
     case tcc_expression:
       switch (TREE_CODE (exp))
         {
         case COND_EXPR:
+        case DOT_PROD_EXPR:
           goto ternary;
 
         case TRUTH_ANDIF_EXPR:
         case TRUTH_ORIF_EXPR:
         case TRUTH_AND_EXPR:
@@ -2321,21 +2344,25 @@
           || !DECL_NAME (exp)
           || DECL_HARD_REGISTER (exp)
           || mode == VOIDmode)
         return NULL;
 
-      op0 = DECL_RTL (exp);
-      SET_DECL_RTL (exp, NULL);
+      op0 = make_decl_rtl_for_debug (exp);
       if (!MEM_P (op0)
           || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
           || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
         return NULL;
     }
   else
     op0 = copy_rtx (op0);
 
-  if (GET_MODE (op0) == BLKmode)
+  if (GET_MODE (op0) == BLKmode
+      /* If op0 is not BLKmode, but BLKmode is, adjust_mode
+         below would ICE.  While it is likely a FE bug,
+         try to be robust here.  See PR43166.  */
+      || mode == BLKmode
+      || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
     {
       gcc_assert (MEM_P (op0));
       op0 = adjust_address_nv (op0, mode, 0);
       return op0;
     }
@@ -2352,11 +2379,14 @@
   if (mode == inner_mode)
     return op0;
 
   if (inner_mode == VOIDmode)
     {
-      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
+      if (TREE_CODE (exp) == SSA_NAME)
+        inner_mode = TYPE_MODE (TREE_TYPE (exp));
+      else
+        inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
       if (mode == inner_mode)
         return op0;
     }
 
   if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
@@ -2368,10 +2398,11 @@
       else
         op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
     }
   else if (FLOAT_MODE_P (mode))
     {
+      gcc_assert (TREE_CODE (exp) != SSA_NAME);
       if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
         op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
       else
         op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
     }
@@ -2470,11 +2501,12 @@
 
         if (offset)
           {
             enum machine_mode addrmode, offmode;
 
-            gcc_assert (MEM_P (op0));
+            if (!MEM_P (op0))
+              return NULL;
 
             op0 = XEXP (op0, 0);
             addrmode = GET_MODE (op0);
             if (addrmode == VOIDmode)
               addrmode = Pmode;
@@ -2530,16 +2562,17 @@
           return op0;
 
         if (bitpos < 0)
           return NULL;
 
+        if (GET_MODE (op0) == BLKmode)
+          return NULL;
+
         if ((bitpos % BITS_PER_UNIT) == 0
             && bitsize == GET_MODE_BITSIZE (mode1))
           {
             enum machine_mode opmode = GET_MODE (op0);
-
-            gcc_assert (opmode != BLKmode);
 
             if (opmode == VOIDmode)
               opmode = mode1;
 
             /* This condition may hold if we're expanding the address
@@ -2549,12 +2582,13 @@
                crash, and the address doesn't really exist, so just
                drop it.  */
             if (bitpos >= GET_MODE_BITSIZE (opmode))
               return NULL;
 
-            return simplify_gen_subreg (mode, op0, opmode,
-                                        bitpos / BITS_PER_UNIT);
+            if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
+              return simplify_gen_subreg (mode, op0, opmode,
+                                          bitpos / BITS_PER_UNIT);
           }
 
         return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
                                      && TYPE_UNSIGNED (TREE_TYPE (exp))
                                      ? SIGN_EXTRACT
@@ -2584,10 +2618,26 @@
         return gen_rtx_UNSIGNED_FIX (mode, op0);
       else
         return gen_rtx_FIX (mode, op0);
 
     case POINTER_PLUS_EXPR:
+      /* For the rare target where pointers are not the same size as
+         size_t, we need to check for mis-matched modes and correct
+         the addend.  */
+      if (op0 && op1
+          && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
+          && GET_MODE (op0) != GET_MODE (op1))
+        {
+          if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
+            op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
+          else
+            /* We always sign-extend, regardless of the signedness of
+               the operand, because the operand is always unsigned
+               here even if the original C expression is signed.  */
+            op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
+        }
+      /* Fall through.  */
     case PLUS_EXPR:
       return gen_rtx_PLUS (mode, op0, op1);
 
     case MINUS_EXPR:
       return gen_rtx_MINUS (mode, op0, op1);
@@ -2900,22 +2950,108 @@
         /* ??? Maybe handle some builtins?  */
         return NULL;
 
     case SSA_NAME:
       {
-        int part = var_to_partition (SA.map, exp);
-
-        if (part == NO_PARTITION)
-          return NULL;
-
-        gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
-
-        op0 = SA.partition_to_pseudo[part];
+        gimple g = get_gimple_for_ssa_name (exp);
+        if (g)
+          {
+            op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
+            if (!op0)
+              return NULL;
+          }
+        else
+          {
+            int part = var_to_partition (SA.map, exp);
+
+            if (part == NO_PARTITION)
+              return NULL;
+
+            gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
+
+            op0 = SA.partition_to_pseudo[part];
+          }
         goto adjust_mode;
       }
 
     case ERROR_MARK:
+      return NULL;
+
+    /* Vector stuff.  For most of the codes we don't have rtl codes.  */
+    case REALIGN_LOAD_EXPR:
+    case REDUC_MAX_EXPR:
+    case REDUC_MIN_EXPR:
+    case REDUC_PLUS_EXPR:
+    case VEC_COND_EXPR:
+    case VEC_EXTRACT_EVEN_EXPR:
+    case VEC_EXTRACT_ODD_EXPR:
+    case VEC_INTERLEAVE_HIGH_EXPR:
+    case VEC_INTERLEAVE_LOW_EXPR:
+    case VEC_LSHIFT_EXPR:
+    case VEC_PACK_FIX_TRUNC_EXPR:
+    case VEC_PACK_SAT_EXPR:
+    case VEC_PACK_TRUNC_EXPR:
+    case VEC_RSHIFT_EXPR:
+    case VEC_UNPACK_FLOAT_HI_EXPR:
+    case VEC_UNPACK_FLOAT_LO_EXPR:
+    case VEC_UNPACK_HI_EXPR:
+    case VEC_UNPACK_LO_EXPR:
+    case VEC_WIDEN_MULT_HI_EXPR:
+    case VEC_WIDEN_MULT_LO_EXPR:
+      return NULL;
+
+    /* Misc codes.  */
+    case ADDR_SPACE_CONVERT_EXPR:
+    case FIXED_CONVERT_EXPR:
+    case OBJ_TYPE_REF:
+    case WITH_SIZE_EXPR:
+      return NULL;
+
+    case DOT_PROD_EXPR:
+      if (SCALAR_INT_MODE_P (GET_MODE (op0))
+          && SCALAR_INT_MODE_P (mode))
+        {
+          if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
+            op0 = gen_rtx_ZERO_EXTEND (mode, op0);
+          else
+            op0 = gen_rtx_SIGN_EXTEND (mode, op0);
+          if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
+            op1 = gen_rtx_ZERO_EXTEND (mode, op1);
+          else
+            op1 = gen_rtx_SIGN_EXTEND (mode, op1);
+          op0 = gen_rtx_MULT (mode, op0, op1);
+          return gen_rtx_PLUS (mode, op0, op2);
+        }
+      return NULL;
+
+    case WIDEN_MULT_EXPR:
+      if (SCALAR_INT_MODE_P (GET_MODE (op0))
+          && SCALAR_INT_MODE_P (mode))
+        {
+          enum machine_mode inner_mode = GET_MODE (op0);
+          if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
+            op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
+          else
+            op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
+          if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
+            op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
+          else
+            op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
+          return gen_rtx_MULT (mode, op0, op1);
+        }
+      return NULL;
+
+    case WIDEN_SUM_EXPR:
+      if (SCALAR_INT_MODE_P (GET_MODE (op0))
+          && SCALAR_INT_MODE_P (mode))
+        {
+          if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
+            op0 = gen_rtx_ZERO_EXTEND (mode, op0);
+          else
+            op0 = gen_rtx_SIGN_EXTEND (mode, op0);
+          return gen_rtx_PLUS (mode, op0, op1);
+        }
       return NULL;
 
     default:
     flag_unsupported:
 #ifdef ENABLE_CHECKING
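Of the newly listed codes, DOT_PROD_EXPR, WIDEN_MULT_EXPR and WIDEN_SUM_EXPR get real debug expansions: each operand is zero- or sign-extended according to its type and then combined in the wide mode. Source that the widening-multiply recognition typically turns into WIDEN_MULT_EXPR:

    /* (long long) a * b is a widening multiply; for debug statements it
       is now rebuilt as extend, extend, MULT in the wider mode.  */
    long long
    wmul (int a, int b)
    {
      return (long long) a * b;
    }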
@@ -3064,10 +3200,109 @@
   for (; !gsi_end_p (gsi); gsi_next (&gsi))
     {
       basic_block new_bb;
 
       stmt = gsi_stmt (gsi);
+
+      /* If this statement is a non-debug one, and we generate debug
+         insns, then this one might be the last real use of a TERed
+         SSA_NAME, but where there are still some debug uses further
+         down.  Expanding the current SSA name in such further debug
+         uses by their RHS might lead to wrong debug info, as coalescing
+         might make the operands of such RHS be placed into the same
+         pseudo as something else.  Like so:
+           a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
+           use(a_1);
+           a_2 = ...
+           #DEBUG ... => a_1
+         As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
+         If we now would expand a_1 by it's RHS (a_0 + 1) in the debug use,
+         the write to a_2 would actually have clobbered the place which
+         formerly held a_0.
+
+         So, instead of that, we recognize the situation, and generate
+         debug temporaries at the last real use of TERed SSA names:
+           a_1 = a_0 + 1;
+           #DEBUG #D1 => a_1
+           use(a_1);
+           a_2 = ...
+           #DEBUG ... => #D1
+      */
+      if (MAY_HAVE_DEBUG_INSNS
+          && SA.values
+          && !is_gimple_debug (stmt))
+        {
+          ssa_op_iter iter;
+          tree op;
+          gimple def;
+
+          location_t sloc = get_curr_insn_source_location ();
+          tree sblock = get_curr_insn_block ();
+
+          /* Look for SSA names that have their last use here (TERed
+             names always have only one real use).  */
+          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
+            if ((def = get_gimple_for_ssa_name (op)))
+              {
+                imm_use_iterator imm_iter;
+                use_operand_p use_p;
+                bool have_debug_uses = false;
+
+                FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
+                  {
+                    if (gimple_debug_bind_p (USE_STMT (use_p)))
+                      {
+                        have_debug_uses = true;
+                        break;
+                      }
+                  }
+
+                if (have_debug_uses)
+                  {
+                    /* OP is a TERed SSA name, with DEF it's defining
+                       statement, and where OP is used in further debug
+                       instructions.  Generate a debug temporary, and
+                       replace all uses of OP in debug insns with that
+                       temporary.  */
+                    gimple debugstmt;
+                    tree value = gimple_assign_rhs_to_tree (def);
+                    tree vexpr = make_node (DEBUG_EXPR_DECL);
+                    rtx val;
+                    enum machine_mode mode;
+
+                    set_curr_insn_source_location (gimple_location (def));
+                    set_curr_insn_block (gimple_block (def));
+
+                    DECL_ARTIFICIAL (vexpr) = 1;
+                    TREE_TYPE (vexpr) = TREE_TYPE (value);
+                    if (DECL_P (value))
+                      mode = DECL_MODE (value);
+                    else
+                      mode = TYPE_MODE (TREE_TYPE (value));
+                    DECL_MODE (vexpr) = mode;
+
+                    val = gen_rtx_VAR_LOCATION
+                            (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
+
+                    val = emit_debug_insn (val);
+
+                    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
+                      {
+                        if (!gimple_debug_bind_p (debugstmt))
+                          continue;
+
+                        FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
+                          SET_USE (use_p, vexpr);
+
+                        update_stmt (debugstmt);
+                      }
+                  }
+              }
+          set_curr_insn_source_location (sloc);
+          set_curr_insn_block (sblock);
+        }
+
       currently_expanding_gimple_stmt = stmt;
 
       /* Expand this statement, then evaluate the resulting RTL and
         fixup the CFG accordingly.  */
       if (gimple_code (stmt) == GIMPLE_COND)
@@ -3115,10 +3350,17 @@
                  is expected.  */
               INSN_VAR_LOCATION_LOC (val) = const0_rtx;
               maybe_dump_rtl_for_gimple_stmt (stmt, last);
               INSN_VAR_LOCATION_LOC (val) = (rtx)value;
             }
+
+          /* In order not to generate too many debug temporaries,
+             we delink all uses of debug statements we already expanded.
+             Therefore debug statements between definition and real
+             use of TERed SSA names will continue to use the SSA name,
+             and not be replaced with debug temps.  */
+          delink_stmt_imm_use (stmt);
 
           gsi = nsi;
           gsi_next (&nsi);
           if (gsi_end_p (nsi))
             break;
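The block comment above already walks the a_0/a_1 case in GIMPLE terms; at the C level the situation arises whenever a single-use expression is TERed while later debug binds (at -g with optimization) still refer to it, roughly:

    extern void use (int);

    void
    k (int a0)
    {
      int a1 = a0 + 1;   /* single real use below, so a1 is TERed */
      use (a1);
      /* Any debug binds mentioning a1 past this point now get a debug
         temporary (#D1 => a1) instead of re-expanding a0 + 1, whose
         operand's pseudo may have been reused by a later value.  */
    }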
@@ -3399,11 +3641,12 @@
 
   FOR_EACH_BB (bb)
     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
       {
         gimple stmt = gsi_stmt (gsi);
-        walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
+        if (!is_gimple_debug (stmt))
+          walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
       }
 }
 
 /* This function sets crtl->args.internal_arg_pointer to a virtual
    register if DRAP is needed.  Local register allocator will replace
@@ -3540,14 +3783,16 @@
   /* Honor stack protection warnings.  */
   if (warn_stack_protect)
     {
       if (cfun->calls_alloca)
         warning (OPT_Wstack_protector,
-                 "not protecting local variables: variable length buffer");
+                 "stack protector not protecting local variables: "
+                 "variable length buffer");
       if (has_short_buffer && !crtl->stack_protect_guard)
         warning (OPT_Wstack_protector,
-                 "not protecting function: no buffer at least %d bytes long",
+                 "stack protector not protecting function: "
+                 "all local arrays are less than %d bytes long",
                  (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
     }
 
   /* Set up parameters and prepare for return, for the function.  */
   expand_function_start (current_function_decl);
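The reworded diagnostics fire under -fstack-protector -Wstack-protector; for instance, a variable-length buffer defeats protection of the surrounding frame:

    extern void read_into (char *, int);

    void
    vla_user (int n)
    {
      char buf[n];   /* warns: stack protector not protecting local
                        variables: variable length buffer */
      read_into (buf, n);
    }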
@@ -3612,10 +3857,13 @@
   if (MAY_HAVE_DEBUG_INSNS)
     expand_debug_locations ();
 
   execute_free_datastructures ();
   finish_out_of_ssa (&SA);
+
+  /* We are no longer in SSA form.  */
+  cfun->gimple_df->in_ssa_p = false;
 
   /* Expansion is used by optimization passes too, set maybe_hot_insn_p
      conservatively to true until they are all profile aware.  */
   pointer_map_destroy (lab_rtx_for_bb);
   free_histograms ();
@@ -3733,11 +3981,12 @@
   gimple_expand_cfg,                    /* execute */
   NULL,                                 /* sub */
   NULL,                                 /* next */
   0,                                    /* static_pass_number */
   TV_EXPAND,                            /* tv_id */
-  PROP_ssa | PROP_gimple_leh | PROP_cfg,/* properties_required */
+  PROP_ssa | PROP_gimple_leh | PROP_cfg
+    | PROP_gimple_lcx,                  /* properties_required */
   PROP_rtl,                             /* properties_provided */
   PROP_ssa | PROP_trees,                /* properties_destroyed */
   TODO_verify_ssa | TODO_verify_flow
     | TODO_verify_stmts,                /* todo_flags_start */
   TODO_dump_func