comparison gcc/cfgexpand.c @ 69:1b10fe6932e1
merge 69
author    Nobuyasu Oshiro <dimolto@cr.ie.u-ryukyu.ac.jp>
date      Sun, 21 Aug 2011 07:53:12 +0900
parents   326d9e06c2e3 f6334be47118
children  b81903832de2
66:b362627d71ba (old, left column) | 69:1b10fe6932e1 (new, right column)
1 /* A pass for lowering trees to RTL. | 1 /* A pass for lowering trees to RTL. |
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009 | 2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 |
3 Free Software Foundation, Inc. | 3 Free Software Foundation, Inc. |
4 | 4 |
5 This file is part of GCC. | 5 This file is part of GCC. |
6 | 6 |
7 GCC is free software; you can redistribute it and/or modify | 7 GCC is free software; you can redistribute it and/or modify |
34 #include "tree-dump.h" | 34 #include "tree-dump.h" |
35 #include "tree-pass.h" | 35 #include "tree-pass.h" |
36 #include "except.h" | 36 #include "except.h" |
37 #include "flags.h" | 37 #include "flags.h" |
38 #include "diagnostic.h" | 38 #include "diagnostic.h" |
39 #include "tree-pretty-print.h" | |
40 #include "gimple-pretty-print.h" | |
39 #include "toplev.h" | 41 #include "toplev.h" |
40 #include "debug.h" | 42 #include "debug.h" |
41 #include "params.h" | 43 #include "params.h" |
42 #include "tree-inline.h" | 44 #include "tree-inline.h" |
43 #include "value-prof.h" | 45 #include "value-prof.h" |
44 #include "target.h" | 46 #include "target.h" |
45 #ifndef noCbC | 47 #ifndef noCbC |
46 #include "cbc-tree.h" | 48 #include "cbc-tree.h" |
47 #endif | 49 #endif |
48 #include "ssaexpand.h" | 50 #include "ssaexpand.h" |
49 | 51 #include "bitmap.h" |
52 #include "sbitmap.h" | |
53 #include "insn-attr.h" /* For INSN_SCHEDULING. */ | |
50 | 54 |
51 /* This variable holds information helping the rewriting of SSA trees | 55 /* This variable holds information helping the rewriting of SSA trees |
52 into RTL. */ | 56 into RTL. */ |
53 struct ssaexpand SA; | 57 struct ssaexpand SA; |
54 | 58 |
65 tree t; | 69 tree t; |
66 enum gimple_rhs_class grhs_class; | 70 enum gimple_rhs_class grhs_class; |
67 | 71 |
68 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt)); | 72 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt)); |
69 | 73 |
70 if (grhs_class == GIMPLE_BINARY_RHS) | 74 if (grhs_class == GIMPLE_TERNARY_RHS) |
75 t = build3 (gimple_assign_rhs_code (stmt), | |
76 TREE_TYPE (gimple_assign_lhs (stmt)), | |
77 gimple_assign_rhs1 (stmt), | |
78 gimple_assign_rhs2 (stmt), | |
79 gimple_assign_rhs3 (stmt)); | |
80 else if (grhs_class == GIMPLE_BINARY_RHS) | |
71 t = build2 (gimple_assign_rhs_code (stmt), | 81 t = build2 (gimple_assign_rhs_code (stmt), |
72 TREE_TYPE (gimple_assign_lhs (stmt)), | 82 TREE_TYPE (gimple_assign_lhs (stmt)), |
73 gimple_assign_rhs1 (stmt), | 83 gimple_assign_rhs1 (stmt), |
74 gimple_assign_rhs2 (stmt)); | 84 gimple_assign_rhs2 (stmt)); |
75 else if (grhs_class == GIMPLE_UNARY_RHS) | 85 else if (grhs_class == GIMPLE_UNARY_RHS) |
78 gimple_assign_rhs1 (stmt)); | 88 gimple_assign_rhs1 (stmt)); |
79 else if (grhs_class == GIMPLE_SINGLE_RHS) | 89 else if (grhs_class == GIMPLE_SINGLE_RHS) |
80 { | 90 { |
81 t = gimple_assign_rhs1 (stmt); | 91 t = gimple_assign_rhs1 (stmt); |
82 /* Avoid modifying this tree in place below. */ | 92 /* Avoid modifying this tree in place below. */ |
83 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t) | 93 if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t) |
84 && gimple_location (stmt) != EXPR_LOCATION (t)) | 94 && gimple_location (stmt) != EXPR_LOCATION (t)) |
85 t = copy_node (t); | 95 || (gimple_block (stmt) |
96 && currently_expanding_to_rtl | |
97 && EXPR_P (t) | |
98 && gimple_block (stmt) != TREE_BLOCK (t))) | |
99 t = copy_node (t); | |
86 } | 100 } |
87 else | 101 else |
88 gcc_unreachable (); | 102 gcc_unreachable (); |
89 | 103 |
104 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)) | |
105 SET_EXPR_LOCATION (t, gimple_location (stmt)); | |
106 if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t)) | |
107 TREE_BLOCK (t) = gimple_block (stmt); | |
108 | |
90 return t; | 109 return t; |
91 } | |
92 | |
93 | |
94 /* Verify that there is exactly single jump instruction since last and attach | |
95 REG_BR_PROB note specifying probability. | |
96 ??? We really ought to pass the probability down to RTL expanders and let it | |
97 re-distribute it when the conditional expands into multiple conditionals. | |
98 This is however difficult to do. */ | |
99 void | |
100 add_reg_br_prob_note (rtx last, int probability) | |
101 { | |
102 if (profile_status == PROFILE_ABSENT) | |
103 return; | |
104 for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last)) | |
105 if (JUMP_P (last)) | |
106 { | |
107 /* It is common to emit condjump-around-jump sequence when we don't know | |
108 how to reverse the conditional. Special case this. */ | |
109 if (!any_condjump_p (last) | |
110 || !JUMP_P (NEXT_INSN (last)) | |
111 || !simplejump_p (NEXT_INSN (last)) | |
112 || !NEXT_INSN (NEXT_INSN (last)) | |
113 || !BARRIER_P (NEXT_INSN (NEXT_INSN (last))) | |
114 || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last))) | |
115 || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))) | |
116 || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))) | |
117 goto failed; | |
118 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0)); | |
119 add_reg_note (last, REG_BR_PROB, | |
120 GEN_INT (REG_BR_PROB_BASE - probability)); | |
121 return; | |
122 } | |
123 if (!last || !JUMP_P (last) || !any_condjump_p (last)) | |
124 goto failed; | |
125 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0)); | |
126 add_reg_note (last, REG_BR_PROB, GEN_INT (probability)); | |
127 return; | |
128 failed: | |
129 if (dump_file) | |
130 fprintf (dump_file, "Failed to add probability note\n"); | |
131 } | 110 } |
132 | 111 |
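Editor's note: two independent changes meet in the hunk above. The statement-to-tree helper (gimple_assign_rhs_to_tree in this file) learns the GIMPLE_TERNARY_RHS class introduced in GCC 4.6, rebuilding three-operand right-hand sides with build3; and add_reg_br_prob_note is deleted outright, because branch probabilities are now handed directly to jumpif_1/jumpifnot_1 (see the expand_gimple_cond hunk further down). A standalone sketch of the operand-count dispatch — the enum and the builder stand-ins are illustrative, not GCC's own:

#include <stdio.h>

enum rhs_class { SINGLE_RHS, UNARY_RHS, BINARY_RHS, TERNARY_RHS };

/* Toy stand-in for the build1/build2/build3 choice: report how many
   operands the rebuilt tree node would take.  */
static int
operands_for (enum rhs_class c)
{
  switch (c)
    {
    case TERNARY_RHS: return 3;  /* build3 (code, type, rhs1, rhs2, rhs3) */
    case BINARY_RHS:  return 2;  /* build2 (code, type, rhs1, rhs2)       */
    case UNARY_RHS:   return 1;  /* build1 (code, type, rhs1)             */
    case SINGLE_RHS:  return 0;  /* rhs1 is reused, copied only if needed */
    }
  return -1;
}

int
main (void)
{
  printf ("ternary rhs -> %d operands\n", operands_for (TERNARY_RHS));
  return 0;
}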
133 | 112 |
134 #ifndef STACK_ALIGNMENT_NEEDED | 113 #ifndef STACK_ALIGNMENT_NEEDED |
135 #define STACK_ALIGNMENT_NEEDED 1 | 114 #define STACK_ALIGNMENT_NEEDED 1 |
233 we can't do with expected alignment of the stack boundary. */ | 212 we can't do with expected alignment of the stack boundary. */ |
234 | 213 |
235 static unsigned int | 214 static unsigned int |
236 get_decl_align_unit (tree decl) | 215 get_decl_align_unit (tree decl) |
237 { | 216 { |
238 unsigned int align; | 217 unsigned int align = LOCAL_DECL_ALIGNMENT (decl); |
239 | |
240 align = LOCAL_DECL_ALIGNMENT (decl); | |
241 | |
242 if (align > MAX_SUPPORTED_STACK_ALIGNMENT) | |
243 align = MAX_SUPPORTED_STACK_ALIGNMENT; | |
244 | |
245 if (SUPPORTS_STACK_ALIGNMENT) | |
246 { | |
247 if (crtl->stack_alignment_estimated < align) | |
248 { | |
249 gcc_assert(!crtl->stack_realign_processed); | |
250 crtl->stack_alignment_estimated = align; | |
251 } | |
252 } | |
253 | |
254 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted. | |
255 So here we only make sure stack_alignment_needed >= align. */ | |
256 if (crtl->stack_alignment_needed < align) | |
257 crtl->stack_alignment_needed = align; | |
258 if (crtl->max_used_stack_slot_alignment < align) | |
259 crtl->max_used_stack_slot_alignment = align; | |
260 | |
261 return align / BITS_PER_UNIT; | 218 return align / BITS_PER_UNIT; |
262 } | 219 } |
263 | 220 |
264 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame. | 221 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame. |
265 Return the frame offset. */ | 222 Return the frame offset. */ |
266 | 223 |
267 static HOST_WIDE_INT | 224 static HOST_WIDE_INT |
268 alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align) | 225 alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align) |
269 { | 226 { |
270 HOST_WIDE_INT offset, new_frame_offset; | 227 HOST_WIDE_INT offset, new_frame_offset; |
271 | 228 |
272 new_frame_offset = frame_offset; | 229 new_frame_offset = frame_offset; |
273 if (FRAME_GROWS_DOWNWARD) | 230 if (FRAME_GROWS_DOWNWARD) |
297 /* Accumulate DECL into STACK_VARS. */ | 254 /* Accumulate DECL into STACK_VARS. */ |
298 | 255 |
299 static void | 256 static void |
300 add_stack_var (tree decl) | 257 add_stack_var (tree decl) |
301 { | 258 { |
259 struct stack_var *v; | |
260 | |
302 if (stack_vars_num >= stack_vars_alloc) | 261 if (stack_vars_num >= stack_vars_alloc) |
303 { | 262 { |
304 if (stack_vars_alloc) | 263 if (stack_vars_alloc) |
305 stack_vars_alloc = stack_vars_alloc * 3 / 2; | 264 stack_vars_alloc = stack_vars_alloc * 3 / 2; |
306 else | 265 else |
307 stack_vars_alloc = 32; | 266 stack_vars_alloc = 32; |
308 stack_vars | 267 stack_vars |
309 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc); | 268 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc); |
310 } | 269 } |
311 stack_vars[stack_vars_num].decl = decl; | 270 v = &stack_vars[stack_vars_num]; |
312 stack_vars[stack_vars_num].offset = 0; | 271 |
313 stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1); | 272 v->decl = decl; |
314 stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl)); | 273 v->offset = 0; |
274 v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1); | |
275 /* Ensure that all variables have size, so that &a != &b for any two | |
276 variables that are simultaneously live. */ | |
277 if (v->size == 0) | |
278 v->size = 1; | |
279 v->alignb = get_decl_align_unit (SSAVAR (decl)); | |
315 | 280 |
316 /* All variables are initially in their own partition. */ | 281 /* All variables are initially in their own partition. */ |
317 stack_vars[stack_vars_num].representative = stack_vars_num; | 282 v->representative = stack_vars_num; |
318 stack_vars[stack_vars_num].next = EOC; | 283 v->next = EOC; |
319 | 284 |
320 /* All variables initially conflict with no other. */ | 285 /* All variables initially conflict with no other. */ |
321 stack_vars[stack_vars_num].conflicts = NULL; | 286 v->conflicts = NULL; |
322 | 287 |
323 /* Ensure that this decl doesn't get put onto the list twice. */ | 288 /* Ensure that this decl doesn't get put onto the list twice. */ |
324 set_rtl (decl, pc_rtx); | 289 set_rtl (decl, pc_rtx); |
325 | 290 |
326 stack_vars_num++; | 291 stack_vars_num++; |
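Editor's note: this add_stack_var hunk is mostly a mechanical cleanup (a local struct stack_var *v replaces repeated stack_vars[stack_vars_num] indexing) plus one behavioral fix: zero-sized variables are forced to size 1 so that two simultaneously live variables can never receive the same address. The surrounding 3/2 growth of the array is the usual amortized-O(1) append; a minimal sketch of that pattern, with invented names and error handling omitted:

#include <stdlib.h>

/* Grow-by-3/2 append in the shape of add_stack_var's XRESIZEVEC call,
   starting at 32 slots.  The caller then writes element NUM.  */
static void *
grow_if_full (void *base, size_t num, size_t *alloc, size_t elt_size)
{
  if (num >= *alloc)
    {
      *alloc = *alloc ? *alloc * 3 / 2 : 32;
      base = realloc (base, *alloc * elt_size);
    }
  return base;
}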
366 if (TREE_CODE (type) == ARRAY_TYPE) | 331 if (TREE_CODE (type) == ARRAY_TYPE) |
367 return aggregate_contains_union_type (TREE_TYPE (type)); | 332 return aggregate_contains_union_type (TREE_TYPE (type)); |
368 if (TREE_CODE (type) != RECORD_TYPE) | 333 if (TREE_CODE (type) != RECORD_TYPE) |
369 return false; | 334 return false; |
370 | 335 |
371 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field)) | 336 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) |
372 if (TREE_CODE (field) == FIELD_DECL) | 337 if (TREE_CODE (field) == FIELD_DECL) |
373 if (aggregate_contains_union_type (TREE_TYPE (field))) | 338 if (aggregate_contains_union_type (TREE_TYPE (field))) |
374 return true; | 339 return true; |
375 | 340 |
376 return false; | 341 return false; |
416 } | 381 } |
417 } | 382 } |
418 } | 383 } |
419 | 384 |
420 /* A subroutine of partition_stack_vars. A comparison function for qsort, | 385 /* A subroutine of partition_stack_vars. A comparison function for qsort, |
421 sorting an array of indices by the size and type of the object. */ | 386 sorting an array of indices by the properties of the object. */ |
422 | 387 |
423 static int | 388 static int |
424 stack_var_size_cmp (const void *a, const void *b) | 389 stack_var_cmp (const void *a, const void *b) |
425 { | 390 { |
426 HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size; | 391 size_t ia = *(const size_t *)a; |
427 HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size; | 392 size_t ib = *(const size_t *)b; |
428 tree decla, declb; | 393 unsigned int aligna = stack_vars[ia].alignb; |
394 unsigned int alignb = stack_vars[ib].alignb; | |
395 HOST_WIDE_INT sizea = stack_vars[ia].size; | |
396 HOST_WIDE_INT sizeb = stack_vars[ib].size; | |
397 tree decla = stack_vars[ia].decl; | |
398 tree declb = stack_vars[ib].decl; | |
399 bool largea, largeb; | |
429 unsigned int uida, uidb; | 400 unsigned int uida, uidb; |
430 | 401 |
431 if (sa < sb) | 402 /* Primary compare on "large" alignment. Large comes first. */ |
403 largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT); | |
404 largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT); | |
405 if (largea != largeb) | |
406 return (int)largeb - (int)largea; | |
407 | |
408 /* Secondary compare on size, decreasing */ | |
409 if (sizea < sizeb) | |
432 return -1; | 410 return -1; |
433 if (sa > sb) | 411 if (sizea > sizeb) |
434 return 1; | 412 return 1; |
435 decla = stack_vars[*(const size_t *)a].decl; | 413 |
436 declb = stack_vars[*(const size_t *)b].decl; | 414 /* Tertiary compare on true alignment, decreasing. */ |
437 /* For stack variables of the same size use and id of the decls | 415 if (aligna < alignb) |
438 to make the sort stable. Two SSA names are compared by their | 416 return -1; |
439 version, SSA names come before non-SSA names, and two normal | 417 if (aligna > alignb) |
440 decls are compared by their DECL_UID. */ | 418 return 1; |
419 | |
420 /* Final compare on ID for sort stability, increasing. | |
421 Two SSA names are compared by their version, SSA names come before | |
422 non-SSA names, and two normal decls are compared by their DECL_UID. */ | |
441 if (TREE_CODE (decla) == SSA_NAME) | 423 if (TREE_CODE (decla) == SSA_NAME) |
442 { | 424 { |
443 if (TREE_CODE (declb) == SSA_NAME) | 425 if (TREE_CODE (declb) == SSA_NAME) |
444 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb); | 426 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb); |
445 else | 427 else |
448 else if (TREE_CODE (declb) == SSA_NAME) | 430 else if (TREE_CODE (declb) == SSA_NAME) |
449 return 1; | 431 return 1; |
450 else | 432 else |
451 uida = DECL_UID (decla), uidb = DECL_UID (declb); | 433 uida = DECL_UID (decla), uidb = DECL_UID (declb); |
452 if (uida < uidb) | 434 if (uida < uidb) |
435 return 1; | |
436 if (uida > uidb) | |
453 return -1; | 437 return -1; |
454 if (uida > uidb) | |
455 return 1; | |
456 return 0; | 438 return 0; |
457 } | 439 } |
458 | 440 |
459 | 441 |
460 /* If the points-to solution *PI points to variables that are in a partition | 442 /* If the points-to solution *PI points to variables that are in a partition |
530 | 512 |
531 /* Create bitmaps representing partitions. They will be used for | 513 /* Create bitmaps representing partitions. They will be used for |
532 points-to sets later, so use GGC alloc. */ | 514 points-to sets later, so use GGC alloc. */ |
533 part = BITMAP_GGC_ALLOC (); | 515 part = BITMAP_GGC_ALLOC (); |
534 for (j = i; j != EOC; j = stack_vars[j].next) | 516 for (j = i; j != EOC; j = stack_vars[j].next) |
535 { | 517 { |
536 tree decl = stack_vars[j].decl; | 518 tree decl = stack_vars[j].decl; |
537 unsigned int uid = DECL_UID (decl); | 519 unsigned int uid = DECL_PT_UID (decl); |
538 /* We should never end up partitioning SSA names (though they | 520 /* We should never end up partitioning SSA names (though they |
539 may end up on the stack). Neither should we allocate stack | 521 may end up on the stack). Neither should we allocate stack |
540 space to something that is unused and thus unreferenced. */ | 522 space to something that is unused and thus unreferenced, except |
541 gcc_assert (DECL_P (decl) | 523 for -O0 where we are preserving even unreferenced variables. */ |
542 && referenced_var_lookup (uid)); | 524 gcc_assert (DECL_P (decl) |
543 bitmap_set_bit (part, uid); | 525 && (!optimize |
544 *((bitmap *) pointer_map_insert (decls_to_partitions, | 526 || referenced_var_lookup (cfun, DECL_UID (decl)))); |
545 (void *)(size_t) uid)) = part; | 527 bitmap_set_bit (part, uid); |
546 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers, | 528 *((bitmap *) pointer_map_insert (decls_to_partitions, |
547 decl)) = name; | 529 (void *)(size_t) uid)) = part; |
548 } | 530 *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers, |
531 decl)) = name; | |
532 } | |
549 | 533 |
550 /* Make the SSA name point to all partition members. */ | 534 /* Make the SSA name point to all partition members. */ |
551 pi = get_ptr_info (name); | 535 pi = get_ptr_info (name); |
552 pt_solution_set (&pi->pt, part); | 536 pt_solution_set (&pi->pt, part, false, false); |
553 } | 537 } |
554 | 538 |
555 /* Make all points-to sets that contain one member of a partition | 539 /* Make all points-to sets that contain one member of a partition |
556 contain all members of the partition. */ | 540 contain all members of the partition. */ |
557 if (decls_to_partitions) | 541 if (decls_to_partitions) |
571 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions, | 555 add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions, |
572 visited, temp); | 556 visited, temp); |
573 } | 557 } |
574 | 558 |
575 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped, | 559 add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped, |
576 decls_to_partitions, visited, temp); | 560 decls_to_partitions, visited, temp); |
577 add_partitioned_vars_to_ptset (&cfun->gimple_df->callused, | |
578 decls_to_partitions, visited, temp); | |
579 | 561 |
580 pointer_set_destroy (visited); | 562 pointer_set_destroy (visited); |
581 pointer_map_destroy (decls_to_partitions); | 563 pointer_map_destroy (decls_to_partitions); |
582 BITMAP_FREE (temp); | 564 BITMAP_FREE (temp); |
583 } | 565 } |
650 stack_vars_sorted[si] = si; | 632 stack_vars_sorted[si] = si; |
651 | 633 |
652 if (n == 1) | 634 if (n == 1) |
653 return; | 635 return; |
654 | 636 |
655 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp); | 637 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp); |
656 | 638 |
657 for (si = 0; si < n; ++si) | 639 for (si = 0; si < n; ++si) |
658 { | 640 { |
659 size_t i = stack_vars_sorted[si]; | 641 size_t i = stack_vars_sorted[si]; |
660 HOST_WIDE_INT isize = stack_vars[i].size; | 642 HOST_WIDE_INT isize = stack_vars[i].size; |
643 unsigned int ialign = stack_vars[i].alignb; | |
661 HOST_WIDE_INT offset = 0; | 644 HOST_WIDE_INT offset = 0; |
662 | 645 |
663 for (sj = si; sj-- > 0; ) | 646 for (sj = si; sj-- > 0; ) |
664 { | 647 { |
665 size_t j = stack_vars_sorted[sj]; | 648 size_t j = stack_vars_sorted[sj]; |
666 HOST_WIDE_INT jsize = stack_vars[j].size; | 649 HOST_WIDE_INT jsize = stack_vars[j].size; |
667 unsigned int jalign = stack_vars[j].alignb; | 650 unsigned int jalign = stack_vars[j].alignb; |
668 | 651 |
669 /* Ignore objects that aren't partition representatives. */ | 652 /* Ignore objects that aren't partition representatives. */ |
670 if (stack_vars[j].representative != j) | 653 if (stack_vars[j].representative != j) |
671 continue; | 654 continue; |
672 | 655 |
673 /* Ignore objects too large for the remaining space. */ | 656 /* Ignore objects too large for the remaining space. */ |
674 if (isize < jsize) | 657 if (isize < jsize) |
675 continue; | 658 continue; |
676 | 659 |
677 /* Ignore conflicting objects. */ | 660 /* Ignore conflicting objects. */ |
678 if (stack_var_conflict_p (i, j)) | 661 if (stack_var_conflict_p (i, j)) |
679 continue; | 662 continue; |
680 | 663 |
681 /* Refine the remaining space check to include alignment. */ | 664 /* Do not mix objects of "small" (supported) alignment |
682 if (offset & (jalign - 1)) | 665 and "large" (unsupported) alignment. */ |
683 { | 666 if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT) |
684 HOST_WIDE_INT toff = offset; | 667 != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)) |
685 toff += jalign - 1; | 668 continue; |
686 toff &= -(HOST_WIDE_INT)jalign; | 669 |
687 if (isize - (toff - offset) < jsize) | 670 /* Refine the remaining space check to include alignment. */ |
688 continue; | 671 if (offset & (jalign - 1)) |
689 | 672 { |
690 isize -= toff - offset; | 673 HOST_WIDE_INT toff = offset; |
691 offset = toff; | 674 toff += jalign - 1; |
692 } | 675 toff &= -(HOST_WIDE_INT)jalign; |
693 | 676 if (isize - (toff - offset) < jsize) |
694 /* UNION the objects, placing J at OFFSET. */ | 677 continue; |
695 union_stack_vars (i, j, offset); | 678 |
696 | 679 isize -= toff - offset; |
697 isize -= jsize; | 680 offset = toff; |
698 if (isize == 0) | 681 } |
699 break; | 682 |
700 } | 683 /* UNION the objects, placing J at OFFSET. */ |
701 } | 684 union_stack_vars (i, j, offset); |
702 | 685 |
703 if (optimize) | 686 isize -= jsize; |
704 update_alias_info_with_stack_vars (); | 687 if (isize == 0) |
688 break; | |
689 } | |
690 } | |
691 | |
692 update_alias_info_with_stack_vars (); | |
705 } | 693 } |
706 | 694 |
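Editor's note: the partitioning loop above adds a guard so "small" (frame-allocated) and "large" (dynamically allocated) alignment classes never share a partition, and the update_alias_info_with_stack_vars call becomes unconditional rather than gated on optimize. The space check itself, old and new, rounds the candidate offset up to the joining variable's alignment with the classic (off + align - 1) & -align idiom, valid for any power-of-two alignment. A minimal demonstration:

#include <assert.h>

/* Round OFF up to a multiple of the power-of-two ALIGN, exactly as
   the toff computation in the hunk does.  */
static long
align_up (long off, long align)
{
  assert (align > 0 && (align & (align - 1)) == 0);
  return (off + align - 1) & -align;
}

int
main (void)
{
  assert (align_up (13, 8) == 16);
  assert (align_up (16, 8) == 16);
  assert (align_up (0, 16) == 0);
  return 0;
}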
707 /* A debugging aid for expand_used_vars. Dump the generated partitions. */ | 695 /* A debugging aid for expand_used_vars. Dump the generated partitions. */ |
708 | 696 |
709 static void | 697 static void |
731 stack_vars[j].offset); | 719 stack_vars[j].offset); |
732 } | 720 } |
733 } | 721 } |
734 } | 722 } |
735 | 723 |
736 /* Assign rtl to DECL at frame offset OFFSET. */ | 724 /* Assign rtl to DECL at BASE + OFFSET. */ |
737 | 725 |
738 static void | 726 static void |
739 expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset) | 727 expand_one_stack_var_at (tree decl, rtx base, unsigned base_align, |
740 { | 728 HOST_WIDE_INT offset) |
741 /* Alignment is unsigned. */ | 729 { |
742 unsigned HOST_WIDE_INT align; | 730 unsigned align; |
743 rtx x; | 731 rtx x; |
744 | 732 |
745 /* If this fails, we've overflowed the stack frame. Error nicely? */ | 733 /* If this fails, we've overflowed the stack frame. Error nicely? */ |
746 gcc_assert (offset == trunc_int_for_mode (offset, Pmode)); | 734 gcc_assert (offset == trunc_int_for_mode (offset, Pmode)); |
747 | 735 |
748 x = plus_constant (virtual_stack_vars_rtx, offset); | 736 x = plus_constant (base, offset); |
749 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x); | 737 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x); |
750 | 738 |
751 if (TREE_CODE (decl) != SSA_NAME) | 739 if (TREE_CODE (decl) != SSA_NAME) |
752 { | 740 { |
753 /* Set alignment we actually gave this decl if it isn't an SSA name. | 741 /* Set alignment we actually gave this decl if it isn't an SSA name. |
754 If it is we generate stack slots only accidentally so it isn't as | 742 If it is we generate stack slots only accidentally so it isn't as |
755 important, we'll simply use the alignment that is already set. */ | 743 important, we'll simply use the alignment that is already set. */ |
756 offset -= frame_phase; | 744 if (base == virtual_stack_vars_rtx) |
745 offset -= frame_phase; | |
757 align = offset & -offset; | 746 align = offset & -offset; |
758 align *= BITS_PER_UNIT; | 747 align *= BITS_PER_UNIT; |
759 if (align == 0) | 748 if (align == 0 || align > base_align) |
760 align = STACK_BOUNDARY; | 749 align = base_align; |
761 else if (align > MAX_SUPPORTED_STACK_ALIGNMENT) | 750 |
762 align = MAX_SUPPORTED_STACK_ALIGNMENT; | 751 /* One would think that we could assert that we're not decreasing |
752 alignment here, but (at least) the i386 port does exactly this | |
753 via the MINIMUM_ALIGNMENT hook. */ | |
763 | 754 |
764 DECL_ALIGN (decl) = align; | 755 DECL_ALIGN (decl) = align; |
765 DECL_USER_ALIGN (decl) = 0; | 756 DECL_USER_ALIGN (decl) = 0; |
766 } | 757 } |
767 | 758 |
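Editor's note: in expand_one_stack_var_at the alignment a frame slot actually delivers is recovered from its byte offset: offset & -offset isolates the lowest set bit, i.e. the largest power of two dividing the offset. The new code caps that by the base register's known alignment (base_align) instead of the old fixed STACK_BOUNDARY / MAX_SUPPORTED_STACK_ALIGNMENT pair, and subtracts frame_phase only when the base really is virtual_stack_vars_rtx. A short check of the bit trick:

#include <assert.h>

int
main (void)
{
  /* offset & -offset == largest power of two dividing offset;
     offset 0 yields 0 and is handled by the align == 0 case above.  */
  assert ((24 & -24) == 8);
  assert ((16 & -16) == 16);
  assert ((10 & -10) == 2);
  return 0;
}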
775 | 766 |
776 static void | 767 static void |
777 expand_stack_vars (bool (*pred) (tree)) | 768 expand_stack_vars (bool (*pred) (tree)) |
778 { | 769 { |
779 size_t si, i, j, n = stack_vars_num; | 770 size_t si, i, j, n = stack_vars_num; |
771 HOST_WIDE_INT large_size = 0, large_alloc = 0; | |
772 rtx large_base = NULL; | |
773 unsigned large_align = 0; | |
774 tree decl; | |
775 | |
776 /* Determine if there are any variables requiring "large" alignment. | |
777 Since these are dynamically allocated, we only process these if | |
778 no predicate involved. */ | |
779 large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT; | |
780 if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT) | |
781 { | |
782 /* Find the total size of these variables. */ | |
783 for (si = 0; si < n; ++si) | |
784 { | |
785 unsigned alignb; | |
786 | |
787 i = stack_vars_sorted[si]; | |
788 alignb = stack_vars[i].alignb; | |
789 | |
790 /* Stop when we get to the first decl with "small" alignment. */ | |
791 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT) | |
792 break; | |
793 | |
794 /* Skip variables that aren't partition representatives. */ | |
795 if (stack_vars[i].representative != i) | |
796 continue; | |
797 | |
798 /* Skip variables that have already had rtl assigned. See also | |
799 add_stack_var where we perpetrate this pc_rtx hack. */ | |
800 decl = stack_vars[i].decl; | |
801 if ((TREE_CODE (decl) == SSA_NAME | |
802 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] | |
803 : DECL_RTL (decl)) != pc_rtx) | |
804 continue; | |
805 | |
806 large_size += alignb - 1; | |
807 large_size &= -(HOST_WIDE_INT)alignb; | |
808 large_size += stack_vars[i].size; | |
809 } | |
810 | |
811 /* If there were any, allocate space. */ | |
812 if (large_size > 0) | |
813 large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0, | |
814 large_align, true); | |
815 } | |
780 | 816 |
781 for (si = 0; si < n; ++si) | 817 for (si = 0; si < n; ++si) |
782 { | 818 { |
819 rtx base; | |
820 unsigned base_align, alignb; | |
783 HOST_WIDE_INT offset; | 821 HOST_WIDE_INT offset; |
784 | 822 |
785 i = stack_vars_sorted[si]; | 823 i = stack_vars_sorted[si]; |
786 | 824 |
787 /* Skip variables that aren't partition representatives, for now. */ | 825 /* Skip variables that aren't partition representatives, for now. */ |
788 if (stack_vars[i].representative != i) | 826 if (stack_vars[i].representative != i) |
789 continue; | 827 continue; |
790 | 828 |
791 /* Skip variables that have already had rtl assigned. See also | 829 /* Skip variables that have already had rtl assigned. See also |
792 add_stack_var where we perpetrate this pc_rtx hack. */ | 830 add_stack_var where we perpetrate this pc_rtx hack. */ |
793 if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME | 831 decl = stack_vars[i].decl; |
794 ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)] | 832 if ((TREE_CODE (decl) == SSA_NAME |
795 : DECL_RTL (stack_vars[i].decl)) != pc_rtx) | 833 ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] |
796 continue; | 834 : DECL_RTL (decl)) != pc_rtx) |
835 continue; | |
797 | 836 |
798 /* Check the predicate to see whether this variable should be | 837 /* Check the predicate to see whether this variable should be |
799 allocated in this pass. */ | 838 allocated in this pass. */ |
800 if (pred && !pred (stack_vars[i].decl)) | 839 if (pred && !pred (decl)) |
801 continue; | 840 continue; |
802 | 841 |
803 offset = alloc_stack_frame_space (stack_vars[i].size, | 842 alignb = stack_vars[i].alignb; |
804 stack_vars[i].alignb); | 843 if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT) |
844 { | |
845 offset = alloc_stack_frame_space (stack_vars[i].size, alignb); | |
846 base = virtual_stack_vars_rtx; | |
847 base_align = crtl->max_used_stack_slot_alignment; | |
848 } | |
849 else | |
850 { | |
851 /* Large alignment is only processed in the last pass. */ | |
852 if (pred) | |
853 continue; | |
854 gcc_assert (large_base != NULL); | |
855 | |
856 large_alloc += alignb - 1; | |
857 large_alloc &= -(HOST_WIDE_INT)alignb; | |
858 offset = large_alloc; | |
859 large_alloc += stack_vars[i].size; | |
860 | |
861 base = large_base; | |
862 base_align = large_align; | |
863 } | |
805 | 864 |
806 /* Create rtl for each variable based on their location within the | 865 /* Create rtl for each variable based on their location within the |
807 partition. */ | 866 partition. */ |
808 for (j = i; j != EOC; j = stack_vars[j].next) | 867 for (j = i; j != EOC; j = stack_vars[j].next) |
809 { | 868 { |
810 gcc_assert (stack_vars[j].offset <= stack_vars[i].size); | 869 gcc_assert (stack_vars[j].offset <= stack_vars[i].size); |
811 expand_one_stack_var_at (stack_vars[j].decl, | 870 expand_one_stack_var_at (stack_vars[j].decl, |
812 stack_vars[j].offset + offset); | 871 base, base_align, |
813 } | 872 stack_vars[j].offset + offset); |
814 } | 873 } |
874 } | |
875 | |
876 gcc_assert (large_alloc == large_size); | |
815 } | 877 } |
816 | 878 |
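Editor's note: expand_stack_vars now runs a pre-pass for over-aligned variables. Because the sort placed "large"-alignment partition representatives first, it can total their sizes (rounding each start up to its alignment), reserve one block via allocate_dynamic_stack_space, and then hand out offsets inside that block with the same bump-pointer arithmetic, asserting afterwards that the two passes agree (large_alloc == large_size). A toy bump allocator in the same spirit, with invented names:

#include <stddef.h>
#include <assert.h>

struct bump { size_t cursor; };

/* Reserve SIZE bytes at power-of-two ALIGN, returning the offset
   within the separately allocated block.  */
static size_t
bump_alloc (struct bump *b, size_t size, size_t align)
{
  size_t off;
  assert (align != 0 && (align & (align - 1)) == 0);
  b->cursor = (b->cursor + align - 1) & ~(align - 1);
  off = b->cursor;
  b->cursor += size;
  return off;
}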
817 /* Take into account all sizes of partitions and reset DECL_RTLs. */ | 879 /* Take into account all sizes of partitions and reset DECL_RTLs. */ |
818 static HOST_WIDE_INT | 880 static HOST_WIDE_INT |
819 account_stack_vars (void) | 881 account_stack_vars (void) |
840 to a variable to be allocated in the stack frame. */ | 902 to a variable to be allocated in the stack frame. */ |
841 | 903 |
842 static void | 904 static void |
843 expand_one_stack_var (tree var) | 905 expand_one_stack_var (tree var) |
844 { | 906 { |
845 HOST_WIDE_INT size, offset, align; | 907 HOST_WIDE_INT size, offset; |
908 unsigned byte_align; | |
846 | 909 |
847 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1); | 910 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1); |
848 align = get_decl_align_unit (SSAVAR (var)); | 911 byte_align = get_decl_align_unit (SSAVAR (var)); |
849 offset = alloc_stack_frame_space (size, align); | 912 |
850 | 913 /* We handle highly aligned variables in expand_stack_vars. */ |
851 expand_one_stack_var_at (var, offset); | 914 gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT); |
915 | |
916 offset = alloc_stack_frame_space (size, byte_align); | |
917 | |
918 expand_one_stack_var_at (var, virtual_stack_vars_rtx, | |
919 crtl->max_used_stack_slot_alignment, offset); | |
852 } | 920 } |
853 | 921 |
854 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL | 922 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL |
855 that will reside in a hard register. */ | 923 that will reside in a hard register. */ |
856 | 924 |
913 defer_stack_allocation (tree var, bool toplevel) | 981 defer_stack_allocation (tree var, bool toplevel) |
914 { | 982 { |
915 /* If stack protection is enabled, *all* stack variables must be deferred, | 983 /* If stack protection is enabled, *all* stack variables must be deferred, |
916 so that we can re-order the strings to the top of the frame. */ | 984 so that we can re-order the strings to the top of the frame. */ |
917 if (flag_stack_protect) | 985 if (flag_stack_protect) |
986 return true; | |
987 | |
988 /* We handle "large" alignment via dynamic allocation. We want to handle | |
989 this extra complication in only one place, so defer them. */ | |
990 if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT) | |
918 return true; | 991 return true; |
919 | 992 |
920 /* Variables in the outermost scope automatically conflict with | 993 /* Variables in the outermost scope automatically conflict with |
921 every other variable. The only reason to want to defer them | 994 every other variable. The only reason to want to defer them |
922 at all is that, after sorting, we can more efficiently pack | 995 at all is that, after sorting, we can more efficiently pack |
944 */ | 1017 */ |
945 | 1018 |
946 static HOST_WIDE_INT | 1019 static HOST_WIDE_INT |
947 expand_one_var (tree var, bool toplevel, bool really_expand) | 1020 expand_one_var (tree var, bool toplevel, bool really_expand) |
948 { | 1021 { |
1022 unsigned int align = BITS_PER_UNIT; | |
949 tree origvar = var; | 1023 tree origvar = var; |
1024 | |
950 var = SSAVAR (var); | 1025 var = SSAVAR (var); |
951 | 1026 |
952 if (SUPPORTS_STACK_ALIGNMENT | 1027 if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL) |
953 && TREE_TYPE (var) != error_mark_node | 1028 { |
954 && TREE_CODE (var) == VAR_DECL) | |
955 { | |
956 unsigned int align; | |
957 | |
958 /* Because we don't know if VAR will be in register or on stack, | 1029 /* Because we don't know if VAR will be in register or on stack, |
959 we conservatively assume it will be on stack even if VAR is | 1030 we conservatively assume it will be on stack even if VAR is |
960 eventually put into register after RA pass. For non-automatic | 1031 eventually put into register after RA pass. For non-automatic |
961 variables, which won't be on stack, we collect alignment of | 1032 variables, which won't be on stack, we collect alignment of |
962 type and ignore user specified alignment. */ | 1033 type and ignore user specified alignment. */ |
963 if (TREE_STATIC (var) || DECL_EXTERNAL (var)) | 1034 if (TREE_STATIC (var) || DECL_EXTERNAL (var)) |
964 align = MINIMUM_ALIGNMENT (TREE_TYPE (var), | 1035 align = MINIMUM_ALIGNMENT (TREE_TYPE (var), |
965 TYPE_MODE (TREE_TYPE (var)), | 1036 TYPE_MODE (TREE_TYPE (var)), |
966 TYPE_ALIGN (TREE_TYPE (var))); | 1037 TYPE_ALIGN (TREE_TYPE (var))); |
1038 else if (DECL_HAS_VALUE_EXPR_P (var) | |
1039 || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var)))) | |
1040 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set | |
1041 or variables which were assigned a stack slot already by | |
1042 expand_one_stack_var_at - in the latter case DECL_ALIGN has been | |
1043 changed from the offset chosen to it. */ | |
1044 align = crtl->stack_alignment_estimated; | |
967 else | 1045 else |
968 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var)); | 1046 align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var)); |
969 | 1047 |
970 if (crtl->stack_alignment_estimated < align) | 1048 /* If the variable alignment is very large we'll dynamically allocate |
971 { | 1049 it, which means that in-frame portion is just a pointer. */ |
972 /* stack_alignment_estimated shouldn't change after stack | 1050 if (align > MAX_SUPPORTED_STACK_ALIGNMENT) |
973 realign decision made */ | 1051 align = POINTER_SIZE; |
974 gcc_assert(!crtl->stack_realign_processed); | 1052 } |
1053 | |
1054 if (SUPPORTS_STACK_ALIGNMENT | |
1055 && crtl->stack_alignment_estimated < align) | |
1056 { | |
1057 /* stack_alignment_estimated shouldn't change after stack | |
1058 realign decision made */ | |
1059 gcc_assert(!crtl->stack_realign_processed); | |
975 crtl->stack_alignment_estimated = align; | 1060 crtl->stack_alignment_estimated = align; |
976 } | 1061 } |
977 } | 1062 |
1063 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted. | |
1064 So here we only make sure stack_alignment_needed >= align. */ | |
1065 if (crtl->stack_alignment_needed < align) | |
1066 crtl->stack_alignment_needed = align; | |
1067 if (crtl->max_used_stack_slot_alignment < align) | |
1068 crtl->max_used_stack_slot_alignment = align; | |
978 | 1069 |
979 if (TREE_CODE (origvar) == SSA_NAME) | 1070 if (TREE_CODE (origvar) == SSA_NAME) |
980 { | 1071 { |
981 gcc_assert (TREE_CODE (var) != VAR_DECL | 1072 gcc_assert (TREE_CODE (var) != VAR_DECL |
982 || (!DECL_EXTERNAL (var) | 1073 || (!DECL_EXTERNAL (var) |
1009 else if (use_register_for_decl (var)) | 1100 else if (use_register_for_decl (var)) |
1010 { | 1101 { |
1011 if (really_expand) | 1102 if (really_expand) |
1012 expand_one_register_var (origvar); | 1103 expand_one_register_var (origvar); |
1013 } | 1104 } |
1105 else if (!host_integerp (DECL_SIZE_UNIT (var), 1)) | |
1106 { | |
1107 if (really_expand) | |
1108 { | |
1109 error ("size of variable %q+D is too large", var); | |
1110 expand_one_error_var (var); | |
1111 } | |
1112 } | |
1014 else if (defer_stack_allocation (var, toplevel)) | 1113 else if (defer_stack_allocation (var, toplevel)) |
1015 add_stack_var (origvar); | 1114 add_stack_var (origvar); |
1016 else | 1115 else |
1017 { | 1116 { |
1018 if (really_expand) | 1117 if (really_expand) |
1035 tree t; | 1134 tree t; |
1036 | 1135 |
1037 old_sv_num = toplevel ? 0 : stack_vars_num; | 1136 old_sv_num = toplevel ? 0 : stack_vars_num; |
1038 | 1137 |
1039 /* Expand all variables at this level. */ | 1138 /* Expand all variables at this level. */ |
1040 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t)) | 1139 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t)) |
1041 if (TREE_USED (t)) | 1140 if (TREE_USED (t)) |
1042 expand_one_var (t, toplevel, true); | 1141 expand_one_var (t, toplevel, true); |
1043 | 1142 |
1044 this_sv_num = stack_vars_num; | 1143 this_sv_num = stack_vars_num; |
1045 | 1144 |
1067 static void | 1166 static void |
1068 clear_tree_used (tree block) | 1167 clear_tree_used (tree block) |
1069 { | 1168 { |
1070 tree t; | 1169 tree t; |
1071 | 1170 |
1072 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t)) | 1171 for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t)) |
1073 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */ | 1172 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */ |
1074 TREE_USED (t) = 0; | 1173 TREE_USED (t) = 0; |
1075 | 1174 |
1076 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t)) | 1175 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t)) |
1077 clear_tree_used (t); | 1176 clear_tree_used (t); |
1213 TREE_USED (guard) = 1; | 1312 TREE_USED (guard) = 1; |
1214 expand_one_stack_var (guard); | 1313 expand_one_stack_var (guard); |
1215 crtl->stack_protect_guard = guard; | 1314 crtl->stack_protect_guard = guard; |
1216 } | 1315 } |
1217 | 1316 |
1218 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree | |
1219 expanding variables. Those variables that can be put into registers | |
1220 are allocated pseudos; those that can't are put on the stack. | |
1221 | |
1222 TOPLEVEL is true if this is the outermost BLOCK. */ | |
1223 | |
1224 static HOST_WIDE_INT | |
1225 account_used_vars_for_block (tree block, bool toplevel) | |
1226 { | |
1227 tree t; | |
1228 HOST_WIDE_INT size = 0; | |
1229 | |
1230 /* Expand all variables at this level. */ | |
1231 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t)) | |
1232 if (TREE_USED (t)) | |
1233 size += expand_one_var (t, toplevel, false); | |
1234 | |
1235 /* Expand all variables at containing levels. */ | |
1236 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t)) | |
1237 size += account_used_vars_for_block (t, false); | |
1238 | |
1239 return size; | |
1240 } | |
1241 | |
1242 /* Prepare for expanding variables. */ | 1317 /* Prepare for expanding variables. */ |
1243 static void | 1318 static void |
1244 init_vars_expansion (void) | 1319 init_vars_expansion (void) |
1245 { | 1320 { |
1246 tree t; | 1321 tree t; |
1322 unsigned ix; | |
1247 /* Set TREE_USED on all variables in the local_decls. */ | 1323 /* Set TREE_USED on all variables in the local_decls. */ |
1248 for (t = cfun->local_decls; t; t = TREE_CHAIN (t)) | 1324 FOR_EACH_LOCAL_DECL (cfun, ix, t) |
1249 TREE_USED (TREE_VALUE (t)) = 1; | 1325 TREE_USED (t) = 1; |
1250 | 1326 |
1251 /* Clear TREE_USED on all variables associated with a block scope. */ | 1327 /* Clear TREE_USED on all variables associated with a block scope. */ |
1252 clear_tree_used (DECL_INITIAL (current_function_decl)); | 1328 clear_tree_used (DECL_INITIAL (current_function_decl)); |
1253 | 1329 |
1254 /* Initialize local stack smashing state. */ | 1330 /* Initialize local stack smashing state. */ |
1267 XDELETEVEC (stack_vars_sorted); | 1343 XDELETEVEC (stack_vars_sorted); |
1268 stack_vars = NULL; | 1344 stack_vars = NULL; |
1269 stack_vars_alloc = stack_vars_num = 0; | 1345 stack_vars_alloc = stack_vars_num = 0; |
1270 } | 1346 } |
1271 | 1347 |
1272 /* Make a fair guess for the size of the stack frame of the current | 1348 /* Make a fair guess for the size of the stack frame of the function |
1273 function. This doesn't have to be exact, the result is only used | 1349 in NODE. This doesn't have to be exact, the result is only used in |
1274 in the inline heuristics. So we don't want to run the full stack | 1350 the inline heuristics. So we don't want to run the full stack var |
1275 var packing algorithm (which is quadratic in the number of stack | 1351 packing algorithm (which is quadratic in the number of stack vars). |
1276 vars). Instead, we calculate the total size of all stack vars. | 1352 Instead, we calculate the total size of all stack vars. This turns |
1277 This turns out to be a pretty fair estimate -- packing of stack | 1353 out to be a pretty fair estimate -- packing of stack vars doesn't |
1278 vars doesn't happen very often. */ | 1354 happen very often. */ |
1279 | 1355 |
1280 HOST_WIDE_INT | 1356 HOST_WIDE_INT |
1281 estimated_stack_frame_size (void) | 1357 estimated_stack_frame_size (struct cgraph_node *node) |
1282 { | 1358 { |
1283 HOST_WIDE_INT size = 0; | 1359 HOST_WIDE_INT size = 0; |
1284 size_t i; | 1360 size_t i; |
1285 tree t, outer_block = DECL_INITIAL (current_function_decl); | 1361 tree var; |
1286 | 1362 tree old_cur_fun_decl = current_function_decl; |
1287 init_vars_expansion (); | 1363 referenced_var_iterator rvi; |
1288 | 1364 struct function *fn = DECL_STRUCT_FUNCTION (node->decl); |
1289 for (t = cfun->local_decls; t; t = TREE_CHAIN (t)) | 1365 |
1290 { | 1366 current_function_decl = node->decl; |
1291 tree var = TREE_VALUE (t); | 1367 push_cfun (fn); |
1292 | 1368 |
1293 if (TREE_USED (var)) | 1369 gcc_checking_assert (gimple_referenced_vars (fn)); |
1294 size += expand_one_var (var, true, false); | 1370 FOR_EACH_REFERENCED_VAR (fn, var, rvi) |
1295 TREE_USED (var) = 1; | 1371 size += expand_one_var (var, true, false); |
1296 } | |
1297 size += account_used_vars_for_block (outer_block, true); | |
1298 | 1372 |
1299 if (stack_vars_num > 0) | 1373 if (stack_vars_num > 0) |
1300 { | 1374 { |
1301 /* Fake sorting the stack vars for account_stack_vars (). */ | 1375 /* Fake sorting the stack vars for account_stack_vars (). */ |
1302 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num); | 1376 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num); |
1303 for (i = 0; i < stack_vars_num; ++i) | 1377 for (i = 0; i < stack_vars_num; ++i) |
1304 stack_vars_sorted[i] = i; | 1378 stack_vars_sorted[i] = i; |
1305 size += account_stack_vars (); | 1379 size += account_stack_vars (); |
1306 fini_vars_expansion (); | 1380 fini_vars_expansion (); |
1307 } | 1381 } |
1308 | 1382 pop_cfun (); |
1383 current_function_decl = old_cur_fun_decl; | |
1309 return size; | 1384 return size; |
1310 } | 1385 } |
1311 | 1386 |
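Editor's note: estimated_stack_frame_size now takes the cgraph node whose frame is being estimated instead of implicitly using the current function, so it must temporarily switch context (save current_function_decl, push_cfun, compute, pop_cfun, restore) and walks the referenced-vars table rather than local_decls plus the block tree. The estimate stays linear — it just totals variable sizes, since full packing is quadratic and rarely shrinks the frame much. The save/compute/restore shape, reduced to a sketch with a stand-in context variable:

static int current_ctx;   /* stand-in for cfun / current_function_decl */

static long
estimate_in_ctx (int ctx)
{
  int saved = current_ctx;   /* like push_cfun (fn)  */
  long size = 0;
  current_ctx = ctx;
  /* ... total the sizes of ctx's variables here ... */
  current_ctx = saved;       /* like pop_cfun ()     */
  return size;
}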
1312 /* Expand all variables used in the function. */ | 1387 /* Expand all variables used in the function. */ |
1313 | 1388 |
1314 static void | 1389 static void |
1315 expand_used_vars (void) | 1390 expand_used_vars (void) |
1316 { | 1391 { |
1317 tree t, next, outer_block = DECL_INITIAL (current_function_decl); | 1392 tree var, outer_block = DECL_INITIAL (current_function_decl); |
1393 VEC(tree,heap) *maybe_local_decls = NULL; | |
1318 unsigned i; | 1394 unsigned i; |
1395 unsigned len; | |
1319 | 1396 |
1320 /* Compute the phase of the stack frame for this function. */ | 1397 /* Compute the phase of the stack frame for this function. */ |
1321 { | 1398 { |
1322 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; | 1399 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; |
1323 int off = STARTING_FRAME_OFFSET % align; | 1400 int off = STARTING_FRAME_OFFSET % align; |
1348 } | 1425 } |
1349 } | 1426 } |
1350 | 1427 |
1351 /* At this point all variables on the local_decls with TREE_USED | 1428 /* At this point all variables on the local_decls with TREE_USED |
1352 set are not associated with any block scope. Lay them out. */ | 1429 set are not associated with any block scope. Lay them out. */ |
1353 t = cfun->local_decls; | 1430 |
1354 cfun->local_decls = NULL_TREE; | 1431 len = VEC_length (tree, cfun->local_decls); |
1355 for (; t; t = next) | 1432 FOR_EACH_LOCAL_DECL (cfun, i, var) |
1356 { | 1433 { |
1357 tree var = TREE_VALUE (t); | |
1358 bool expand_now = false; | 1434 bool expand_now = false; |
1359 | |
1360 next = TREE_CHAIN (t); | |
1361 | 1435 |
1362 /* Expanded above already. */ | 1436 /* Expanded above already. */ |
1363 if (is_gimple_reg (var)) | 1437 if (is_gimple_reg (var)) |
1364 { | 1438 { |
1365 TREE_USED (var) = 0; | 1439 TREE_USED (var) = 0; |
1366 ggc_free (t); | 1440 goto next; |
1367 continue; | 1441 } |
1368 } | |
1369 /* We didn't set a block for static or extern because it's hard | 1442 /* We didn't set a block for static or extern because it's hard |
1370 to tell the difference between a global variable (re)declared | 1443 to tell the difference between a global variable (re)declared |
1371 in a local scope, and one that's really declared there to | 1444 in a local scope, and one that's really declared there to |
1372 begin with. And it doesn't really matter much, since we're | 1445 begin with. And it doesn't really matter much, since we're |
1373 not giving them stack space. Expand them now. */ | 1446 not giving them stack space. Expand them now. */ |
1383 /* Finally, mark all variables on the list as used. We'll use | 1456 /* Finally, mark all variables on the list as used. We'll use |
1384 this in a moment when we expand those associated with scopes. */ | 1457 this in a moment when we expand those associated with scopes. */ |
1385 TREE_USED (var) = 1; | 1458 TREE_USED (var) = 1; |
1386 | 1459 |
1387 if (expand_now) | 1460 if (expand_now) |
1388 { | 1461 expand_one_var (var, true, true); |
1389 expand_one_var (var, true, true); | 1462 |
1463 next: | |
1390 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var)) | 1464 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var)) |
1391 { | 1465 { |
1392 rtx rtl = DECL_RTL_IF_SET (var); | 1466 rtx rtl = DECL_RTL_IF_SET (var); |
1393 | 1467 |
1394 /* Keep artificial non-ignored vars in cfun->local_decls | 1468 /* Keep artificial non-ignored vars in cfun->local_decls |
1395 chain until instantiate_decls. */ | 1469 chain until instantiate_decls. */ |
1396 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT)) | 1470 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT)) |
1397 { | 1471 add_local_decl (cfun, var); |
1398 TREE_CHAIN (t) = cfun->local_decls; | 1472 else if (rtl == NULL_RTX) |
1399 cfun->local_decls = t; | 1473 /* If rtl isn't set yet, which can happen e.g. with |
1400 continue; | 1474 -fstack-protector, retry before returning from this |
1401 } | 1475 function. */ |
1402 } | 1476 VEC_safe_push (tree, heap, maybe_local_decls, var); |
1403 } | 1477 } |
1404 | 1478 } |
1405 ggc_free (t); | 1479 |
1406 } | 1480 /* We duplicated some of the decls in CFUN->LOCAL_DECLS. |
1481 | |
1482 +-----------------+-----------------+ | |
1483 | ...processed... | ...duplicates...| | |
1484 +-----------------+-----------------+ | |
1485 ^ | |
1486 +-- LEN points here. | |
1487 | |
1488 We just want the duplicates, as those are the artificial | |
1489 non-ignored vars that we want to keep until instantiate_decls. | |
1490 Move them down and truncate the array. */ | |
1491 if (!VEC_empty (tree, cfun->local_decls)) | |
1492 VEC_block_remove (tree, cfun->local_decls, 0, len); | |
1407 | 1493 |
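Editor's note: the ASCII diagram above documents the new bookkeeping. LEN is taken before the walk; during it, artificial non-ignored decls that already have MEM rtl are re-appended via add_local_decl, so the vector ends up as LEN processed entries followed by those duplicates, and VEC_block_remove (tree, cfun->local_decls, 0, len) discards the processed prefix in place. The same operation on a plain array, for illustration:

#include <string.h>

/* Drop the first LEN elements of V (length N), keeping the tail --
   what VEC_block_remove (v, 0, len) does for GCC's VECs.  */
static size_t
drop_prefix (int *v, size_t n, size_t len)
{
  memmove (v, v + len, (n - len) * sizeof *v);
  return n - len;
}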
1408 /* At this point, all variables within the block tree with TREE_USED | 1494 /* At this point, all variables within the block tree with TREE_USED |
1409 set are actually used by the optimized function. Lay them out. */ | 1495 set are actually used by the optimized function. Lay them out. */ |
1410 expand_used_vars_for_block (outer_block, true); | 1496 expand_used_vars_for_block (outer_block, true); |
1411 | 1497 |
1455 | 1541 |
1456 expand_stack_vars (NULL); | 1542 expand_stack_vars (NULL); |
1457 | 1543 |
1458 fini_vars_expansion (); | 1544 fini_vars_expansion (); |
1459 } | 1545 } |
1546 | |
1547 /* If there were any artificial non-ignored vars without rtl | |
1548 found earlier, see if deferred stack allocation hasn't assigned | |
1549 rtl to them. */ | |
1550 FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var) | |
1551 { | |
1552 rtx rtl = DECL_RTL_IF_SET (var); | |
1553 | |
1554 /* Keep artificial non-ignored vars in cfun->local_decls | |
1555 chain until instantiate_decls. */ | |
1556 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT)) | |
1557 add_local_decl (cfun, var); | |
1558 } | |
1559 VEC_free (tree, heap, maybe_local_decls); | |
1460 | 1560 |
1461 /* If the target requires that FRAME_OFFSET be aligned, do it. */ | 1561 /* If the target requires that FRAME_OFFSET be aligned, do it. */ |
1462 if (STACK_ALIGNMENT_NEEDED) | 1562 if (STACK_ALIGNMENT_NEEDED) |
1463 { | 1563 { |
1464 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; | 1564 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; |
1564 insn = PREV_INSN (get_last_insn ()); | 1664 insn = PREV_INSN (get_last_insn ()); |
1565 /* Make sure we have an unconditional jump. Otherwise we're | 1665 /* Make sure we have an unconditional jump. Otherwise we're |
1566 confused. */ | 1666 confused. */ |
1567 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn)); | 1667 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn)); |
1568 for (insn = PREV_INSN (insn); insn != last;) | 1668 for (insn = PREV_INSN (insn); insn != last;) |
1569 { | 1669 { |
1570 insn = PREV_INSN (insn); | 1670 insn = PREV_INSN (insn); |
1571 if (JUMP_P (NEXT_INSN (insn))) | 1671 if (JUMP_P (NEXT_INSN (insn))) |
1572 delete_insn (NEXT_INSN (insn)); | 1672 { |
1573 } | 1673 if (!any_condjump_p (NEXT_INSN (insn))) |
1674 { | |
1675 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn)))); | |
1676 delete_insn (NEXT_INSN (NEXT_INSN (insn))); | |
1677 } | |
1678 delete_insn (NEXT_INSN (insn)); | |
1679 } | |
1680 } | |
1574 } | 1681 } |
1575 } | 1682 } |
1576 | 1683 |
1577 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND. | 1684 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND. |
1578 Returns a new basic block if we've terminated the current basic | 1685 Returns a new basic block if we've terminated the current basic |
1653 | 1760 |
1654 /* We can either have a pure conditional jump with one fallthru edge or | 1761 /* We can either have a pure conditional jump with one fallthru edge or |
1655 two-way jump that needs to be decomposed into two basic blocks. */ | 1762 two-way jump that needs to be decomposed into two basic blocks. */ |
1656 if (false_edge->dest == bb->next_bb) | 1763 if (false_edge->dest == bb->next_bb) |
1657 { | 1764 { |
1658 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest)); | 1765 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest), |
1659 add_reg_br_prob_note (last, true_edge->probability); | 1766 true_edge->probability); |
1660 maybe_dump_rtl_for_gimple_stmt (stmt, last); | 1767 maybe_dump_rtl_for_gimple_stmt (stmt, last); |
1661 if (true_edge->goto_locus) | 1768 if (true_edge->goto_locus) |
1662 { | 1769 { |
1663 set_curr_insn_source_location (true_edge->goto_locus); | 1770 set_curr_insn_source_location (true_edge->goto_locus); |
1664 set_curr_insn_block (true_edge->goto_block); | 1771 set_curr_insn_block (true_edge->goto_block); |
1669 maybe_cleanup_end_of_block (false_edge, last); | 1776 maybe_cleanup_end_of_block (false_edge, last); |
1670 return NULL; | 1777 return NULL; |
1671 } | 1778 } |
1672 if (true_edge->dest == bb->next_bb) | 1779 if (true_edge->dest == bb->next_bb) |
1673 { | 1780 { |
1674 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest)); | 1781 jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest), |
1675 add_reg_br_prob_note (last, false_edge->probability); | 1782 false_edge->probability); |
1676 maybe_dump_rtl_for_gimple_stmt (stmt, last); | 1783 maybe_dump_rtl_for_gimple_stmt (stmt, last); |
1677 if (false_edge->goto_locus) | 1784 if (false_edge->goto_locus) |
1678 { | 1785 { |
1679 set_curr_insn_source_location (false_edge->goto_locus); | 1786 set_curr_insn_source_location (false_edge->goto_locus); |
1680 set_curr_insn_block (false_edge->goto_block); | 1787 set_curr_insn_block (false_edge->goto_block); |
1684 true_edge->flags |= EDGE_FALLTHRU; | 1791 true_edge->flags |= EDGE_FALLTHRU; |
1685 maybe_cleanup_end_of_block (true_edge, last); | 1792 maybe_cleanup_end_of_block (true_edge, last); |
1686 return NULL; | 1793 return NULL; |
1687 } | 1794 } |
1688 | 1795 |
1689 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest)); | 1796 jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest), |
1690 add_reg_br_prob_note (last, true_edge->probability); | 1797 true_edge->probability); |
1691 last = get_last_insn (); | 1798 last = get_last_insn (); |
1692 if (false_edge->goto_locus) | 1799 if (false_edge->goto_locus) |
1693 { | 1800 { |
1694 set_curr_insn_source_location (false_edge->goto_locus); | 1801 set_curr_insn_source_location (false_edge->goto_locus); |
1695 set_curr_insn_block (false_edge->goto_block); | 1802 set_curr_insn_block (false_edge->goto_block); |
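Editor's note: this is the consumer side of the add_reg_br_prob_note removal seen near the top of the diff. jumpif_1 and jumpifnot_1 now take the edge probability as a parameter, so the REG_BR_PROB note is attached when the jump is emitted instead of being pattern-matched out of the insn stream afterwards. Schematically, with hypothetical names and a stand-in type:

typedef int rtx_t;   /* stand-in, not GCC's rtx */

/* Before: emit the jump, then walk back over the emitted insns to
   attach a probability note -- fragile pattern matching.  */
void emit_cond_jump_old (int cond, rtx_t label);
void annotate_last_jump (int probability);

/* After: the emitter receives the probability up front and attaches
   the note to the jump it just created.  */
void emit_cond_jump_new (int cond, rtx_t label, int probability);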
1736 expand_call_stmt (gimple stmt) | 1843 expand_call_stmt (gimple stmt) |
1737 { | 1844 { |
1738 tree exp; | 1845 tree exp; |
1739 tree lhs = gimple_call_lhs (stmt); | 1846 tree lhs = gimple_call_lhs (stmt); |
1740 size_t i; | 1847 size_t i; |
1848 bool builtin_p; | |
1849 tree decl; | |
1741 | 1850 |
1742 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3); | 1851 exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3); |
1743 | 1852 |
1744 CALL_EXPR_FN (exp) = gimple_call_fn (stmt); | 1853 CALL_EXPR_FN (exp) = gimple_call_fn (stmt); |
1854 decl = gimple_call_fndecl (stmt); | |
1855 builtin_p = decl && DECL_BUILT_IN (decl); | |
1856 | |
1745 TREE_TYPE (exp) = gimple_call_return_type (stmt); | 1857 TREE_TYPE (exp) = gimple_call_return_type (stmt); |
1746 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt); | 1858 CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt); |
1747 | 1859 |
1748 for (i = 0; i < gimple_call_num_args (stmt); i++) | 1860 for (i = 0; i < gimple_call_num_args (stmt); i++) |
1749 CALL_EXPR_ARG (exp, i) = gimple_call_arg (stmt, i); | 1861 { |
1862 tree arg = gimple_call_arg (stmt, i); | |
1863 gimple def; | |
1864 /* TER addresses into arguments of builtin functions so we have a | |
1865 chance to infer more correct alignment information. See PR39954. */ | |
1866 if (builtin_p | |
1867 && TREE_CODE (arg) == SSA_NAME | |
1868 && (def = get_gimple_for_ssa_name (arg)) | |
1869 && gimple_assign_rhs_code (def) == ADDR_EXPR) | |
1870 arg = gimple_assign_rhs1 (def); | |
1871 CALL_EXPR_ARG (exp, i) = arg; | |
1872 } | |
1750 | 1873 |
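Editor's note: the new argument loop implements the PR39954 tweak mentioned in its comment. When expanding a call to a builtin, an SSA-name argument whose TER-forwardable definition (via get_gimple_for_ssa_name) is an ADDR_EXPR is replaced by that ADDR_EXPR itself, so the expander can see the pointed-to object's alignment. The look-through in miniature, over a toy def table:

#include <stddef.h>

enum op { OP_ADDR, OP_OTHER };
struct def { enum op code; int operand; };

/* If ARG indexes a definition that takes an address, return the
   underlying operand; otherwise keep ARG unchanged.  */
static int
look_through_addr (const struct def *defs, size_t ndefs, int arg)
{
  if (arg >= 0 && (size_t) arg < ndefs && defs[arg].code == OP_ADDR)
    return defs[arg].operand;
  return arg;
}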
1751 if (gimple_has_side_effects (stmt)) | 1874 if (gimple_has_side_effects (stmt)) |
1752 TREE_SIDE_EFFECTS (exp) = 1; | 1875 TREE_SIDE_EFFECTS (exp) = 1; |
1753 | 1876 |
1754 if (gimple_call_nothrow_p (stmt)) | 1877 if (gimple_call_nothrow_p (stmt)) |
1831 expand_return (op0); | 1954 expand_return (op0); |
1832 break; | 1955 break; |
1833 | 1956 |
1834 case GIMPLE_ASSIGN: | 1957 case GIMPLE_ASSIGN: |
1835 { | 1958 { |
1836 tree lhs = gimple_assign_lhs (stmt); | 1959 tree lhs = gimple_assign_lhs (stmt); |
1837 | 1960 |
1838 /* Tree expand used to fiddle with |= and &= of two bitfield | 1961 /* Tree expand used to fiddle with |= and &= of two bitfield |
1839 COMPONENT_REFs here. This can't happen with gimple, the LHS | 1962 COMPONENT_REFs here. This can't happen with gimple, the LHS |
1840 of binary assigns must be a gimple reg. */ | 1963 of binary assigns must be a gimple reg. */ |
1841 | 1964 |
1842 if (TREE_CODE (lhs) != SSA_NAME | 1965 if (TREE_CODE (lhs) != SSA_NAME |
1843 || get_gimple_rhs_class (gimple_expr_code (stmt)) | 1966 || get_gimple_rhs_class (gimple_expr_code (stmt)) |
1844 == GIMPLE_SINGLE_RHS) | 1967 == GIMPLE_SINGLE_RHS) |
1845 { | 1968 { |
1846 tree rhs = gimple_assign_rhs1 (stmt); | 1969 tree rhs = gimple_assign_rhs1 (stmt); |
1847 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt)) | 1970 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt)) |
1848 == GIMPLE_SINGLE_RHS); | 1971 == GIMPLE_SINGLE_RHS); |
1849 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)) | 1972 if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)) |
1850 SET_EXPR_LOCATION (rhs, gimple_location (stmt)); | 1973 SET_EXPR_LOCATION (rhs, gimple_location (stmt)); |
1851 expand_assignment (lhs, rhs, | 1974 expand_assignment (lhs, rhs, |
1852 gimple_assign_nontemporal_move_p (stmt)); | 1975 gimple_assign_nontemporal_move_p (stmt)); |
1853 } | 1976 } |
1854 else | 1977 else |
1855 { | 1978 { |
1856 rtx target, temp; | 1979 rtx target, temp; |
1857 bool nontemporal = gimple_assign_nontemporal_move_p (stmt); | 1980 bool nontemporal = gimple_assign_nontemporal_move_p (stmt); |
1858 struct separate_ops ops; | 1981 struct separate_ops ops; |
1859 bool promoted = false; | 1982 bool promoted = false; |
1860 | 1983 |
1861 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE); | 1984 target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE); |
1862 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target)) | 1985 if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target)) |
1863 promoted = true; | 1986 promoted = true; |
1864 | 1987 |
1865 ops.code = gimple_assign_rhs_code (stmt); | 1988 ops.code = gimple_assign_rhs_code (stmt); |
1866 ops.type = TREE_TYPE (lhs); | 1989 ops.type = TREE_TYPE (lhs); |
1867 switch (get_gimple_rhs_class (gimple_expr_code (stmt))) | 1990 switch (get_gimple_rhs_class (gimple_expr_code (stmt))) |
1868 { | 1991 { |
1869 case GIMPLE_BINARY_RHS: | 1992 case GIMPLE_TERNARY_RHS: |
1870 ops.op1 = gimple_assign_rhs2 (stmt); | 1993 ops.op2 = gimple_assign_rhs3 (stmt); |
1871 /* Fallthru */ | 1994 /* Fallthru */ |
1872 case GIMPLE_UNARY_RHS: | 1995 case GIMPLE_BINARY_RHS: |
1873 ops.op0 = gimple_assign_rhs1 (stmt); | 1996 ops.op1 = gimple_assign_rhs2 (stmt); |
1874 break; | 1997 /* Fallthru */ |
1875 default: | 1998 case GIMPLE_UNARY_RHS: |
1876 gcc_unreachable (); | 1999 ops.op0 = gimple_assign_rhs1 (stmt); |
1877 } | 2000 break; |
1878 ops.location = gimple_location (stmt); | 2001 default: |
1879 | 2002 gcc_unreachable (); |
1880 /* If we want to use a nontemporal store, force the value to | 2003 } |
1881 register first. If we store into a promoted register, | 2004 ops.location = gimple_location (stmt); |
1882 don't directly expand to target. */ | 2005 |
1883 temp = nontemporal || promoted ? NULL_RTX : target; | 2006 /* If we want to use a nontemporal store, force the value to |
1884 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target), | 2007 register first. If we store into a promoted register, |
1885 EXPAND_NORMAL); | 2008 don't directly expand to target. */ |
1886 | 2009 temp = nontemporal || promoted ? NULL_RTX : target; |
1887 if (temp == target) | 2010 temp = expand_expr_real_2 (&ops, temp, GET_MODE (target), |
1888 ; | 2011 EXPAND_NORMAL); |
1889 else if (promoted) | 2012 |
1890 { | 2013 if (temp == target) |
1891 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target); | 2014 ; |
1892 /* If TEMP is a VOIDmode constant, use convert_modes to make | 2015 else if (promoted) |
1893 sure that we properly convert it. */ | 2016 { |
1894 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode) | 2017 int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target); |
1895 { | 2018 /* If TEMP is a VOIDmode constant, use convert_modes to make |
1896 temp = convert_modes (GET_MODE (target), | 2019 sure that we properly convert it. */ |
1897 TYPE_MODE (ops.type), | 2020 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode) |
1898 temp, unsignedp); | 2021 { |
1899 temp = convert_modes (GET_MODE (SUBREG_REG (target)), | 2022 temp = convert_modes (GET_MODE (target), |
1900 GET_MODE (target), temp, unsignedp); | 2023 TYPE_MODE (ops.type), |
1901 } | 2024 temp, unsignedp); |
1902 | 2025 temp = convert_modes (GET_MODE (SUBREG_REG (target)), |
1903 convert_move (SUBREG_REG (target), temp, unsignedp); | 2026 GET_MODE (target), temp, unsignedp); |
1904 } | 2027 } |
1905 else if (nontemporal && emit_storent_insn (target, temp)) | 2028 |
1906 ; | 2029 convert_move (SUBREG_REG (target), temp, unsignedp); |
1907 else | 2030 } |
1908 { | 2031 else if (nontemporal && emit_storent_insn (target, temp)) |
1909 temp = force_operand (temp, target); | 2032 ; |
1910 if (temp != target) | 2033 else |
1911 emit_move_insn (target, temp); | 2034 { |
1912 } | 2035 temp = force_operand (temp, target); |
1913 } | 2036 if (temp != target) |
2037 emit_move_insn (target, temp); | |
2038 } | |
2039 } | |
1914 } | 2040 } |
1915 break; | 2041 break; |
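A short sketch of the promoted-register path above, under a hypothetical target that promotes QImode variables into SImode registers; the concrete modes and register number are assumptions for illustration only.

    /* TARGET would be (subreg:QI (reg:SI 58) 0) with
       SUBREG_PROMOTED_VAR_P set.  Rather than moving into the narrow
       subreg, the expander first fixes up a VOIDmode constant TEMP with
       convert_modes and then writes the whole register:
         convert_move (SUBREG_REG (target), temp, unsignedp);
       keeping the promoted upper bits in the state the target expects.  */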
1916 | 2042 |
1917 default: | 2043 default: |
1918 gcc_unreachable (); | 2044 gcc_unreachable (); |
2167 | 2293 |
2168 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting | 2294 /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting |
2169 any rtl. */ | 2295 any rtl. */ |
2170 | 2296 |
2171 static rtx | 2297 static rtx |
2172 convert_debug_memory_address (enum machine_mode mode, rtx x) | 2298 convert_debug_memory_address (enum machine_mode mode, rtx x, |
2299 addr_space_t as) | |
2173 { | 2300 { |
2174 enum machine_mode xmode = GET_MODE (x); | 2301 enum machine_mode xmode = GET_MODE (x); |
2175 | 2302 |
2176 #ifndef POINTERS_EXTEND_UNSIGNED | 2303 #ifndef POINTERS_EXTEND_UNSIGNED |
2177 gcc_assert (mode == Pmode); | 2304 gcc_assert (mode == Pmode |
2305 || mode == targetm.addr_space.address_mode (as)); | |
2178 gcc_assert (xmode == mode || xmode == VOIDmode); | 2306 gcc_assert (xmode == mode || xmode == VOIDmode); |
2179 #else | 2307 #else |
2180 gcc_assert (mode == Pmode || mode == ptr_mode); | 2308 rtx temp; |
2309 enum machine_mode address_mode = targetm.addr_space.address_mode (as); | |
2310 enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as); | |
2311 | |
2312 gcc_assert (mode == address_mode || mode == pointer_mode); | |
2181 | 2313 |
2182 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode) | 2314 if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode) |
2183 return x; | 2315 return x; |
2184 | 2316 |
2185 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode)) | 2317 if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode)) |
2189 else if (POINTERS_EXTEND_UNSIGNED > 0) | 2321 else if (POINTERS_EXTEND_UNSIGNED > 0) |
2190 x = gen_rtx_ZERO_EXTEND (mode, x); | 2322 x = gen_rtx_ZERO_EXTEND (mode, x); |
2191 else if (!POINTERS_EXTEND_UNSIGNED) | 2323 else if (!POINTERS_EXTEND_UNSIGNED) |
2192 x = gen_rtx_SIGN_EXTEND (mode, x); | 2324 x = gen_rtx_SIGN_EXTEND (mode, x); |
2193 else | 2325 else |
2194 gcc_unreachable (); | 2326 { |
2327 switch (GET_CODE (x)) | |
2328 { | |
2329 case SUBREG: | |
2330 if ((SUBREG_PROMOTED_VAR_P (x) | |
2331 || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x))) | |
2332 || (GET_CODE (SUBREG_REG (x)) == PLUS | |
2333 && REG_P (XEXP (SUBREG_REG (x), 0)) | |
2334 && REG_POINTER (XEXP (SUBREG_REG (x), 0)) | |
2335 && CONST_INT_P (XEXP (SUBREG_REG (x), 1)))) | |
2336 && GET_MODE (SUBREG_REG (x)) == mode) | |
2337 return SUBREG_REG (x); | |
2338 break; | |
2339 case LABEL_REF: | |
2340 temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0)); | |
2341 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x); | |
2342 return temp; | |
2343 case SYMBOL_REF: | |
2344 temp = shallow_copy_rtx (x); | |
2345 PUT_MODE (temp, mode); | |
2346 return temp; | |
2347 case CONST: | |
2348 temp = convert_debug_memory_address (mode, XEXP (x, 0), as); | |
2349 if (temp) | |
2350 temp = gen_rtx_CONST (mode, temp); | |
2351 return temp; | |
2352 case PLUS: | |
2353 case MINUS: | |
2354 if (CONST_INT_P (XEXP (x, 1))) | |
2355 { | |
2356 temp = convert_debug_memory_address (mode, XEXP (x, 0), as); | |
2357 if (temp) | |
2358 return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1)); | |
2359 } | |
2360 break; | |
2361 default: | |
2362 break; | |
2363 } | |
2364 /* Don't know how to express ptr_extend as operation in debug info. */ | |
2365 return NULL; | |
2366 } | |
2195 #endif /* POINTERS_EXTEND_UNSIGNED */ | 2367 #endif /* POINTERS_EXTEND_UNSIGNED */ |
2196 | 2368 |
2197 return x; | 2369 return x; |
2198 } | 2370 } |
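A hedged usage sketch of convert_debug_memory_address, mirroring its callers later in this file; the GCC-internal context (an address rtx OP0 and an addr_space_t AS) is assumed rather than shown.

    /* Convert a debug address to the address mode of its address
       space; a NULL result means the extension cannot be expressed in
       debug info, so the enclosing debug expression must be dropped.  */
    op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
                                        op0, as);
    if (op0 == NULL_RTX)
      return NULL;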
2199 | 2371 |
2205 { | 2377 { |
2206 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX; | 2378 rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX; |
2207 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp)); | 2379 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp)); |
2208 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp)); | 2380 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp)); |
2209 addr_space_t as; | 2381 addr_space_t as; |
2210 enum machine_mode address_mode; | |
2211 | 2382 |
2212 switch (TREE_CODE_CLASS (TREE_CODE (exp))) | 2383 switch (TREE_CODE_CLASS (TREE_CODE (exp))) |
2213 { | 2384 { |
2214 case tcc_expression: | 2385 case tcc_expression: |
2215 switch (TREE_CODE (exp)) | 2386 switch (TREE_CODE (exp)) |
2216 { | 2387 { |
2217 case COND_EXPR: | 2388 case COND_EXPR: |
2218 goto ternary; | 2389 case DOT_PROD_EXPR: |
2219 | 2390 case WIDEN_MULT_PLUS_EXPR: |
2220 case TRUTH_ANDIF_EXPR: | 2391 case WIDEN_MULT_MINUS_EXPR: |
2221 case TRUTH_ORIF_EXPR: | 2392 case FMA_EXPR: |
2222 case TRUTH_AND_EXPR: | 2393 goto ternary; |
2223 case TRUTH_OR_EXPR: | 2394 |
2224 case TRUTH_XOR_EXPR: | 2395 case TRUTH_ANDIF_EXPR: |
2225 goto binary; | 2396 case TRUTH_ORIF_EXPR: |
2226 | 2397 case TRUTH_AND_EXPR: |
2227 case TRUTH_NOT_EXPR: | 2398 case TRUTH_OR_EXPR: |
2228 goto unary; | 2399 case TRUTH_XOR_EXPR: |
2229 | 2400 goto binary; |
2230 default: | 2401 |
2231 break; | 2402 case TRUTH_NOT_EXPR: |
2232 } | 2403 goto unary; |
2404 | |
2405 default: | |
2406 break; | |
2407 } | |
2233 break; | 2408 break; |
2234 | 2409 |
2235 ternary: | 2410 ternary: |
2236 op2 = expand_debug_expr (TREE_OPERAND (exp, 2)); | 2411 op2 = expand_debug_expr (TREE_OPERAND (exp, 2)); |
2237 if (!op2) | 2412 if (!op2) |
2312 case RESULT_DECL: | 2487 case RESULT_DECL: |
2313 op0 = DECL_RTL_IF_SET (exp); | 2488 op0 = DECL_RTL_IF_SET (exp); |
2314 | 2489 |
2315 /* This decl was probably optimized away. */ | 2490 /* This decl was probably optimized away. */ |
2316 if (!op0) | 2491 if (!op0) |
2317 { | 2492 { |
2318 if (TREE_CODE (exp) != VAR_DECL | 2493 if (TREE_CODE (exp) != VAR_DECL |
2319 || DECL_EXTERNAL (exp) | 2494 || DECL_EXTERNAL (exp) |
2320 || !TREE_STATIC (exp) | 2495 || !TREE_STATIC (exp) |
2321 || !DECL_NAME (exp) | 2496 || !DECL_NAME (exp) |
2322 || DECL_HARD_REGISTER (exp) | 2497 || DECL_HARD_REGISTER (exp) |
2323 || mode == VOIDmode) | 2498 || mode == VOIDmode) |
2324 return NULL; | 2499 return NULL; |
2325 | 2500 |
2326 op0 = DECL_RTL (exp); | 2501 op0 = make_decl_rtl_for_debug (exp); |
2327 SET_DECL_RTL (exp, NULL); | 2502 if (!MEM_P (op0) |
2328 if (!MEM_P (op0) | 2503 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF |
2329 || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF | 2504 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp) |
2330 || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp) | 2505 return NULL; |
2331 return NULL; | 2506 } |
2332 } | |
2333 else | 2507 else |
2334 op0 = copy_rtx (op0); | 2508 op0 = copy_rtx (op0); |
2335 | 2509 |
2336 if (GET_MODE (op0) == BLKmode) | 2510 if (GET_MODE (op0) == BLKmode |
2337 { | 2511 /* If op0 is not BLKmode but MODE is BLKmode, adjust_mode |
2338 gcc_assert (MEM_P (op0)); | 2512 below would ICE. While it is likely a FE bug, |
2339 op0 = adjust_address_nv (op0, mode, 0); | 2513 try to be robust here. See PR43166. */ |
2340 return op0; | 2514 || mode == BLKmode |
2341 } | 2515 || (mode == VOIDmode && GET_MODE (op0) != VOIDmode)) |
2516 { | |
2517 gcc_assert (MEM_P (op0)); | |
2518 op0 = adjust_address_nv (op0, mode, 0); | |
2519 return op0; | |
2520 } | |
2342 | 2521 |
2343 /* Fall through. */ | 2522 /* Fall through. */ |
2344 | 2523 |
2345 adjust_mode: | 2524 adjust_mode: |
2346 case PAREN_EXPR: | 2525 case PAREN_EXPR: |
2347 case NOP_EXPR: | 2526 case NOP_EXPR: |
2348 case CONVERT_EXPR: | 2527 case CONVERT_EXPR: |
2349 { | 2528 { |
2350 enum machine_mode inner_mode = GET_MODE (op0); | 2529 enum machine_mode inner_mode = GET_MODE (op0); |
2351 | 2530 |
2352 if (mode == inner_mode) | 2531 if (mode == inner_mode) |
2353 return op0; | 2532 return op0; |
2354 | 2533 |
2355 if (inner_mode == VOIDmode) | 2534 if (inner_mode == VOIDmode) |
2356 { | 2535 { |
2357 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); | 2536 if (TREE_CODE (exp) == SSA_NAME) |
2358 if (mode == inner_mode) | 2537 inner_mode = TYPE_MODE (TREE_TYPE (exp)); |
2359 return op0; | 2538 else |
2539 inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
2540 if (mode == inner_mode) | |
2541 return op0; | |
2542 } | |
2543 | |
2544 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode)) | |
2545 { | |
2546 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode)) | |
2547 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0); | |
2548 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode)) | |
2549 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode); | |
2550 else | |
2551 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode); | |
2552 } | |
2553 else if (FLOAT_MODE_P (mode)) | |
2554 { | |
2555 gcc_assert (TREE_CODE (exp) != SSA_NAME); | |
2556 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))) | |
2557 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode); | |
2558 else | |
2559 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode); | |
2560 } | |
2561 else if (FLOAT_MODE_P (inner_mode)) | |
2562 { | |
2563 if (unsignedp) | |
2564 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode); | |
2565 else | |
2566 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode); | |
2567 } | |
2568 else if (CONSTANT_P (op0) | |
2569 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode)) | |
2570 op0 = simplify_gen_subreg (mode, op0, inner_mode, | |
2571 subreg_lowpart_offset (mode, | |
2572 inner_mode)); | |
2573 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary | |
2574 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) | |
2575 : unsignedp) | |
2576 op0 = gen_rtx_ZERO_EXTEND (mode, op0); | |
2577 else | |
2578 op0 = gen_rtx_SIGN_EXTEND (mode, op0); | |
2579 | |
2580 return op0; | |
2360 } | 2581 } |
2361 | 2582 |
2362 if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode)) | 2583 case MEM_REF: |
2363 { | 2584 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0))) |
2364 if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode)) | 2585 { |
2365 op0 = simplify_gen_subreg (mode, op0, inner_mode, 0); | 2586 tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp), |
2366 else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode)) | 2587 TREE_OPERAND (exp, 0), |
2367 op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode); | 2588 TREE_OPERAND (exp, 1)); |
2368 else | 2589 if (newexp) |
2369 op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode); | 2590 return expand_debug_expr (newexp); |
2370 } | 2591 } |
2371 else if (FLOAT_MODE_P (mode)) | 2592 /* FALLTHROUGH */ |
2372 { | |
2373 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))) | |
2374 op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode); | |
2375 else | |
2376 op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode); | |
2377 } | |
2378 else if (FLOAT_MODE_P (inner_mode)) | |
2379 { | |
2380 if (unsignedp) | |
2381 op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode); | |
2382 else | |
2383 op0 = simplify_gen_unary (FIX, mode, op0, inner_mode); | |
2384 } | |
2385 else if (CONSTANT_P (op0) | |
2386 || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode)) | |
2387 op0 = simplify_gen_subreg (mode, op0, inner_mode, | |
2388 subreg_lowpart_offset (mode, | |
2389 inner_mode)); | |
2390 else if (unsignedp) | |
2391 op0 = gen_rtx_ZERO_EXTEND (mode, op0); | |
2392 else | |
2393 op0 = gen_rtx_SIGN_EXTEND (mode, op0); | |
2394 | |
2395 return op0; | |
2396 } | |
2397 | |
2398 case INDIRECT_REF: | 2593 case INDIRECT_REF: |
2399 case ALIGN_INDIRECT_REF: | |
2400 case MISALIGNED_INDIRECT_REF: | |
2401 op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); | 2594 op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); |
2402 if (!op0) | 2595 if (!op0) |
2403 return NULL; | 2596 return NULL; |
2404 | 2597 |
2598 if (TREE_CODE (exp) == MEM_REF) | |
2599 { | |
2600 if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR | |
2601 || (GET_CODE (op0) == PLUS | |
2602 && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR)) | |
2603 /* (mem (debug_implicit_ptr)) might confuse aliasing. | |
2604 Instead just use get_inner_reference. */ | |
2605 goto component_ref; | |
2606 | |
2607 op1 = expand_debug_expr (TREE_OPERAND (exp, 1)); | |
2608 if (!op1 || !CONST_INT_P (op1)) | |
2609 return NULL; | |
2610 | |
2611 op0 = plus_constant (op0, INTVAL (op1)); | |
2612 } | |
2613 | |
2405 if (POINTER_TYPE_P (TREE_TYPE (exp))) | 2614 if (POINTER_TYPE_P (TREE_TYPE (exp))) |
2406 { | 2615 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp))); |
2407 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp))); | |
2408 address_mode = targetm.addr_space.address_mode (as); | |
2409 } | |
2410 else | 2616 else |
2411 { | 2617 as = ADDR_SPACE_GENERIC; |
2412 as = ADDR_SPACE_GENERIC; | 2618 |
2413 address_mode = Pmode; | 2619 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as), |
2414 } | 2620 op0, as); |
2415 | 2621 if (op0 == NULL_RTX) |
2416 if (TREE_CODE (exp) == ALIGN_INDIRECT_REF) | 2622 return NULL; |
2417 { | |
2418 int align = TYPE_ALIGN_UNIT (TREE_TYPE (exp)); | |
2419 op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align)); | |
2420 } | |
2421 | 2623 |
2422 op0 = gen_rtx_MEM (mode, op0); | 2624 op0 = gen_rtx_MEM (mode, op0); |
2423 | |
2424 set_mem_attributes (op0, exp, 0); | 2625 set_mem_attributes (op0, exp, 0); |
2626 if (TREE_CODE (exp) == MEM_REF | |
2627 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0))) | |
2628 set_mem_expr (op0, NULL_TREE); | |
2425 set_mem_addr_space (op0, as); | 2629 set_mem_addr_space (op0, as); |
2426 | 2630 |
2427 return op0; | 2631 return op0; |
2428 | 2632 |
2429 case TARGET_MEM_REF: | 2633 case TARGET_MEM_REF: |
2430 if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp))) | 2634 if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR |
2431 return NULL; | 2635 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0))) |
2636 return NULL; | |
2432 | 2637 |
2433 op0 = expand_debug_expr | 2638 op0 = expand_debug_expr |
2434 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp)); | 2639 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp)); |
2435 if (!op0) | 2640 if (!op0) |
2436 return NULL; | 2641 return NULL; |
2437 | 2642 |
2438 as = TYPE_ADDR_SPACE (TREE_TYPE (exp)); | 2643 if (POINTER_TYPE_P (TREE_TYPE (exp))) |
2644 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp))); | |
2645 else | |
2646 as = ADDR_SPACE_GENERIC; | |
2647 | |
2648 op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as), | |
2649 op0, as); | |
2650 if (op0 == NULL_RTX) | |
2651 return NULL; | |
2439 | 2652 |
2440 op0 = gen_rtx_MEM (mode, op0); | 2653 op0 = gen_rtx_MEM (mode, op0); |
2441 | 2654 |
2442 set_mem_attributes (op0, exp, 0); | 2655 set_mem_attributes (op0, exp, 0); |
2443 set_mem_addr_space (op0, as); | 2656 set_mem_addr_space (op0, as); |
2444 | 2657 |
2445 return op0; | 2658 return op0; |
2446 | 2659 |
2660 component_ref: | |
2447 case ARRAY_REF: | 2661 case ARRAY_REF: |
2448 case ARRAY_RANGE_REF: | 2662 case ARRAY_RANGE_REF: |
2449 case COMPONENT_REF: | 2663 case COMPONENT_REF: |
2450 case BIT_FIELD_REF: | 2664 case BIT_FIELD_REF: |
2451 case REALPART_EXPR: | 2665 case REALPART_EXPR: |
2452 case IMAGPART_EXPR: | 2666 case IMAGPART_EXPR: |
2453 case VIEW_CONVERT_EXPR: | 2667 case VIEW_CONVERT_EXPR: |
2454 { | 2668 { |
2455 enum machine_mode mode1; | 2669 enum machine_mode mode1; |
2456 HOST_WIDE_INT bitsize, bitpos; | 2670 HOST_WIDE_INT bitsize, bitpos; |
2457 tree offset; | 2671 tree offset; |
2458 int volatilep = 0; | 2672 int volatilep = 0; |
2459 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset, | 2673 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset, |
2460 &mode1, &unsignedp, &volatilep, false); | 2674 &mode1, &unsignedp, &volatilep, false); |
2461 rtx orig_op0; | 2675 rtx orig_op0; |
2462 | 2676 |
2463 if (bitsize == 0) | 2677 if (bitsize == 0) |
2464 return NULL; | 2678 return NULL; |
2465 | 2679 |
2466 orig_op0 = op0 = expand_debug_expr (tem); | 2680 orig_op0 = op0 = expand_debug_expr (tem); |
2467 | 2681 |
2468 if (!op0) | 2682 if (!op0) |
2469 return NULL; | 2683 return NULL; |
2470 | 2684 |
2471 if (offset) | 2685 if (offset) |
2472 { | 2686 { |
2473 enum machine_mode addrmode, offmode; | 2687 enum machine_mode addrmode, offmode; |
2474 | 2688 |
2475 gcc_assert (MEM_P (op0)); | 2689 if (!MEM_P (op0)) |
2476 | 2690 return NULL; |
2477 op0 = XEXP (op0, 0); | 2691 |
2478 addrmode = GET_MODE (op0); | 2692 op0 = XEXP (op0, 0); |
2479 if (addrmode == VOIDmode) | 2693 addrmode = GET_MODE (op0); |
2480 addrmode = Pmode; | 2694 if (addrmode == VOIDmode) |
2481 | 2695 addrmode = Pmode; |
2482 op1 = expand_debug_expr (offset); | 2696 |
2483 if (!op1) | 2697 op1 = expand_debug_expr (offset); |
2484 return NULL; | 2698 if (!op1) |
2485 | 2699 return NULL; |
2486 offmode = GET_MODE (op1); | 2700 |
2487 if (offmode == VOIDmode) | 2701 offmode = GET_MODE (op1); |
2488 offmode = TYPE_MODE (TREE_TYPE (offset)); | 2702 if (offmode == VOIDmode) |
2489 | 2703 offmode = TYPE_MODE (TREE_TYPE (offset)); |
2490 if (addrmode != offmode) | 2704 |
2491 op1 = simplify_gen_subreg (addrmode, op1, offmode, | 2705 if (addrmode != offmode) |
2492 subreg_lowpart_offset (addrmode, | 2706 op1 = simplify_gen_subreg (addrmode, op1, offmode, |
2493 offmode)); | 2707 subreg_lowpart_offset (addrmode, |
2494 | 2708 offmode)); |
2495 /* Don't use offset_address here, we don't need a | 2709 |
2496 recognizable address, and we don't want to generate | 2710 /* Don't use offset_address here, we don't need a |
2497 code. */ | 2711 recognizable address, and we don't want to generate |
2498 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1)); | 2712 code. */ |
2499 } | 2713 op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1)); |
2500 | 2714 } |
2501 if (MEM_P (op0)) | 2715 |
2502 { | 2716 if (MEM_P (op0)) |
2503 if (mode1 == VOIDmode) | 2717 { |
2504 /* Bitfield. */ | 2718 if (mode1 == VOIDmode) |
2505 mode1 = smallest_mode_for_size (bitsize, MODE_INT); | 2719 /* Bitfield. */ |
2506 if (bitpos >= BITS_PER_UNIT) | 2720 mode1 = smallest_mode_for_size (bitsize, MODE_INT); |
2507 { | 2721 if (bitpos >= BITS_PER_UNIT) |
2508 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT); | 2722 { |
2509 bitpos %= BITS_PER_UNIT; | 2723 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT); |
2510 } | 2724 bitpos %= BITS_PER_UNIT; |
2511 else if (bitpos < 0) | 2725 } |
2512 { | 2726 else if (bitpos < 0) |
2513 HOST_WIDE_INT units | 2727 { |
2514 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT; | 2728 HOST_WIDE_INT units |
2515 op0 = adjust_address_nv (op0, mode1, units); | 2729 = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT; |
2516 bitpos += units * BITS_PER_UNIT; | 2730 op0 = adjust_address_nv (op0, mode1, units); |
2517 } | 2731 bitpos += units * BITS_PER_UNIT; |
2518 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode)) | 2732 } |
2519 op0 = adjust_address_nv (op0, mode, 0); | 2733 else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode)) |
2520 else if (GET_MODE (op0) != mode1) | 2734 op0 = adjust_address_nv (op0, mode, 0); |
2521 op0 = adjust_address_nv (op0, mode1, 0); | 2735 else if (GET_MODE (op0) != mode1) |
2522 else | 2736 op0 = adjust_address_nv (op0, mode1, 0); |
2523 op0 = copy_rtx (op0); | 2737 else |
2524 if (op0 == orig_op0) | 2738 op0 = copy_rtx (op0); |
2525 op0 = shallow_copy_rtx (op0); | 2739 if (op0 == orig_op0) |
2526 set_mem_attributes (op0, exp, 0); | 2740 op0 = shallow_copy_rtx (op0); |
2527 } | 2741 set_mem_attributes (op0, exp, 0); |
2528 | 2742 } |
2529 if (bitpos == 0 && mode == GET_MODE (op0)) | 2743 |
2530 return op0; | 2744 if (bitpos == 0 && mode == GET_MODE (op0)) |
2745 return op0; | |
2531 | 2746 |
2532 if (bitpos < 0) | 2747 if (bitpos < 0) |
2533 return NULL; | 2748 return NULL; |
2534 | 2749 |
2535 if ((bitpos % BITS_PER_UNIT) == 0 | 2750 if (GET_MODE (op0) == BLKmode) |
2536 && bitsize == GET_MODE_BITSIZE (mode1)) | 2751 return NULL; |
2537 { | 2752 |
2538 enum machine_mode opmode = GET_MODE (op0); | 2753 if ((bitpos % BITS_PER_UNIT) == 0 |
2539 | 2754 && bitsize == GET_MODE_BITSIZE (mode1)) |
2540 gcc_assert (opmode != BLKmode); | 2755 { |
2541 | 2756 enum machine_mode opmode = GET_MODE (op0); |
2542 if (opmode == VOIDmode) | 2757 |
2543 opmode = mode1; | 2758 if (opmode == VOIDmode) |
2544 | 2759 opmode = TYPE_MODE (TREE_TYPE (tem)); |
2545 /* This condition may hold if we're expanding the address | 2760 |
2546 right past the end of an array that turned out not to | 2761 /* This condition may hold if we're expanding the address |
2547 be addressable (i.e., the address was only computed in | 2762 right past the end of an array that turned out not to |
2548 debug stmts). The gen_subreg below would rightfully | 2763 be addressable (i.e., the address was only computed in |
2549 crash, and the address doesn't really exist, so just | 2764 debug stmts). The gen_subreg below would rightfully |
2550 drop it. */ | 2765 crash, and the address doesn't really exist, so just |
2551 if (bitpos >= GET_MODE_BITSIZE (opmode)) | 2766 drop it. */ |
2552 return NULL; | 2767 if (bitpos >= GET_MODE_BITSIZE (opmode)) |
2553 | 2768 return NULL; |
2554 return simplify_gen_subreg (mode, op0, opmode, | 2769 |
2555 bitpos / BITS_PER_UNIT); | 2770 if ((bitpos % GET_MODE_BITSIZE (mode)) == 0) |
2556 } | 2771 return simplify_gen_subreg (mode, op0, opmode, |
2557 | 2772 bitpos / BITS_PER_UNIT); |
2558 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0)) | 2773 } |
2559 && TYPE_UNSIGNED (TREE_TYPE (exp)) | 2774 |
2560 ? SIGN_EXTRACT | 2775 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0)) |
2561 : ZERO_EXTRACT, mode, | 2776 && TYPE_UNSIGNED (TREE_TYPE (exp)) |
2562 GET_MODE (op0) != VOIDmode | 2777 ? SIGN_EXTRACT |
2563 ? GET_MODE (op0) : mode1, | 2778 : ZERO_EXTRACT, mode, |
2564 op0, GEN_INT (bitsize), GEN_INT (bitpos)); | 2779 GET_MODE (op0) != VOIDmode |
2780 ? GET_MODE (op0) | |
2781 : TYPE_MODE (TREE_TYPE (tem)), | |
2782 op0, GEN_INT (bitsize), GEN_INT (bitpos)); | |
2565 } | 2783 } |
2566 | 2784 |
2567 case ABS_EXPR: | 2785 case ABS_EXPR: |
2568 return gen_rtx_ABS (mode, op0); | 2786 return gen_rtx_ABS (mode, op0); |
2569 | 2787 |
2584 return gen_rtx_UNSIGNED_FIX (mode, op0); | 2802 return gen_rtx_UNSIGNED_FIX (mode, op0); |
2585 else | 2803 else |
2586 return gen_rtx_FIX (mode, op0); | 2804 return gen_rtx_FIX (mode, op0); |
2587 | 2805 |
2588 case POINTER_PLUS_EXPR: | 2806 case POINTER_PLUS_EXPR: |
2807 /* For the rare target where pointers are not the same size as | |
2808 size_t, we need to check for mismatched modes and correct | |
2809 the addend. */ | |
2810 if (op0 && op1 | |
2811 && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode | |
2812 && GET_MODE (op0) != GET_MODE (op1)) | |
2813 { | |
2814 if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))) | |
2815 op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1); | |
2816 else | |
2817 /* We always sign-extend, regardless of the signedness of | |
2818 the operand, because the operand is always unsigned | |
2819 here even if the original C expression is signed. */ | |
2820 op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1); | |
2821 } | |
2822 /* Fall through. */ | |
2589 case PLUS_EXPR: | 2823 case PLUS_EXPR: |
2590 return gen_rtx_PLUS (mode, op0, op1); | 2824 return gen_rtx_PLUS (mode, op0, op1); |
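A worked illustration of the POINTER_PLUS_EXPR fix-up above, with hypothetical modes: on a target whose pointers are SImode while sizetype is HImode, the addend is widened before the PLUS is built.

    /* OP0: (reg:SI 60)   -- the pointer
       OP1: (reg:HI 61)   -- the sizetype addend
       yields
         (plus:SI (reg:SI 60) (sign_extend:SI (reg:HI 61)))
       with the extension always signed, per the comment above.  */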
2591 | 2825 |
2592 case MINUS_EXPR: | 2826 case MINUS_EXPR: |
2593 return gen_rtx_MINUS (mode, op0, op1); | 2827 return gen_rtx_MINUS (mode, op0, op1); |
2849 } | 3083 } |
2850 | 3084 |
2851 case ADDR_EXPR: | 3085 case ADDR_EXPR: |
2852 op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); | 3086 op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); |
2853 if (!op0 || !MEM_P (op0)) | 3087 if (!op0 || !MEM_P (op0)) |
2854 return NULL; | 3088 { |
2855 | 3089 if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL |
2856 op0 = convert_debug_memory_address (mode, XEXP (op0, 0)); | 3090 || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL |
3091 || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL) | |
3092 && !TREE_ADDRESSABLE (TREE_OPERAND (exp, 0))) | |
3093 return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0)); | |
3094 | |
3095 if (handled_component_p (TREE_OPERAND (exp, 0))) | |
3096 { | |
3097 HOST_WIDE_INT bitoffset, bitsize, maxsize; | |
3098 tree decl | |
3099 = get_ref_base_and_extent (TREE_OPERAND (exp, 0), | |
3100 &bitoffset, &bitsize, &maxsize); | |
3101 if ((TREE_CODE (decl) == VAR_DECL | |
3102 || TREE_CODE (decl) == PARM_DECL | |
3103 || TREE_CODE (decl) == RESULT_DECL) | |
3104 && !TREE_ADDRESSABLE (decl) | |
3105 && (bitoffset % BITS_PER_UNIT) == 0 | |
3106 && bitsize > 0 | |
3107 && bitsize == maxsize) | |
3108 return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl), | |
3109 bitoffset / BITS_PER_UNIT); | |
3110 } | |
3111 | |
3112 return NULL; | |
3113 } | |
3114 | |
3115 as = TYPE_ADDR_SPACE (TREE_TYPE (exp)); | |
3116 op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as); | |
2857 | 3117 |
2858 return op0; | 3118 return op0; |
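A hedged example of the DEBUG_IMPLICIT_PTR results above, for a hypothetical non-addressable local struct `s' whose field `f' sits at byte offset 4:

    /* &s    => (debug_implicit_ptr s)
       &s.f  => (plus (debug_implicit_ptr s) (const_int 4))
       Neither forces `s' into memory; the address exists only inside
       debug location expressions.  */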
2859 | 3119 |
2860 case VECTOR_CST: | 3120 case VECTOR_CST: |
2861 exp = build_constructor_from_list (TREE_TYPE (exp), | 3121 exp = build_constructor_from_list (TREE_TYPE (exp), |
2862 TREE_VECTOR_CST_ELTS (exp)); | 3122 TREE_VECTOR_CST_ELTS (exp)); |
2863 /* Fall through. */ | 3123 /* Fall through. */ |
2864 | 3124 |
2865 case CONSTRUCTOR: | 3125 case CONSTRUCTOR: |
2866 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE) | 3126 if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE) |
2867 { | 3127 { |
2868 unsigned i; | 3128 unsigned i; |
2869 tree val; | 3129 tree val; |
2870 | 3130 |
2871 op0 = gen_rtx_CONCATN | 3131 op0 = gen_rtx_CONCATN |
2872 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))); | 3132 (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))); |
2873 | 3133 |
2874 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val) | 3134 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val) |
2875 { | 3135 { |
2876 op1 = expand_debug_expr (val); | 3136 op1 = expand_debug_expr (val); |
2877 if (!op1) | 3137 if (!op1) |
2878 return NULL; | 3138 return NULL; |
2879 XVECEXP (op0, 0, i) = op1; | 3139 XVECEXP (op0, 0, i) = op1; |
2880 } | 3140 } |
2881 | 3141 |
2882 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))) | 3142 if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))) |
2883 { | 3143 { |
2884 op1 = expand_debug_expr | 3144 op1 = expand_debug_expr |
2885 (fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node)); | 3145 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp)))); |
2886 | 3146 |
2887 if (!op1) | 3147 if (!op1) |
2888 return NULL; | 3148 return NULL; |
2889 | 3149 |
2890 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++) | 3150 for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++) |
2891 XVECEXP (op0, 0, i) = op1; | 3151 XVECEXP (op0, 0, i) = op1; |
2892 } | 3152 } |
2893 | 3153 |
2894 return op0; | 3154 return op0; |
2895 } | 3155 } |
2896 else | 3156 else |
2897 goto flag_unsupported; | 3157 goto flag_unsupported; |
2898 | 3158 |
2899 case CALL_EXPR: | 3159 case CALL_EXPR: |
2900 /* ??? Maybe handle some builtins? */ | 3160 /* ??? Maybe handle some builtins? */ |
2901 return NULL; | 3161 return NULL; |
2902 | 3162 |
2903 case SSA_NAME: | 3163 case SSA_NAME: |
2904 { | 3164 { |
2905 int part = var_to_partition (SA.map, exp); | 3165 gimple g = get_gimple_for_ssa_name (exp); |
2906 | 3166 if (g) |
2907 if (part == NO_PARTITION) | 3167 { |
2908 return NULL; | 3168 op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g)); |
2909 | 3169 if (!op0) |
2910 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions); | 3170 return NULL; |
2911 | 3171 } |
2912 op0 = SA.partition_to_pseudo[part]; | 3172 else |
2913 goto adjust_mode; | 3173 { |
3174 int part = var_to_partition (SA.map, exp); | |
3175 | |
3176 if (part == NO_PARTITION) | |
3177 return NULL; | |
3178 | |
3179 gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions); | |
3180 | |
3181 op0 = copy_rtx (SA.partition_to_pseudo[part]); | |
3182 } | |
3183 goto adjust_mode; | |
2914 } | 3184 } |
2915 | 3185 |
2916 case ERROR_MARK: | 3186 case ERROR_MARK: |
2917 return NULL; | 3187 return NULL; |
3188 | |
3189 /* Vector stuff. For most of the codes we don't have rtl codes. */ | |
3190 case REALIGN_LOAD_EXPR: | |
3191 case REDUC_MAX_EXPR: | |
3192 case REDUC_MIN_EXPR: | |
3193 case REDUC_PLUS_EXPR: | |
3194 case VEC_COND_EXPR: | |
3195 case VEC_EXTRACT_EVEN_EXPR: | |
3196 case VEC_EXTRACT_ODD_EXPR: | |
3197 case VEC_INTERLEAVE_HIGH_EXPR: | |
3198 case VEC_INTERLEAVE_LOW_EXPR: | |
3199 case VEC_LSHIFT_EXPR: | |
3200 case VEC_PACK_FIX_TRUNC_EXPR: | |
3201 case VEC_PACK_SAT_EXPR: | |
3202 case VEC_PACK_TRUNC_EXPR: | |
3203 case VEC_RSHIFT_EXPR: | |
3204 case VEC_UNPACK_FLOAT_HI_EXPR: | |
3205 case VEC_UNPACK_FLOAT_LO_EXPR: | |
3206 case VEC_UNPACK_HI_EXPR: | |
3207 case VEC_UNPACK_LO_EXPR: | |
3208 case VEC_WIDEN_MULT_HI_EXPR: | |
3209 case VEC_WIDEN_MULT_LO_EXPR: | |
3210 return NULL; | |
3211 | |
3212 /* Misc codes. */ | |
3213 case ADDR_SPACE_CONVERT_EXPR: | |
3214 case FIXED_CONVERT_EXPR: | |
3215 case OBJ_TYPE_REF: | |
3216 case WITH_SIZE_EXPR: | |
3217 return NULL; | |
3218 | |
3219 case DOT_PROD_EXPR: | |
3220 if (SCALAR_INT_MODE_P (GET_MODE (op0)) | |
3221 && SCALAR_INT_MODE_P (mode)) | |
3222 { | |
3223 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))) | |
3224 op0 = gen_rtx_ZERO_EXTEND (mode, op0); | |
3225 else | |
3226 op0 = gen_rtx_SIGN_EXTEND (mode, op0); | |
3227 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))) | |
3228 op1 = gen_rtx_ZERO_EXTEND (mode, op1); | |
3229 else | |
3230 op1 = gen_rtx_SIGN_EXTEND (mode, op1); | |
3231 op0 = gen_rtx_MULT (mode, op0, op1); | |
3232 return gen_rtx_PLUS (mode, op0, op2); | |
3233 } | |
3234 return NULL; | |
3235 | |
3236 case WIDEN_MULT_EXPR: | |
3237 case WIDEN_MULT_PLUS_EXPR: | |
3238 case WIDEN_MULT_MINUS_EXPR: | |
3239 if (SCALAR_INT_MODE_P (GET_MODE (op0)) | |
3240 && SCALAR_INT_MODE_P (mode)) | |
3241 { | |
3242 enum machine_mode inner_mode = GET_MODE (op0); | |
3243 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))) | |
3244 op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode); | |
3245 else | |
3246 op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode); | |
3247 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))) | |
3248 op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode); | |
3249 else | |
3250 op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode); | |
3251 op0 = gen_rtx_MULT (mode, op0, op1); | |
3252 if (TREE_CODE (exp) == WIDEN_MULT_EXPR) | |
3253 return op0; | |
3254 else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR) | |
3255 return gen_rtx_PLUS (mode, op0, op2); | |
3256 else | |
3257 return gen_rtx_MINUS (mode, op2, op0); | |
3258 } | |
3259 return NULL; | |
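A sketch of the RTL the widening cases above would build, assuming signed HImode operands widened into SImode (the registers are hypothetical):

    /* WIDEN_MULT_PLUS_EXPR would produce
         (plus:SI (mult:SI (sign_extend:SI (reg:HI 70))
                           (sign_extend:SI (reg:HI 71)))
                  (reg:SI 72))
       i.e. the debug expression mirrors a multiply-accumulate;
       ZERO_EXTEND is substituted for unsigned operand types.  */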
3260 | |
3261 case WIDEN_SUM_EXPR: | |
3262 if (SCALAR_INT_MODE_P (GET_MODE (op0)) | |
3263 && SCALAR_INT_MODE_P (mode)) | |
3264 { | |
3265 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))) | |
3266 op0 = gen_rtx_ZERO_EXTEND (mode, op0); | |
3267 else | |
3268 op0 = gen_rtx_SIGN_EXTEND (mode, op0); | |
3269 return gen_rtx_PLUS (mode, op0, op1); | |
3270 } | |
3271 return NULL; | |
3272 | |
3273 case FMA_EXPR: | |
3274 return gen_rtx_FMA (mode, op0, op1, op2); | |
2918 | 3275 |
2919 default: | 3276 default: |
2920 flag_unsupported: | 3277 flag_unsupported: |
2921 #ifdef ENABLE_CHECKING | 3278 #ifdef ENABLE_CHECKING |
2922 debug_tree (exp); | 3279 debug_tree (exp); |
3064 for (; !gsi_end_p (gsi); gsi_next (&gsi)) | 3421 for (; !gsi_end_p (gsi); gsi_next (&gsi)) |
3065 { | 3422 { |
3066 basic_block new_bb; | 3423 basic_block new_bb; |
3067 | 3424 |
3068 stmt = gsi_stmt (gsi); | 3425 stmt = gsi_stmt (gsi); |
3426 | |
3427 /* If this statement is a non-debug one, and we generate debug | |
3428 insns, then this one might be the last real use of a TERed | |
3429 SSA_NAME, but where there are still some debug uses further | |
3430 down. Expanding the current SSA name in such further debug | |
3431 uses by its RHS might lead to wrong debug info, as coalescing | |
3432 might make the operands of such RHS be placed into the same | |
3433 pseudo as something else. Like so: | |
3434 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead | |
3435 use(a_1); | |
3436 a_2 = ... | |
3437 #DEBUG ... => a_1 | |
3438 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced. | |
3439 If we were now to expand a_1 by its RHS (a_0 + 1) in the debug use, | |
3440 the write to a_2 would actually have clobbered the place which | |
3441 formerly held a_0. | |
3442 | |
3443 So, instead of that, we recognize the situation, and generate | |
3444 debug temporaries at the last real use of TERed SSA names: | |
3445 a_1 = a_0 + 1; | |
3446 #DEBUG #D1 => a_1 | |
3447 use(a_1); | |
3448 a_2 = ... | |
3449 #DEBUG ... => #D1 | |
3450 */ | |
3451 if (MAY_HAVE_DEBUG_INSNS | |
3452 && SA.values | |
3453 && !is_gimple_debug (stmt)) | |
3454 { | |
3455 ssa_op_iter iter; | |
3456 tree op; | |
3457 gimple def; | |
3458 | |
3459 location_t sloc = get_curr_insn_source_location (); | |
3460 tree sblock = get_curr_insn_block (); | |
3461 | |
3462 /* Look for SSA names that have their last use here (TERed | |
3463 names always have only one real use). */ | |
3464 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE) | |
3465 if ((def = get_gimple_for_ssa_name (op))) | |
3466 { | |
3467 imm_use_iterator imm_iter; | |
3468 use_operand_p use_p; | |
3469 bool have_debug_uses = false; | |
3470 | |
3471 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op) | |
3472 { | |
3473 if (gimple_debug_bind_p (USE_STMT (use_p))) | |
3474 { | |
3475 have_debug_uses = true; | |
3476 break; | |
3477 } | |
3478 } | |
3479 | |
3480 if (have_debug_uses) | |
3481 { | |
3482 /* OP is a TERed SSA name, with DEF its defining | |
3483 statement, and where OP is used in further debug | |
3484 instructions. Generate a debug temporary, and | |
3485 replace all uses of OP in debug insns with that | |
3486 temporary. */ | |
3487 gimple debugstmt; | |
3488 tree value = gimple_assign_rhs_to_tree (def); | |
3489 tree vexpr = make_node (DEBUG_EXPR_DECL); | |
3490 rtx val; | |
3491 enum machine_mode mode; | |
3492 | |
3493 set_curr_insn_source_location (gimple_location (def)); | |
3494 set_curr_insn_block (gimple_block (def)); | |
3495 | |
3496 DECL_ARTIFICIAL (vexpr) = 1; | |
3497 TREE_TYPE (vexpr) = TREE_TYPE (value); | |
3498 if (DECL_P (value)) | |
3499 mode = DECL_MODE (value); | |
3500 else | |
3501 mode = TYPE_MODE (TREE_TYPE (value)); | |
3502 DECL_MODE (vexpr) = mode; | |
3503 | |
3504 val = gen_rtx_VAR_LOCATION | |
3505 (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED); | |
3506 | |
3507 val = emit_debug_insn (val); | |
3508 | |
3509 FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op) | |
3510 { | |
3511 if (!gimple_debug_bind_p (debugstmt)) | |
3512 continue; | |
3513 | |
3514 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter) | |
3515 SET_USE (use_p, vexpr); | |
3516 | |
3517 update_stmt (debugstmt); | |
3518 } | |
3519 } | |
3520 } | |
3521 set_curr_insn_source_location (sloc); | |
3522 set_curr_insn_block (sblock); | |
3523 } | |
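A hedged trace of the debug-temporary machinery above, continuing the comment's own example; the insn shown is schematic, and note that the VAR_LOCATION initially carries the tree VALUE (cast to rtx) until expand_debug_locations later rewrites it into real RTL.

    /* For  a_1 = a_0 + 1;  the loop emits, schematically,
         (debug_insn (var_location D#1 <a_0 + 1>))
       and every later  #DEBUG x => a_1  binding is rewritten into
         #DEBUG x => D#1
       so coalescing a_0's pseudo with a_2 cannot corrupt the
       recorded location.  */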
3524 | |
3069 currently_expanding_gimple_stmt = stmt; | 3525 currently_expanding_gimple_stmt = stmt; |
3070 | 3526 |
3071 /* Expand this statement, then evaluate the resulting RTL and | 3527 /* Expand this statement, then evaluate the resulting RTL and |
3072 fixup the CFG accordingly. */ | 3528 fixup the CFG accordingly. */ |
3073 if (gimple_code (stmt) == GIMPLE_COND) | 3529 if (gimple_code (stmt) == GIMPLE_COND) |
3075 new_bb = expand_gimple_cond (bb, stmt); | 3531 new_bb = expand_gimple_cond (bb, stmt); |
3076 if (new_bb) | 3532 if (new_bb) |
3077 return new_bb; | 3533 return new_bb; |
3078 } | 3534 } |
3079 else if (gimple_debug_bind_p (stmt)) | 3535 else if (gimple_debug_bind_p (stmt)) |
3080 { | 3536 { |
3081 location_t sloc = get_curr_insn_source_location (); | 3537 location_t sloc = get_curr_insn_source_location (); |
3082 tree sblock = get_curr_insn_block (); | 3538 tree sblock = get_curr_insn_block (); |
3083 gimple_stmt_iterator nsi = gsi; | 3539 gimple_stmt_iterator nsi = gsi; |
3084 | 3540 |
3085 for (;;) | 3541 for (;;) |
3086 { | 3542 { |
3087 tree var = gimple_debug_bind_get_var (stmt); | 3543 tree var = gimple_debug_bind_get_var (stmt); |
3088 tree value; | 3544 tree value; |
3089 rtx val; | 3545 rtx val; |
3090 enum machine_mode mode; | 3546 enum machine_mode mode; |
3091 | 3547 |
3092 if (gimple_debug_bind_has_value_p (stmt)) | 3548 if (gimple_debug_bind_has_value_p (stmt)) |
3093 value = gimple_debug_bind_get_value (stmt); | 3549 value = gimple_debug_bind_get_value (stmt); |
3094 else | 3550 else |
3095 value = NULL_TREE; | 3551 value = NULL_TREE; |
3096 | 3552 |
3097 last = get_last_insn (); | 3553 last = get_last_insn (); |
3098 | 3554 |
3099 set_curr_insn_source_location (gimple_location (stmt)); | 3555 set_curr_insn_source_location (gimple_location (stmt)); |
3100 set_curr_insn_block (gimple_block (stmt)); | 3556 set_curr_insn_block (gimple_block (stmt)); |
3101 | 3557 |
3102 if (DECL_P (var)) | 3558 if (DECL_P (var)) |
3103 mode = DECL_MODE (var); | 3559 mode = DECL_MODE (var); |
3104 else | 3560 else |
3105 mode = TYPE_MODE (TREE_TYPE (var)); | 3561 mode = TYPE_MODE (TREE_TYPE (var)); |
3106 | 3562 |
3107 val = gen_rtx_VAR_LOCATION | 3563 val = gen_rtx_VAR_LOCATION |
3108 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED); | 3564 (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED); |
3109 | 3565 |
3110 val = emit_debug_insn (val); | 3566 val = emit_debug_insn (val); |
3111 | 3567 |
3112 if (dump_file && (dump_flags & TDF_DETAILS)) | 3568 if (dump_file && (dump_flags & TDF_DETAILS)) |
3113 { | 3569 { |
3114 /* We can't dump the insn with a TREE where an RTX | 3570 /* We can't dump the insn with a TREE where an RTX |
3115 is expected. */ | 3571 is expected. */ |
3116 INSN_VAR_LOCATION_LOC (val) = const0_rtx; | 3572 INSN_VAR_LOCATION_LOC (val) = const0_rtx; |
3117 maybe_dump_rtl_for_gimple_stmt (stmt, last); | 3573 maybe_dump_rtl_for_gimple_stmt (stmt, last); |
3118 INSN_VAR_LOCATION_LOC (val) = (rtx)value; | 3574 INSN_VAR_LOCATION_LOC (val) = (rtx)value; |
3119 } | 3575 } |
3120 | 3576 |
3121 gsi = nsi; | 3577 /* In order not to generate too many debug temporaries, |
3122 gsi_next (&nsi); | 3578 we delink all uses of debug statements we already expanded. |
3123 if (gsi_end_p (nsi)) | 3579 Therefore debug statements between definition and real |
3124 break; | 3580 use of TERed SSA names will continue to use the SSA name, |
3125 stmt = gsi_stmt (nsi); | 3581 and not be replaced with debug temps. */ |
3126 if (!gimple_debug_bind_p (stmt)) | 3582 delink_stmt_imm_use (stmt); |
3127 break; | 3583 |
3128 } | 3584 gsi = nsi; |
3129 | 3585 gsi_next (&nsi); |
3130 set_curr_insn_source_location (sloc); | 3586 if (gsi_end_p (nsi)) |
3131 set_curr_insn_block (sblock); | 3587 break; |
3132 } | 3588 stmt = gsi_stmt (nsi); |
3589 if (!gimple_debug_bind_p (stmt)) | |
3590 break; | |
3591 } | |
3592 | |
3593 set_curr_insn_source_location (sloc); | |
3594 set_curr_insn_block (sblock); | |
3595 } | |
3133 else | 3596 else |
3134 { | 3597 { |
3135 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt)) | 3598 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt)) |
3136 { | 3599 { |
3137 bool can_fallthru; | 3600 bool can_fallthru; |
3398 gimple_stmt_iterator gsi; | 3861 gimple_stmt_iterator gsi; |
3399 | 3862 |
3400 FOR_EACH_BB (bb) | 3863 FOR_EACH_BB (bb) |
3401 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) | 3864 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
3402 { | 3865 { |
3403 gimple stmt = gsi_stmt (gsi); | 3866 gimple stmt = gsi_stmt (gsi); |
3404 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL); | 3867 if (!is_gimple_debug (stmt)) |
3868 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL); | |
3405 } | 3869 } |
3406 } | 3870 } |
3407 | 3871 |
3408 /* This function sets crtl->args.internal_arg_pointer to a virtual | 3872 /* This function sets crtl->args.internal_arg_pointer to a virtual |
3409 register if DRAP is needed. Local register allocator will replace | 3873 register if DRAP is needed. Local register allocator will replace |
3438 | 3902 |
3439 /* Update crtl->stack_alignment_estimated and use it later to align | 3903 /* Update crtl->stack_alignment_estimated and use it later to align |
3440 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call | 3904 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call |
3441 exceptions since callgraph doesn't collect incoming stack alignment | 3905 exceptions since callgraph doesn't collect incoming stack alignment |
3442 in this case. */ | 3906 in this case. */ |
3443 if (flag_non_call_exceptions | 3907 if (cfun->can_throw_non_call_exceptions |
3444 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary) | 3908 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary) |
3445 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY; | 3909 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY; |
3446 else | 3910 else |
3447 preferred_stack_boundary = crtl->preferred_stack_boundary; | 3911 preferred_stack_boundary = crtl->preferred_stack_boundary; |
3448 if (preferred_stack_boundary > crtl->stack_alignment_estimated) | 3912 if (preferred_stack_boundary > crtl->stack_alignment_estimated) |
3492 { | 3956 { |
3493 basic_block bb, init_block; | 3957 basic_block bb, init_block; |
3494 sbitmap blocks; | 3958 sbitmap blocks; |
3495 edge_iterator ei; | 3959 edge_iterator ei; |
3496 edge e; | 3960 edge e; |
3961 rtx var_seq; | |
3497 unsigned i; | 3962 unsigned i; |
3498 | 3963 |
3964 timevar_push (TV_OUT_OF_SSA); | |
3499 rewrite_out_of_ssa (&SA); | 3965 rewrite_out_of_ssa (&SA); |
3966 timevar_pop (TV_OUT_OF_SSA); | |
3500 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions, | 3967 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions, |
3501 sizeof (rtx)); | 3968 sizeof (rtx)); |
3502 | 3969 |
3503 /* Some backends want to know that we are expanding to RTL. */ | 3970 /* Some backends want to know that we are expanding to RTL. */ |
3504 currently_expanding_to_rtl = 1; | 3971 currently_expanding_to_rtl = 1; |
3513 set_curr_insn_source_location | 3980 set_curr_insn_source_location |
3514 (DECL_SOURCE_LOCATION (current_function_decl)); | 3981 (DECL_SOURCE_LOCATION (current_function_decl)); |
3515 else | 3982 else |
3516 set_curr_insn_source_location (cfun->function_start_locus); | 3983 set_curr_insn_source_location (cfun->function_start_locus); |
3517 } | 3984 } |
3985 else | |
3986 set_curr_insn_source_location (UNKNOWN_LOCATION); | |
3518 set_curr_insn_block (DECL_INITIAL (current_function_decl)); | 3987 set_curr_insn_block (DECL_INITIAL (current_function_decl)); |
3519 prologue_locator = curr_insn_locator (); | 3988 prologue_locator = curr_insn_locator (); |
3989 | |
3990 #ifdef INSN_SCHEDULING | |
3991 init_sched_attrs (); | |
3992 #endif | |
3520 | 3993 |
3521 /* Make sure first insn is a note even if we don't want linenums. | 3994 /* Make sure first insn is a note even if we don't want linenums. |
3522 This makes sure the first insn will never be deleted. | 3995 This makes sure the first insn will never be deleted. |
3523 Also, final expects a note to appear there. */ | 3996 Also, final expects a note to appear there. */ |
3524 emit_note (NOTE_INSN_DELETED); | 3997 emit_note (NOTE_INSN_DELETED); |
3531 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY; | 4004 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY; |
3532 crtl->stack_alignment_estimated = 0; | 4005 crtl->stack_alignment_estimated = 0; |
3533 crtl->preferred_stack_boundary = STACK_BOUNDARY; | 4006 crtl->preferred_stack_boundary = STACK_BOUNDARY; |
3534 cfun->cfg->max_jumptable_ents = 0; | 4007 cfun->cfg->max_jumptable_ents = 0; |
3535 | 4008 |
4009 /* Resolve the function section. Some targets, like ARM EABI, rely on knowledge | |
4010 of the function section at expansion time to predict the distance of calls. */ | |
4011 resolve_unique_section (current_function_decl, 0, flag_function_sections); | |
3536 | 4012 |
3537 /* Expand the variables recorded during gimple lowering. */ | 4013 /* Expand the variables recorded during gimple lowering. */ |
4014 timevar_push (TV_VAR_EXPAND); | |
4015 start_sequence (); | |
4016 | |
3538 expand_used_vars (); | 4017 expand_used_vars (); |
4018 | |
4019 var_seq = get_insns (); | |
4020 end_sequence (); | |
4021 timevar_pop (TV_VAR_EXPAND); | |
3539 | 4022 |
3540 /* Honor stack protection warnings. */ | 4023 /* Honor stack protection warnings. */ |
3541 if (warn_stack_protect) | 4024 if (warn_stack_protect) |
3542 { | 4025 { |
3543 if (cfun->calls_alloca) | 4026 if (cfun->calls_alloca) |
3544 warning (OPT_Wstack_protector, | 4027 warning (OPT_Wstack_protector, |
3545 "not protecting local variables: variable length buffer"); | 4028 "stack protector not protecting local variables: " |
4029 "variable length buffer"); | |
3546 if (has_short_buffer && !crtl->stack_protect_guard) | 4030 if (has_short_buffer && !crtl->stack_protect_guard) |
3547 warning (OPT_Wstack_protector, | 4031 warning (OPT_Wstack_protector, |
3548 "not protecting function: no buffer at least %d bytes long", | 4032 "stack protector not protecting function: " |
3549 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE)); | 4033 "all local arrays are less than %d bytes long", |
4034 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE)); | |
3550 } | 4035 } |
3551 | 4036 |
3552 /* Set up parameters and prepare for return, for the function. */ | 4037 /* Set up parameters and prepare for return, for the function. */ |
3553 expand_function_start (current_function_decl); | 4038 expand_function_start (current_function_decl); |
4039 | |
4040 /* If we emitted any instructions for setting up the variables, | |
4041 emit them before the FUNCTION_START note. */ | |
4042 if (var_seq) | |
4043 { | |
4044 emit_insn_before (var_seq, parm_birth_insn); | |
4045 | |
4046 /* In expand_function_end we'll insert the alloca save/restore | |
4047 before parm_birth_insn. We've just inserted an alloca call. | |
4048 Adjust the pointer to match. */ | |
4049 parm_birth_insn = var_seq; | |
4050 } | |
3554 | 4051 |
3555 /* Now that we also have the parameter RTXs, copy them over to our | 4052 /* Now that we also have the parameter RTXs, copy them over to our |
3556 partitions. */ | 4053 partitions. */ |
3557 for (i = 0; i < SA.map->num_partitions; i++) | 4054 for (i = 0; i < SA.map->num_partitions; i++) |
3558 { | 4055 { |
3611 | 4108 |
3612 if (MAY_HAVE_DEBUG_INSNS) | 4109 if (MAY_HAVE_DEBUG_INSNS) |
3613 expand_debug_locations (); | 4110 expand_debug_locations (); |
3614 | 4111 |
3615 execute_free_datastructures (); | 4112 execute_free_datastructures (); |
4113 timevar_push (TV_OUT_OF_SSA); | |
3616 finish_out_of_ssa (&SA); | 4114 finish_out_of_ssa (&SA); |
4115 timevar_pop (TV_OUT_OF_SSA); | |
4116 | |
4117 timevar_push (TV_POST_EXPAND); | |
4118 /* We are no longer in SSA form. */ | |
4119 cfun->gimple_df->in_ssa_p = false; | |
3617 | 4120 |
3618 /* Expansion is used by optimization passes too, set maybe_hot_insn_p | 4121 /* Expansion is used by optimization passes too, set maybe_hot_insn_p |
3619 conservatively to true until they are all profile aware. */ | 4122 conservatively to true until they are all profile aware. */ |
3620 pointer_map_destroy (lab_rtx_for_bb); | 4123 pointer_map_destroy (lab_rtx_for_bb); |
3621 free_histograms (); | 4124 free_histograms (); |
3632 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb) | 4135 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb) |
3633 { | 4136 { |
3634 edge e; | 4137 edge e; |
3635 edge_iterator ei; | 4138 edge_iterator ei; |
3636 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) | 4139 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) |
3637 { | 4140 { |
3638 if (e->insns.r) | 4141 if (e->insns.r) |
3639 commit_one_edge_insertion (e); | 4142 { |
3640 else | 4143 /* Avoid putting insns before parm_birth_insn. */ |
3641 ei_next (&ei); | 4144 if (e->src == ENTRY_BLOCK_PTR |
3642 } | 4145 && single_succ_p (ENTRY_BLOCK_PTR) |
4146 && parm_birth_insn) | |
4147 { | |
4148 rtx insns = e->insns.r; | |
4149 e->insns.r = NULL_RTX; | |
4150 emit_insn_after_noloc (insns, parm_birth_insn, e->dest); | |
4151 } | |
4152 else | |
4153 commit_one_edge_insertion (e); | |
4154 } | |
4155 else | |
4156 ei_next (&ei); | |
4157 } | |
3643 } | 4158 } |
3644 | 4159 |
3645 /* We're done expanding trees to RTL. */ | 4160 /* We're done expanding trees to RTL. */ |
3646 currently_expanding_to_rtl = 0; | 4161 currently_expanding_to_rtl = 0; |
3647 | 4162 |
3719 naked_return_label = NULL; | 4234 naked_return_label = NULL; |
3720 /* Tag the blocks with a depth number so that change_scope can find | 4235 /* Tag the blocks with a depth number so that change_scope can find |
3721 the common parent easily. */ | 4236 the common parent easily. */ |
3722 set_block_levels (DECL_INITIAL (cfun->decl), 0); | 4237 set_block_levels (DECL_INITIAL (cfun->decl), 0); |
3723 default_rtl_profile (); | 4238 default_rtl_profile (); |
4239 timevar_pop (TV_POST_EXPAND); | |
3724 return 0; | 4240 return 0; |
3725 } | 4241 } |
3726 | 4242 |
3727 struct rtl_opt_pass pass_expand = | 4243 struct rtl_opt_pass pass_expand = |
3728 { | 4244 { |
3732 NULL, /* gate */ | 4248 NULL, /* gate */ |
3733 gimple_expand_cfg, /* execute */ | 4249 gimple_expand_cfg, /* execute */ |
3734 NULL, /* sub */ | 4250 NULL, /* sub */ |
3735 NULL, /* next */ | 4251 NULL, /* next */ |
3736 0, /* static_pass_number */ | 4252 0, /* static_pass_number */ |
3737 TV_EXPAND, /* tv_id */ | 4253 TV_EXPAND, /* tv_id */ |
3738 PROP_ssa | PROP_gimple_leh | PROP_cfg,/* properties_required */ | 4254 PROP_ssa | PROP_gimple_leh | PROP_cfg |
4255 | PROP_gimple_lcx, /* properties_required */ | |
3739 PROP_rtl, /* properties_provided */ | 4256 PROP_rtl, /* properties_provided */ |
3740 PROP_ssa | PROP_trees, /* properties_destroyed */ | 4257 PROP_ssa | PROP_trees, /* properties_destroyed */ |
3741 TODO_verify_ssa | TODO_verify_flow | 4258 TODO_verify_ssa | TODO_verify_flow |
3742 | TODO_verify_stmts, /* todo_flags_start */ | 4259 | TODO_verify_stmts, /* todo_flags_start */ |
3743 TODO_dump_func | 4260 TODO_dump_func |