comparison gcc/resource.c @ 111:04ced10e8804

gcc 7
author kono
date Fri, 27 Oct 2017 22:46:09 +0900
parents f6334be47118
children 84e7813d76e9
comparing 68:561a7518be6b with 111:04ced10e8804
@@ -1 +1 @@
 /* Definitions for computing resource usage of specific insns.
-   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008,
-   2009, 2010 Free Software Foundation, Inc.
+   Copyright (C) 1999-2017 Free Software Foundation, Inc.
 
    This file is part of GCC.
 
    GCC is free software; you can redistribute it and/or modify it under
    the terms of the GNU General Public License as published by the Free
@@ -19 +18 @@
    <http://www.gnu.org/licenses/>.  */
 
 #include "config.h"
 #include "system.h"
 #include "coretypes.h"
-#include "tm.h"
-#include "diagnostic-core.h"
+#include "backend.h"
 #include "rtl.h"
+#include "df.h"
+#include "memmodel.h"
 #include "tm_p.h"
-#include "hard-reg-set.h"
-#include "function.h"
 #include "regs.h"
-#include "flags.h"
-#include "output.h"
+#include "emit-rtl.h"
 #include "resource.h"
-#include "except.h"
 #include "insn-attr.h"
 #include "params.h"
-#include "df.h"
 
 /* This structure is used to record liveness information at the targets or
    fallthrough insns of branches.  We will most likely need the information
    at targets again, so save them in a hash table rather than recomputing them
    each time.  */
@@ -77 +72 @@
    Also only used by the next two functions.  */
 
 static HARD_REG_SET pending_dead_regs;
 
 static void update_live_status (rtx, const_rtx, void *);
-static int find_basic_block (rtx, int);
-static rtx next_insn_no_annul (rtx);
-static rtx find_dead_or_set_registers (rtx, struct resources*,
-                                       rtx*, int, struct resources,
-                                       struct resources);
+static int find_basic_block (rtx_insn *, int);
+static rtx_insn *next_insn_no_annul (rtx_insn *);
+static rtx_insn *find_dead_or_set_registers (rtx_insn *, struct resources*,
+                                             rtx *, int, struct resources,
+                                             struct resources);
 
 /* Utility function called from mark_target_live_regs via note_stores.
    It deadens any CLOBBERed registers and livens any SET registers.  */
 
 static void
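The change that recurs throughout this revision is the move from bare `rtx`
to the `rtx_insn *` subclass, converted with the checked casts from GCC's
is-a.h.  As a rough standalone sketch of how those casts behave (stand-in
types and predicate here are hypothetical, not GCC's real hierarchy):

    /* as_a <T> asserts the object really is a T; dyn_cast <T> yields a
       null pointer instead, so it can initialize a variable inside an
       if-condition, the way resource.c now tests for SEQUENCEs.  */
    #include <cassert>
    #include <cstddef>

    struct rtx_def  { int code; };     /* stand-in base, not GCC's */
    struct rtx_insn : rtx_def {};      /* stand-in subclass */

    template <typename T> bool is_a (rtx_def *x);
    template <> bool is_a <rtx_insn *> (rtx_def *x) { return x->code != 0; }

    template <typename T> T as_a (rtx_def *x)
    { assert (is_a <T> (x)); return static_cast <T> (x); }

    template <typename T> T dyn_cast (rtx_def *x)
    { return is_a <T> (x) ? static_cast <T> (x) : NULL; }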
@@ -105 +100 @@
 
     }
   else
     {
       first_regno = REGNO (dest);
-      last_regno = END_HARD_REGNO (dest);
+      last_regno = END_REGNO (dest);
     }
 
   if (GET_CODE (x) == CLOBBER)
     for (i = first_regno; i < last_regno; i++)
       CLEAR_HARD_REG_BIT (current_live_regs, i);
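END_HARD_REGNO only worked for hard registers; END_REGNO handles pseudos as
well.  Assuming the rtl.h definition of this era, it is essentially the first
register number past the ones REG occupies:

    /* Rough shape of END_REGNO (a sketch of the rtl.h inline, not a
       verbatim copy): REG_NREGS is 1 for a pseudo and the target's
       hard_regno_nregs count for a hard register.  */
    static inline unsigned int
    end_regno (const_rtx reg)
    {
      return REGNO (reg) + REG_NREGS (reg);
    }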
@@ -131 +126 @@
    instead of finding the basic block containing INSN, we search
    backwards toward a BARRIER where the live register information is
    correct.  */
 
 static int
-find_basic_block (rtx insn, int search_limit)
+find_basic_block (rtx_insn *insn, int search_limit)
 {
   /* Scan backwards to the previous BARRIER.  Then see if we can find a
      label that starts a basic block.  Return the basic block number.  */
   for (insn = prev_nonnote_insn (insn);
        insn && !BARRIER_P (insn) && search_limit != 0;
@@ -146 +141 @@
   if (search_limit == 0)
     return -1;
 
   /* The start of the function.  */
   else if (insn == 0)
-    return ENTRY_BLOCK_PTR->next_bb->index;
+    return ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index;
 
   /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
      anything other than a CODE_LABEL or note, we can't find this code.  */
   for (insn = next_nonnote_insn (insn);
        insn && LABEL_P (insn);
@@ -162 +157 @@
 }
 
 /* Similar to next_insn, but ignores insns in the delay slots of
    an annulled branch.  */
 
-static rtx
-next_insn_no_annul (rtx insn)
+static rtx_insn *
+next_insn_no_annul (rtx_insn *insn)
 {
   if (insn)
     {
       /* If INSN is an annulled branch, skip any insns from the target
          of the branch.  */
-      if (INSN_P (insn)
+      if (JUMP_P (insn)
           && INSN_ANNULLED_BRANCH_P (insn)
           && NEXT_INSN (PREV_INSN (insn)) != insn)
         {
-          rtx next = NEXT_INSN (insn);
-          enum rtx_code code = GET_CODE (next);
+          rtx_insn *next = NEXT_INSN (insn);
 
-          while ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
+          while ((NONJUMP_INSN_P (next) || JUMP_P (next) || CALL_P (next))
                  && INSN_FROM_TARGET_P (next))
            {
              insn = next;
              next = NEXT_INSN (insn);
-             code = GET_CODE (next);
            }
         }
 
       insn = NEXT_INSN (insn);
       if (insn && NONJUMP_INSN_P (insn)
           && GET_CODE (PATTERN (insn)) == SEQUENCE)
-        insn = XVECEXP (PATTERN (insn), 0, 0);
+        insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
     }
 
   return insn;
 }
 
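The raw XVECEXP/XVECLEN pokes at a SEQUENCE body give way to the
rtx_sequence wrapper here.  The accessors map onto the old macros roughly
like this (assumed from rtl.h, shown for orientation only):

    /* rtx_sequence accessor cheat sheet (assumed, not verbatim):
         seq->len ()       ~ XVECLEN (seq, 0)
         seq->element (i)  ~ XVECEXP (seq, 0, i)
         seq->insn (i)     ~ as_a <rtx_insn *> (XVECEXP (seq, 0, i))  */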
@@ -213 +206 @@
   /* Handle leaf items for which we set resource flags.  Also, special-case
      CALL, SET and CLOBBER operators.  */
   switch (code)
     {
     case CONST:
-    case CONST_INT:
-    case CONST_DOUBLE:
-    case CONST_FIXED:
-    case CONST_VECTOR:
+    CASE_CONST_ANY:
     case PC:
     case SYMBOL_REF:
     case LABEL_REF:
       return;
 
@@ -244 +234 @@
       return;
 
     case MEM:
       /* If this memory shouldn't change, it really isn't referencing
          memory.  */
-      if (MEM_READONLY_P (x))
-        res->unch_memory = 1;
-      else
+      if (! MEM_READONLY_P (x))
         res->memory = 1;
       res->volatil |= MEM_VOLATILE_P (x);
 
       /* Mark registers used to access memory.  */
       mark_referenced_resources (XEXP (x, 0), res, false);
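CASE_CONST_ANY collapses the old run of constant case labels and also picks
up CONST_WIDE_INT, which did not exist when the explicit list was written.
Roughly, per the rtl.h of this era:

    /* Approximate expansion of CASE_CONST_ANY (rtl.h).  */
    #define CASE_CONST_ANY \
      case CONST_INT: \
      case CONST_WIDE_INT: \
      case CONST_DOUBLE: \
      case CONST_FIXED: \
      case CONST_VECTOR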
@@ -314 +302 @@
          USE insns immediately in front of the CALL.
 
          However, we may have moved some of the parameter loading insns
          into the delay slot of this CALL.  If so, the USE's for them
          don't count and should be skipped.  */
-      rtx insn = PREV_INSN (x);
-      rtx sequence = 0;
+      rtx_insn *insn = PREV_INSN (as_a <rtx_insn *> (x));
+      rtx_sequence *sequence = 0;
       int seq_size = 0;
       int i;
 
       /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
       if (NEXT_INSN (insn) != x)
         {
-          sequence = PATTERN (NEXT_INSN (insn));
-          seq_size = XVECLEN (sequence, 0);
+          sequence = as_a <rtx_sequence *> (PATTERN (NEXT_INSN (insn)));
+          seq_size = sequence->len ();
           gcc_assert (GET_CODE (sequence) == SEQUENCE);
         }
 
       res->memory = 1;
       SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
       if (frame_pointer_needed)
         {
           SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
-#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
-          SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
-#endif
+          if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
+            SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
         }
 
       for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
         if (global_regs[i])
           SET_HARD_REG_BIT (res->regs, i);
@@ -362 +349 @@
            link = XEXP (link, 1))
         if (GET_CODE (XEXP (link, 0)) == USE)
           {
             for (i = 1; i < seq_size; i++)
               {
-                rtx slot_pat = PATTERN (XVECEXP (sequence, 0, i));
+                rtx slot_pat = PATTERN (sequence->element (i));
                 if (GET_CODE (slot_pat) == SET
                     && rtx_equal_p (SET_DEST (slot_pat),
                                     XEXP (XEXP (link, 0), 0)))
                   break;
               }
@@ -376 +363 @@
           }
       }
     }
 
       /* ... fall through to other INSN processing ...  */
+      gcc_fallthrough ();
 
     case INSN:
     case JUMP_INSN:
 
-#ifdef INSN_REFERENCES_ARE_DELAYED
+      if (GET_CODE (PATTERN (x)) == COND_EXEC)
+        /* In addition to the usual references, also consider all outputs
+           as referenced, to compensate for mark_set_resources treating
+           them as killed.  This is similar to ZERO_EXTRACT / STRICT_LOW_PART
+           handling, except that we got a partial incidence instead of a
+           partial width.  */
+        mark_set_resources (x, res, 0,
+                            include_delayed_effects
+                            ? MARK_SRC_DEST_CALL : MARK_SRC_DEST);
+
       if (! include_delayed_effects
-          && INSN_REFERENCES_ARE_DELAYED (x))
+          && INSN_REFERENCES_ARE_DELAYED (as_a <rtx_insn *> (x)))
         return;
-#endif
 
       /* No special processing, just speed up.  */
       mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
       return;
 
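The new gcc_fallthrough () call documents the deliberate fall-through from
the CALL_INSN case into the INSN/JUMP_INSN handling, keeping GCC 7's
-Wimplicit-fallthrough quiet.  As far as I can tell from system.h, it is
roughly:

    /* Sketch of gcc_fallthrough (): a no-op unless the host compiler
       understands the fallthrough attribute.  */
    #if GCC_VERSION >= 7000
    # define gcc_fallthrough() __attribute__ ((fallthrough))
    #else
    # define gcc_fallthrough()
    #endif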
@@ -416 +412 @@
 /* A subroutine of mark_target_live_regs.  Search forward from TARGET
    looking for registers that are set before they are used.  These are dead.
    Stop after passing a few conditional jumps, and/or a small
    number of unconditional branches.  */
 
-static rtx
-find_dead_or_set_registers (rtx target, struct resources *res,
+static rtx_insn *
+find_dead_or_set_registers (rtx_insn *target, struct resources *res,
                             rtx *jump_target, int jump_count,
                             struct resources set, struct resources needed)
 {
   HARD_REG_SET scratch;
-  rtx insn, next;
-  rtx jump_insn = 0;
+  rtx_insn *insn;
+  rtx_insn *next_insn;
+  rtx_insn *jump_insn = 0;
   int i;
 
-  for (insn = target; insn; insn = next)
+  for (insn = target; insn; insn = next_insn)
     {
-      rtx this_jump_insn = insn;
+      rtx_insn *this_insn = insn;
 
-      next = NEXT_INSN (insn);
+      next_insn = NEXT_INSN (insn);
 
       /* If this instruction can throw an exception, then we don't
          know where we might end up next.  That means that we have to
          assume that whatever we have already marked as live really is
          live.  */
@@ -469 +466 @@
             /* All other USE insns are to be ignored.  */
             continue;
           }
         else if (GET_CODE (PATTERN (insn)) == CLOBBER)
           continue;
-        else if (GET_CODE (PATTERN (insn)) == SEQUENCE)
+        else if (rtx_sequence *seq =
+                   dyn_cast <rtx_sequence *> (PATTERN (insn)))
           {
             /* An unconditional jump can be used to fill the delay slot
                of a call, so search for a JUMP_INSN in any position.  */
-            for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
+            for (i = 0; i < seq->len (); i++)
               {
-                this_jump_insn = XVECEXP (PATTERN (insn), 0, i);
-                if (JUMP_P (this_jump_insn))
+                this_insn = seq->insn (i);
+                if (JUMP_P (this_insn))
                   break;
               }
           }
 
         default:
           break;
         }
 
-      if (JUMP_P (this_jump_insn))
+      if (rtx_jump_insn *this_jump_insn =
+            dyn_cast <rtx_jump_insn *> (this_insn))
         {
           if (jump_count++ < 10)
             {
               if (any_uncondjump_p (this_jump_insn)
-                  || GET_CODE (PATTERN (this_jump_insn)) == RETURN)
+                  || ANY_RETURN_P (PATTERN (this_jump_insn)))
                 {
-                  next = JUMP_LABEL (this_jump_insn);
+                  rtx lab_or_return = this_jump_insn->jump_label ();
+                  if (ANY_RETURN_P (lab_or_return))
+                    next_insn = NULL;
+                  else
+                    next_insn = as_a <rtx_insn *> (lab_or_return);
                   if (jump_insn == 0)
                     {
                       jump_insn = insn;
                       if (jump_target)
                         *jump_target = JUMP_LABEL (this_jump_insn);
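ANY_RETURN_P widens the old GET_CODE (...) == RETURN tests so that
SIMPLE_RETURN, introduced for shrink-wrapping, is caught as well.  Assuming
rtl.h, the predicate is roughly:

    /* Approximate rtl.h definition: matches both flavours of return.  */
    #define ANY_RETURN_P(X) \
      (GET_CODE (X) == RETURN || GET_CODE (X) == SIMPLE_RETURN)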
@@ -530 +533 @@
                      and then inverting the INSN_FROM_TARGET_P bits again.  */
 
                   if (GET_CODE (PATTERN (insn)) == SEQUENCE
                       && INSN_ANNULLED_BRANCH_P (this_jump_insn))
                     {
-                      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
-                        INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
-                          = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
+                      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
+                      for (i = 1; i < seq->len (); i++)
+                        INSN_FROM_TARGET_P (seq->element (i))
+                          = ! INSN_FROM_TARGET_P (seq->element (i));
 
                       target_set = set;
                       mark_set_resources (insn, &target_set, 0,
                                           MARK_SRC_DEST_CALL);
 
-                      for (i = 1; i < XVECLEN (PATTERN (insn), 0); i++)
-                        INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i))
-                          = ! INSN_FROM_TARGET_P (XVECEXP (PATTERN (insn), 0, i));
+                      for (i = 1; i < seq->len (); i++)
+                        INSN_FROM_TARGET_P (seq->element (i))
+                          = ! INSN_FROM_TARGET_P (seq->element (i));
 
                       mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
                     }
                   else
                     {
@@ -560 +564 @@
                   fallthrough_res = *res;
                   COPY_HARD_REG_SET (scratch, set.regs);
                   AND_COMPL_HARD_REG_SET (scratch, needed.regs);
                   AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);
 
-                  find_dead_or_set_registers (JUMP_LABEL (this_jump_insn),
-                                              &target_res, 0, jump_count,
-                                              target_set, needed);
-                  find_dead_or_set_registers (next,
+                  if (!ANY_RETURN_P (this_jump_insn->jump_label ()))
+                    find_dead_or_set_registers
+                          (this_jump_insn->jump_target (),
+                           &target_res, 0, jump_count, target_set, needed);
+                  find_dead_or_set_registers (next_insn,
                                               &fallthrough_res, 0, jump_count,
                                               set, needed);
                   IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
                   AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
                   break;
@@ -627 +632 @@
     {
     case NOTE:
     case BARRIER:
     case CODE_LABEL:
     case USE:
-    case CONST_INT:
-    case CONST_DOUBLE:
-    case CONST_FIXED:
-    case CONST_VECTOR:
+    CASE_CONST_ANY:
     case LABEL_REF:
     case SYMBOL_REF:
     case CONST:
     case PC:
       /* These don't set any resources.  */
@@ -650 +652 @@
          that aren't saved across calls, global registers and anything
          explicitly CLOBBERed immediately after the CALL_INSN.  */
 
       if (mark_type == MARK_SRC_DEST_CALL)
         {
+          rtx_call_insn *call_insn = as_a <rtx_call_insn *> (x);
           rtx link;
+          HARD_REG_SET regs;
 
           res->cc = res->memory = 1;
 
-          IOR_HARD_REG_SET (res->regs, regs_invalidated_by_call);
+          get_call_reg_set_usage (call_insn, &regs, regs_invalidated_by_call);
+          IOR_HARD_REG_SET (res->regs, regs);
 
-          for (link = CALL_INSN_FUNCTION_USAGE (x);
+          for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
                link; link = XEXP (link, 1))
             if (GET_CODE (XEXP (link, 0)) == CLOBBER)
               mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
                                   MARK_SRC_DEST);
 
           /* Check for a REG_SETJMP.  If it exists, then we must
              assume that this call can clobber any register.  */
-          if (find_reg_note (x, REG_SETJMP, NULL))
+          if (find_reg_note (call_insn, REG_SETJMP, NULL))
             SET_HARD_REG_SET (res->regs);
         }
 
       /* ... and also what its RTL says it modifies, if anything.  */
+      gcc_fallthrough ();
 
     case JUMP_INSN:
     case INSN:
 
       /* An insn consisting of just a CLOBBER (or USE) is just for flow
          and doesn't actually do anything, so we ignore it.  */
 
-#ifdef INSN_SETS_ARE_DELAYED
       if (mark_type != MARK_SRC_DEST_CALL
-          && INSN_SETS_ARE_DELAYED (x))
+          && INSN_SETS_ARE_DELAYED (as_a <rtx_insn *> (x)))
         return;
-#endif
 
       x = PATTERN (x);
       if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
         goto restart;
       return;
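get_call_reg_set_usage is the -fipa-ra entry point: rather than assuming
every call clobbers all of regs_invalidated_by_call, it returns the clobber
set recorded for this particular call, falling back to the supplied default
when nothing better is known.  A hedged usage sketch, mirroring the pattern
in the hunk above:

    /* Per-call clobber set with worst-case fall-back (sketch).  */
    HARD_REG_SET clobbered;
    get_call_reg_set_usage (call_insn,    /* CALL_INSN to query */
                            &clobbered,   /* filled in with its clobbers */
                            regs_invalidated_by_call);  /* default set */
    IOR_HARD_REG_SET (res->regs, clobbered);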
@@ -705 +709 @@
     case CLOBBER:
       mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
       return;
 
     case SEQUENCE:
-      for (i = 0; i < XVECLEN (x, 0); i++)
-        if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
-               && INSN_FROM_TARGET_P (XVECEXP (x, 0, i))))
-          mark_set_resources (XVECEXP (x, 0, i), res, 0, mark_type);
+      {
+        rtx_sequence *seq = as_a <rtx_sequence *> (x);
+        rtx control = seq->element (0);
+        bool annul_p = JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control);
+
+        mark_set_resources (control, res, 0, mark_type);
+        for (i = seq->len () - 1; i >= 0; --i)
+          {
+            rtx elt = seq->element (i);
+            if (!annul_p && INSN_FROM_TARGET_P (elt))
+              mark_set_resources (elt, res, 0, mark_type);
+          }
+      }
       return;
 
     case POST_INC:
     case PRE_INC:
     case POST_DEC:
@@ -736 +749 @@
 
     case MEM:
       if (in_dest)
         {
           res->memory = 1;
-          res->unch_memory |= MEM_READONLY_P (x);
           res->volatil |= MEM_VOLATILE_P (x);
         }
 
       mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
       return;
@@ -816 +828 @@
 /* Return TRUE if INSN is a return, possibly with a filled delay slot.  */
 
 static bool
 return_insn_p (const_rtx insn)
 {
-  if (JUMP_P (insn) && GET_CODE (PATTERN (insn)) == RETURN)
+  if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
     return true;
 
   if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
     return return_insn_p (XVECEXP (PATTERN (insn), 0, 0));
 
@@ -864 +876 @@
    Because we can be called many times on the same target, save our results
    in a hash table indexed by INSN_UID.  This is only done if the function
    init_resource_info () was invoked before we are called.  */
 
 void
-mark_target_live_regs (rtx insns, rtx target, struct resources *res)
+mark_target_live_regs (rtx_insn *insns, rtx target_maybe_return,
+                       struct resources *res)
 {
   int b = -1;
   unsigned int i;
   struct target_info *tinfo = NULL;
-  rtx insn;
-  rtx jump_insn = 0;
+  rtx_insn *insn;
   rtx jump_target;
   HARD_REG_SET scratch;
   struct resources set, needed;
 
   /* Handle end of function.  */
-  if (target == 0)
+  if (target_maybe_return == 0 || ANY_RETURN_P (target_maybe_return))
     {
       *res = end_of_function_needs;
       return;
     }
 
+  /* We've handled the case of RETURN/SIMPLE_RETURN; we should now have an
+     instruction.  */
+  rtx_insn *target = as_a <rtx_insn *> (target_maybe_return);
+
   /* Handle return insn.  */
-  else if (return_insn_p (target))
+  if (return_insn_p (target))
     {
       *res = end_of_function_needs;
       mark_referenced_resources (target, res, false);
       return;
     }
 
   /* We have to assume memory is needed, but the CC isn't.  */
   res->memory = 1;
-  res->volatil = res->unch_memory = 0;
+  res->volatil = 0;
   res->cc = 0;
 
   /* See if we have computed this value already.  */
   if (target_hash_table != NULL)
     {
@@ -907 +922 @@
 
       /* Start by getting the basic block number.  If we have saved
          information, we can get it from there unless the insn at the
          start of the basic block has been deleted.  */
       if (tinfo && tinfo->block != -1
-          && ! INSN_DELETED_P (BB_HEAD (BASIC_BLOCK (tinfo->block))))
+          && ! BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, tinfo->block))->deleted ())
         b = tinfo->block;
     }
 
   if (b == -1)
     b = find_basic_block (target, MAX_DELAY_SLOT_LIVE_SEARCH);
@@ -947 +962 @@
      them with anything set or killed between its start and the insn before
      TARGET; this custom life analysis is really about registers so we need
      to use the LR problem.  Otherwise, we must assume everything is live.  */
   if (b != -1)
     {
-      regset regs_live = DF_LR_IN (BASIC_BLOCK (b));
-      rtx start_insn, stop_insn;
+      regset regs_live = DF_LR_IN (BASIC_BLOCK_FOR_FN (cfun, b));
+      rtx_insn *start_insn, *stop_insn;
 
       /* Compute hard regs live at start of block.  */
       REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);
 
       /* Get starting and ending insn, handling the case where each might
          be a SEQUENCE.  */
-      start_insn = (b == ENTRY_BLOCK_PTR->next_bb->index ?
-                    insns : BB_HEAD (BASIC_BLOCK (b)));
+      start_insn = (b == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index ?
+                    insns : BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, b)));
       stop_insn = target;
 
       if (NONJUMP_INSN_P (start_insn)
           && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
-        start_insn = XVECEXP (PATTERN (start_insn), 0, 0);
+        start_insn = as_a <rtx_sequence *> (PATTERN (start_insn))->insn (0);
 
       if (NONJUMP_INSN_P (stop_insn)
           && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
         stop_insn = next_insn (PREV_INSN (stop_insn));
 
       for (insn = start_insn; insn != stop_insn;
            insn = next_insn_no_annul (insn))
         {
           rtx link;
-          rtx real_insn = insn;
+          rtx_insn *real_insn = insn;
           enum rtx_code code = GET_CODE (insn);
 
           if (DEBUG_INSN_P (insn))
             continue;
 
@@ -986 +1001 @@
               && INSN_FROM_TARGET_P (insn))
             continue;
 
           /* If this insn is a USE made by update_block, we care about the
              underlying insn.  */
-          if (code == INSN && GET_CODE (PATTERN (insn)) == USE
+          if (code == INSN
+              && GET_CODE (PATTERN (insn)) == USE
               && INSN_P (XEXP (PATTERN (insn), 0)))
-            real_insn = XEXP (PATTERN (insn), 0);
+            real_insn = as_a <rtx_insn *> (XEXP (PATTERN (insn), 0));
 
           if (CALL_P (real_insn))
             {
-              /* CALL clobbers all call-used regs that aren't fixed except
-                 sp, ap, and fp.  Do this before setting the result of the
-                 call live.  */
-              AND_COMPL_HARD_REG_SET (current_live_regs,
-                                      regs_invalidated_by_call);
+              /* Values in call-clobbered registers survive a COND_EXEC CALL
+                 if that is not executed; this matters for resource use because
+                 they may be used by a complementarily (or more strictly)
+                 predicated instruction, or if the CALL is NORETURN.  */
+              if (GET_CODE (PATTERN (real_insn)) != COND_EXEC)
+                {
+                  HARD_REG_SET regs_invalidated_by_this_call;
+                  get_call_reg_set_usage (real_insn,
+                                          &regs_invalidated_by_this_call,
+                                          regs_invalidated_by_call);
+                  /* CALL clobbers all call-used regs that aren't fixed except
+                     sp, ap, and fp.  Do this before setting the result of the
+                     call live.  */
+                  AND_COMPL_HARD_REG_SET (current_live_regs,
+                                          regs_invalidated_by_this_call);
+                }
 
               /* A CALL_INSN sets any global register live, since it may
                  have been modified by the call.  */
               for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
                 if (global_regs[i])
@@ -1082 +1109 @@
     SET_HARD_REG_SET (res->regs);
 
   CLEAR_RESOURCE (&set);
   CLEAR_RESOURCE (&needed);
 
-  jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
-                                          set, needed);
+  rtx_insn *jump_insn = find_dead_or_set_registers (target, res, &jump_target,
+                                                    0, set, needed);
 
   /* If we hit an unconditional branch, we have another way of finding out
      what is live: we can see what is live at the branch target and include
      anything used but not set before the branch.  We add the live
      resources found using the test below to those found until now.  */
 
   if (jump_insn)
     {
       struct resources new_resources;
-      rtx stop_insn = next_active_insn (jump_insn);
+      rtx_insn *stop_insn = next_active_insn (jump_insn);
 
-      mark_target_live_regs (insns, next_active_insn (jump_target),
-                             &new_resources);
+      if (!ANY_RETURN_P (jump_target))
+        jump_target = next_active_insn (as_a<rtx_insn *> (jump_target));
+      mark_target_live_regs (insns, jump_target, &new_resources);
       CLEAR_RESOURCE (&set);
       CLEAR_RESOURCE (&needed);
 
       /* Include JUMP_INSN in the needed registers.  */
       for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
@@ -1125 +1153 @@
 
 /* Initialize the resources required by mark_target_live_regs ().
    This should be invoked before the first call to mark_target_live_regs.  */
 
 void
-init_resource_info (rtx epilogue_insn)
+init_resource_info (rtx_insn *epilogue_insn)
 {
   int i;
   basic_block bb;
 
   /* Indicate what resources are required to be valid at the end of the current
-     function.  The condition code never is and memory always is.  If the
-     frame pointer is needed, it is and so is the stack pointer unless
-     EXIT_IGNORE_STACK is nonzero.  If the frame pointer is not needed, the
-     stack pointer is.  Registers used to return the function value are
-     needed.  Registers holding global variables are needed.  */
+     function.  The condition code never is and memory always is.
+     The stack pointer is needed unless EXIT_IGNORE_STACK is true
+     and there is an epilogue that restores the original stack pointer
+     from the frame pointer.  Registers used to return the function value
+     are needed.  Registers holding global variables are needed.  */
 
   end_of_function_needs.cc = 0;
   end_of_function_needs.memory = 1;
-  end_of_function_needs.unch_memory = 0;
   CLEAR_HARD_REG_SET (end_of_function_needs.regs);
 
   if (frame_pointer_needed)
     {
       SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
-#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
-      SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
-#endif
-      if (! EXIT_IGNORE_STACK
-          || current_function_sp_is_unchanging)
-        SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
-    }
-  else
-    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
+      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
+        SET_HARD_REG_BIT (end_of_function_needs.regs,
+                          HARD_FRAME_POINTER_REGNUM);
+    }
+  if (!(frame_pointer_needed
+        && EXIT_IGNORE_STACK
+        && epilogue_insn
+        && !crtl->sp_is_unchanging))
+    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);
 
   if (crtl->return_rtx != 0)
     mark_referenced_resources (crtl->return_rtx,
                                &end_of_function_needs, true);
 
   for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
-    if (global_regs[i]
-#ifdef EPILOGUE_USES
-        || EPILOGUE_USES (i)
-#endif
-        )
+    if (global_regs[i] || EPILOGUE_USES (i))
       SET_HARD_REG_BIT (end_of_function_needs.regs, i);
 
   /* The registers required to be live at the end of the function are
      represented in the flow information as being dead just prior to
      reaching the end of the function.  For example, the return of a value
@@ -1196 +1219 @@
         break;
       }
 
   /* Allocate and initialize the tables used by mark_target_live_regs.  */
   target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME);
-  bb_ticks = XCNEWVEC (int, last_basic_block);
+  bb_ticks = XCNEWVEC (int, last_basic_block_for_fn (cfun));
 
   /* Set the BLOCK_FOR_INSN of each label that starts a basic block.  */
-  FOR_EACH_BB (bb)
+  FOR_EACH_BB_FN (bb, cfun)
     if (LABEL_P (BB_HEAD (bb)))
       BLOCK_FOR_INSN (BB_HEAD (bb)) = bb;
 }
 
 /* Free up the resources allocated to mark_target_live_regs ().  This
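FOR_EACH_BB and BASIC_BLOCK used an implicit cfun; the _FN/_FOR_FN variants
spell the function out.  Assuming basic-block.h, the iterator is roughly:

    /* Approximate definition: walk FN's real blocks in layout order,
       excluding the entry and exit blocks.  */
    #define FOR_EACH_BB_FN(BB, FN) \
      FOR_BB_BETWEEN (BB, (FN)->cfg->x_entry_block_ptr->next_bb, \
                      (FN)->cfg->x_exit_block_ptr, next_bb)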
@@ -1238 +1261 @@
     {
       free (bb_ticks);
       bb_ticks = NULL;
     }
 
-  FOR_EACH_BB (bb)
+  FOR_EACH_BB_FN (bb, cfun)
     if (LABEL_P (BB_HEAD (bb)))
       BLOCK_FOR_INSN (BB_HEAD (bb)) = NULL;
 }
 
 /* Clear any hashed information that we have stored for INSN.  */
 
 void
-clear_hashed_info_for_insn (rtx insn)
+clear_hashed_info_for_insn (rtx_insn *insn)
 {
   struct target_info *tinfo;
 
   if (target_hash_table != NULL)
     {
@@ -1265 +1288 @@
 }
 
 /* Increment the tick count for the basic block that contains INSN.  */
 
 void
-incr_ticks_for_insn (rtx insn)
+incr_ticks_for_insn (rtx_insn *insn)
 {
   int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH);
 
   if (b != -1)
     bb_ticks[b]++;