Mercurial > hg > CbC > CbC_gcc
annotate gcc/resource.c @ 158:494b0b89df80 default tip
...
author | Shinji KONO <kono@ie.u-ryukyu.ac.jp> |
---|---|
date | Mon, 25 May 2020 18:13:55 +0900 |
parents | 1830386684a0 |
children |
rev | line source |
---|---|
0 | 1 /* Definitions for computing resource usage of specific insns. |
145 | 2 Copyright (C) 1999-2020 Free Software Foundation, Inc. |
0 | 3 |
4 This file is part of GCC. | |
5 | |
6 GCC is free software; you can redistribute it and/or modify it under | |
7 the terms of the GNU General Public License as published by the Free | |
8 Software Foundation; either version 3, or (at your option) any later | |
9 version. | |
10 | |
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY | |
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 for more details. | |
15 | |
16 You should have received a copy of the GNU General Public License | |
17 along with GCC; see the file COPYING3. If not see | |
18 <http://www.gnu.org/licenses/>. */ | |
19 | |
20 #include "config.h" | |
21 #include "system.h" | |
22 #include "coretypes.h" | |
111 | 23 #include "backend.h" |
0 | 24 #include "rtl.h" |
111 | 25 #include "df.h" |
26 #include "memmodel.h" | |
0 | 27 #include "tm_p.h" |
28 #include "regs.h" | |
111 | 29 #include "emit-rtl.h" |
0 | 30 #include "resource.h" |
31 #include "insn-attr.h" | |
145 | 32 #include "function-abi.h" |
0 | 33 |
/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so save them in a hash table rather than recomputing them
   each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info;
				   compared against bb_ticks to detect
				   stale cached liveness.  */
};
47 | |
/* Number of buckets in target_hash_table below.  A prime, presumably
   used as the hash modulus — TODO confirm against the hashing code
   elsewhere in this file.  */
#define TARGET_HASH_PRIME 257

/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Define the hash table itself.  */
static struct target_info **target_hash_table = NULL;

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Also used by update_live_status.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;

/* Forward declarations for the static helpers defined below.  */
static void update_live_status (rtx, const_rtx, void *);
static int find_basic_block (rtx_insn *, int);
static rtx_insn *next_insn_no_annul (rtx_insn *);
static rtx_insn *find_dead_or_set_registers (rtx_insn *, struct resources*,
					     rtx *, int, struct resources,
					     struct resources);
0 | 82 |
83 /* Utility function called from mark_target_live_regs via note_stores. | |
84 It deadens any CLOBBERed registers and livens any SET registers. */ | |
85 | |
86 static void | |
87 update_live_status (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED) | |
88 { | |
89 int first_regno, last_regno; | |
90 int i; | |
91 | |
92 if (!REG_P (dest) | |
93 && (GET_CODE (dest) != SUBREG || !REG_P (SUBREG_REG (dest)))) | |
94 return; | |
95 | |
96 if (GET_CODE (dest) == SUBREG) | |
97 { | |
98 first_regno = subreg_regno (dest); | |
99 last_regno = first_regno + subreg_nregs (dest); | |
100 | |
101 } | |
102 else | |
103 { | |
104 first_regno = REGNO (dest); | |
111 | 105 last_regno = END_REGNO (dest); |
0 | 106 } |
107 | |
108 if (GET_CODE (x) == CLOBBER) | |
109 for (i = first_regno; i < last_regno; i++) | |
110 CLEAR_HARD_REG_BIT (current_live_regs, i); | |
111 else | |
112 for (i = first_regno; i < last_regno; i++) | |
113 { | |
114 SET_HARD_REG_BIT (current_live_regs, i); | |
115 CLEAR_HARD_REG_BIT (pending_dead_regs, i); | |
116 } | |
117 } | |
118 | |
119 /* Find the number of the basic block with correct live register | |
120 information that starts closest to INSN. Return -1 if we couldn't | |
121 find such a basic block or the beginning is more than | |
122 SEARCH_LIMIT instructions before INSN. Use SEARCH_LIMIT = -1 for | |
123 an unlimited search. | |
124 | |
125 The delay slot filling code destroys the control-flow graph so, | |
126 instead of finding the basic block containing INSN, we search | |
127 backwards toward a BARRIER where the live register information is | |
128 correct. */ | |
129 | |
130 static int | |
111 | 131 find_basic_block (rtx_insn *insn, int search_limit) |
0 | 132 { |
133 /* Scan backwards to the previous BARRIER. Then see if we can find a | |
134 label that starts a basic block. Return the basic block number. */ | |
135 for (insn = prev_nonnote_insn (insn); | |
136 insn && !BARRIER_P (insn) && search_limit != 0; | |
137 insn = prev_nonnote_insn (insn), --search_limit) | |
138 ; | |
139 | |
140 /* The closest BARRIER is too far away. */ | |
141 if (search_limit == 0) | |
142 return -1; | |
143 | |
144 /* The start of the function. */ | |
145 else if (insn == 0) | |
111 | 146 return ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index; |
0 | 147 |
148 /* See if any of the upcoming CODE_LABELs start a basic block. If we reach | |
149 anything other than a CODE_LABEL or note, we can't find this code. */ | |
150 for (insn = next_nonnote_insn (insn); | |
151 insn && LABEL_P (insn); | |
152 insn = next_nonnote_insn (insn)) | |
19
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
153 if (BLOCK_FOR_INSN (insn)) |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
154 return BLOCK_FOR_INSN (insn)->index; |
0 | 155 |
156 return -1; | |
157 } | |
158 | |
159 /* Similar to next_insn, but ignores insns in the delay slots of | |
160 an annulled branch. */ | |
161 | |
111 | 162 static rtx_insn * |
163 next_insn_no_annul (rtx_insn *insn) | |
0 | 164 { |
165 if (insn) | |
166 { | |
167 /* If INSN is an annulled branch, skip any insns from the target | |
168 of the branch. */ | |
111 | 169 if (JUMP_P (insn) |
0 | 170 && INSN_ANNULLED_BRANCH_P (insn) |
171 && NEXT_INSN (PREV_INSN (insn)) != insn) | |
172 { | |
111 | 173 rtx_insn *next = NEXT_INSN (insn); |
0 | 174 |
111 | 175 while ((NONJUMP_INSN_P (next) || JUMP_P (next) || CALL_P (next)) |
0 | 176 && INSN_FROM_TARGET_P (next)) |
177 { | |
178 insn = next; | |
179 next = NEXT_INSN (insn); | |
180 } | |
181 } | |
182 | |
183 insn = NEXT_INSN (insn); | |
184 if (insn && NONJUMP_INSN_P (insn) | |
185 && GET_CODE (PATTERN (insn)) == SEQUENCE) | |
111 | 186 insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0); |
0 | 187 } |
188 | |
189 return insn; | |
190 } | |
191 | |
/* Given X, some rtl, and RES, a pointer to a `struct resources', mark
   which resources are referenced (i.e. used as inputs) by the insn:
   registers in RES->regs, plus the memory/volatil/cc flags.  If
   INCLUDE_DELAYED_EFFECTS is TRUE, resources used by the called routine
   will be included for CALL_INSNs.  */

void
mark_referenced_resources (rtx x, struct resources *res,
			   bool include_delayed_effects)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
    case DEBUG_INSN:
      /* Constants and labels reference nothing.  */
      return;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
	mark_referenced_resources (SUBREG_REG (x), res, false);
      else
	{
	  /* A SUBREG of a hard register: mark exactly the hard regs
	     it covers.  */
	  unsigned int regno = subreg_regno (x);
	  unsigned int last_regno = regno + subreg_nregs (x);

	  gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	  for (r = regno; r < last_regno; r++)
	    SET_HARD_REG_BIT (res->regs, r);
	}
      return;

    case REG:
      /* By this point (after reload) only hard registers remain.  */
      gcc_assert (HARD_REGISTER_P (x));
      add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
	 memory.  */
      if (! MEM_READONLY_P (x))
	res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  Break (not return) so the
	 generic sub-expression walk below still runs.  */
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We cannot just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, false);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
	 memory.  The second operand may be referenced, though.  */
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, false);
      mark_referenced_resources (XEXP (x, 1), res, false);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
	 registers used to access memory are referenced.  SET_DEST is
	 also referenced if it is a ZERO_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, false);

      x = SET_DEST (x);
      if (GET_CODE (x) == ZERO_EXTRACT
	  || GET_CODE (x) == STRICT_LOW_PART)
	mark_referenced_resources (x, res, false);
      else if (GET_CODE (x) == SUBREG)
	x = SUBREG_REG (x);
      if (MEM_P (x))
	mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CLOBBER:
      /* A CLOBBER destination is killed, not referenced.  */
      return;

    case CALL_INSN:
      if (include_delayed_effects)
	{
	  /* A CALL references memory, the frame pointer if it exists, the
	     stack pointer, any global registers and any registers given in
	     USE insns immediately in front of the CALL.

	     However, we may have moved some of the parameter loading insns
	     into the delay slot of this CALL.  If so, the USE's for them
	     don't count and should be skipped.  */
	  rtx_insn *insn = PREV_INSN (as_a <rtx_insn *> (x));
	  rtx_sequence *sequence = 0;
	  int seq_size = 0;
	  int i;

	  /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
	  if (NEXT_INSN (insn) != x)
	    {
	      sequence = as_a <rtx_sequence *> (PATTERN (NEXT_INSN (insn)));
	      seq_size = sequence->len ();
	      gcc_assert (GET_CODE (sequence) == SEQUENCE);
	    }

	  res->memory = 1;
	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    {
	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
	      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
		SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
	    }

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can need any register.

	     This is done to be more conservative about how we handle setjmp.
	     We assume that they both use and set all registers.  Using all
	     registers ensures that a register will not be considered dead
	     just because it crosses a setjmp call.  A register should be
	     considered dead only if the setjmp call returns nonzero.  */
	  if (find_reg_note (x, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);

	  {
	    rtx link;

	    /* Mark each USE in CALL_INSN_FUNCTION_USAGE, unless the used
	       value is produced by one of this call's own delay slots.  */
	    for (link = CALL_INSN_FUNCTION_USAGE (x);
		 link;
		 link = XEXP (link, 1))
	      if (GET_CODE (XEXP (link, 0)) == USE)
		{
		  for (i = 1; i < seq_size; i++)
		    {
		      rtx slot_pat = PATTERN (sequence->element (i));
		      if (GET_CODE (slot_pat) == SET
			  && rtx_equal_p (SET_DEST (slot_pat),
					  XEXP (XEXP (link, 0), 0)))
			break;
		    }
		  if (i >= seq_size)
		    mark_referenced_resources (XEXP (XEXP (link, 0), 0),
					       res, false);
		}
	  }
	}

      /* ... fall through to other INSN processing ...  */
      gcc_fallthrough ();

    case INSN:
    case JUMP_INSN:

      if (GET_CODE (PATTERN (x)) == COND_EXEC)
	/* In addition to the usual references, also consider all outputs
	   as referenced, to compensate for mark_set_resources treating
	   them as killed.  This is similar to ZERO_EXTRACT / STRICT_LOW_PART
	   handling, except that we got a partial incidence instead of a partial
	   width.  */
	mark_set_resources (x, res, 0,
			    include_delayed_effects
			    ? MARK_SRC_DEST_CALL : MARK_SRC_DEST);

      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (as_a <rtx_insn *> (x)))
	return;

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);
	break;
      }
}
412 | |
/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.

   RES is the live-register set being pruned in place.  SET and NEEDED
   are passed BY VALUE and accumulate the resources set and needed along
   the path walked so far.  JUMP_COUNT is the branch-following budget
   already consumed (conditional branches cost 4, others 1; the walk
   stops at 10).  If JUMP_TARGET is nonnull, *JUMP_TARGET receives the
   label of the first unconditional jump followed.

   Returns the first unconditional jump insn that was followed, or 0 if
   none was (or if the jump budget expired, since continuing could loop
   forever on an infinite loop in the function being compiled).  */

static rtx_insn *
find_dead_or_set_registers (rtx_insn *target, struct resources *res,
			    rtx *jump_target, int jump_count,
			    struct resources set, struct resources needed)
{
  HARD_REG_SET scratch;
  rtx_insn *insn;
  rtx_insn *next_insn;
  rtx_insn *jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next_insn)
    {
      rtx_insn *this_insn = insn;

      next_insn = NEXT_INSN (insn);

      /* If this instruction can throw an exception, then we don't
	 know where we might end up next.  That means that we have to
	 assume that whatever we have already marked as live really is
	 live.  */
      if (can_throw_internal (insn))
	break;

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  /* After a label, any pending dead registers that weren't yet
	     used can be made dead.  */
	  pending_dead_regs &= ~needed.regs;
	  res->regs &= ~pending_dead_regs;
	  CLEAR_HARD_REG_SET (pending_dead_regs);

	  continue;

	case BARRIER:
	case NOTE:
	case DEBUG_INSN:
	  continue;

	case INSN:
	  if (GET_CODE (PATTERN (insn)) == USE)
	    {
	      /* If INSN is a USE made by update_block, we care about the
		 underlying insn.  Any registers set by the underlying insn
		 are live since the insn is being done somewhere else.  */
	      if (INSN_P (XEXP (PATTERN (insn), 0)))
		mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
				    MARK_SRC_DEST_CALL);

	      /* All other USE insns are to be ignored.  */
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == CLOBBER)
	    continue;
	  else if (rtx_sequence *seq =
		     dyn_cast <rtx_sequence *> (PATTERN (insn)))
	    {
	      /* An unconditional jump can be used to fill the delay slot
		 of a call, so search for a JUMP_INSN in any position.  */
	      for (i = 0; i < seq->len (); i++)
		{
		  this_insn = seq->insn (i);
		  if (JUMP_P (this_insn))
		    break;
		}
	    }

	default:
	  break;
	}

      /* Found a jump (possibly inside a SEQUENCE): decide whether to
	 follow it.  */
      if (rtx_jump_insn *this_jump_insn =
	    dyn_cast <rtx_jump_insn *> (this_insn))
	{
	  if (jump_count++ < 10)
	    {
	      if (any_uncondjump_p (this_jump_insn)
		  || ANY_RETURN_P (PATTERN (this_jump_insn)))
		{
		  /* Follow an unconditional jump: continue scanning at its
		     target (or stop at a return).  */
		  rtx lab_or_return = this_jump_insn->jump_label ();
		  if (ANY_RETURN_P (lab_or_return))
		    next_insn = NULL;
		  else
		    next_insn = as_a <rtx_insn *> (lab_or_return);
		  if (jump_insn == 0)
		    {
		      jump_insn = insn;
		      if (jump_target)
			*jump_target = JUMP_LABEL (this_jump_insn);
		    }
		}
	      else if (any_condjump_p (this_jump_insn))
		{
		  struct resources target_set, target_res;
		  struct resources fallthrough_res;

		  /* We can handle conditional branches here by following
		     both paths, and then IOR the results of the two paths
		     together, which will give us registers that are dead
		     on both paths.  Since this is expensive, we give it
		     a much higher cost than unconditional branches.  The
		     cost was chosen so that we will follow at most 1
		     conditional branch.  */

		  jump_count += 4;
		  if (jump_count >= 10)
		    break;

		  mark_referenced_resources (insn, &needed, true);

		  /* For an annulled branch, mark_set_resources ignores slots
		     filled by instructions from the target.  This is correct
		     if the branch is not taken.  Since we are following both
		     paths from the branch, we must also compute correct info
		     if the branch is taken.  We do this by inverting all of
		     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
		     and then inverting the INSN_FROM_TARGET_P bits again.  */

		  if (GET_CODE (PATTERN (insn)) == SEQUENCE
		      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
		    {
		      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
		      for (i = 1; i < seq->len (); i++)
			INSN_FROM_TARGET_P (seq->element (i))
			  = ! INSN_FROM_TARGET_P (seq->element (i));

		      target_set = set;
		      mark_set_resources (insn, &target_set, 0,
					  MARK_SRC_DEST_CALL);

		      /* Restore the INSN_FROM_TARGET_P bits.  */
		      for (i = 1; i < seq->len (); i++)
			INSN_FROM_TARGET_P (seq->element (i))
			  = ! INSN_FROM_TARGET_P (seq->element (i));

		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		    }
		  else
		    {
		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		      target_set = set;
		    }

		  /* Registers set but not needed are dead on each path.  */
		  target_res = *res;
		  scratch = target_set.regs & ~needed.regs;
		  target_res.regs &= ~scratch;

		  fallthrough_res = *res;
		  scratch = set.regs & ~needed.regs;
		  fallthrough_res.regs &= ~scratch;

		  if (!ANY_RETURN_P (this_jump_insn->jump_label ()))
		    find_dead_or_set_registers
		      (this_jump_insn->jump_target (),
		       &target_res, 0, jump_count, target_set, needed);
		  find_dead_or_set_registers (next_insn,
					      &fallthrough_res, 0, jump_count,
					      set, needed);
		  /* Live = live on either path (IOR), then intersect with
		     what we already believed live.  */
		  fallthrough_res.regs |= target_res.regs;
		  res->regs &= fallthrough_res.regs;
		  break;
		}
	      else
		break;
	    }
	  else
	    {
	      /* Don't try this optimization if we expired our jump count
		 above, since that would mean there may be an infinite loop
		 in the function being compiled.  */
	      jump_insn = 0;
	      break;
	    }
	}

      mark_referenced_resources (insn, &needed, true);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      /* Anything set before being needed is dead here.  */
      scratch = set.regs & ~needed.regs;
      res->regs &= ~scratch;
    }

  return jump_insn;
}
602 | |
/* Given X, a part of an insn, and a pointer to a `struct resources',
   RES, indicate which resources are modified by the insn.  If
   MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
   set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */

void
mark_set_resources (rtx x, struct resources *res, int in_dest,
		    enum mark_resource_type mark_type)
{
  enum rtx_code code;
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* `goto restart' is used instead of recursion for the common tail
     case of descending from an insn into its PATTERN.  */
 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    CASE_CONST_ANY:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
    case DEBUG_INSN:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
	res->cc = 1;
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
	 that aren't saved across calls, global registers and anything
	 explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (mark_type == MARK_SRC_DEST_CALL)
	{
	  rtx_call_insn *call_insn = as_a <rtx_call_insn *> (x);
	  rtx link;

	  res->cc = res->memory = 1;

	  /* Everything the callee's ABI may clobber counts as set.  */
	  res->regs |= insn_callee_abi (call_insn).full_reg_clobbers ();

	  for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
	       link; link = XEXP (link, 1))
	    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	      mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
				  MARK_SRC_DEST);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can clobber any register.  */
	  if (find_reg_note (call_insn, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);
	}

      /* ... and also what its RTL says it modifies, if anything.  */
      gcc_fallthrough ();

    case JUMP_INSN:
    case INSN:

	/* An insn consisting of just a CLOBBER (or USE) is just for flow
	   and doesn't actually do anything, so we ignore it.  */

      if (mark_type != MARK_SRC_DEST_CALL
	  && INSN_SETS_ARE_DELAYED (as_a <rtx_insn *> (x)))
	return;

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
	goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
	 the called routine.  So only include it if we are to include the
	 effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
			  (mark_type == MARK_SRC_DEST_CALL
			   || GET_CODE (SET_SRC (x)) != CALL),
			  mark_type);

      mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case SEQUENCE:
      {
	rtx_sequence *seq = as_a <rtx_sequence *> (x);
	rtx control = seq->element (0);
	bool annul_p = JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control);

	mark_set_resources (control, res, 0, mark_type);
	for (i = seq->len () - 1; i >= 0; --i)
	  {
	    rtx elt = seq->element (i);
	    if (!annul_p && INSN_FROM_TARGET_P (elt))
	      mark_set_resources (elt, res, 0, mark_type);
	  }
      }
      return;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      /* Auto-modify addressing sets its address register.  */
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case PRE_MODIFY:
    case POST_MODIFY:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      /* The extracted-from operand inherits IN_DEST; position and width
	 operands are only referenced.  */
      mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
      return;

    case MEM:
      if (in_dest)
	{
	  res->memory = 1;
	  res->volatil |= MEM_VOLATILE_P (x);
	}

      /* The address itself is only referenced.  */
      mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
      return;

    case SUBREG:
      if (in_dest)
	{
	  if (!REG_P (SUBREG_REG (x)))
	    mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
	  else
	    {
	      /* SUBREG of a hard register: mark the covered hard regs.  */
	      unsigned int regno = subreg_regno (x);
	      unsigned int last_regno = regno + subreg_nregs (x);

	      gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	      for (r = regno; r < last_regno; r++)
		SET_HARD_REG_BIT (res->regs, r);
	    }
	}
      return;

    case REG:
      if (in_dest)
	{
	  gcc_assert (HARD_REGISTER_P (x));
	  add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
	}
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      /* Break (not return) so the condition operand is still walked.  */
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We cannot just fall through here since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
	 traditional asms unlike their normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
			    MARK_SRC_DEST);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
	break;
      }
}
825 | |
826 /* Return TRUE if INSN is a return, possibly with a filled delay slot. */ | |
827 | |
828 static bool | |
829 return_insn_p (const_rtx insn) | |
830 { | |
111 | 831 if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn))) |
0 | 832 return true; |
833 | |
834 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE) | |
835 return return_insn_p (XVECEXP (PATTERN (insn), 0, 0)); | |
836 | |
837 return false; | |
838 } | |
839 | |
840 /* Set the resources that are live at TARGET. | |
841 | |
842 If TARGET is zero, we refer to the end of the current function and can | |
843 return our precomputed value. | |
844 | |
845 Otherwise, we try to find out what is live by consulting the basic block | |
846 information. This is tricky, because we must consider the actions of | |
847 reload and jump optimization, which occur after the basic block information | |
848 has been computed. | |
849 | |
850 Accordingly, we proceed as follows:: | |
851 | |
852 We find the previous BARRIER and look at all immediately following labels | |
853 (with no intervening active insns) to see if any of them start a basic | |
854 block. If we hit the start of the function first, we use block 0. | |
855 | |
   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status (by starting at a label following a
   BARRIER, we are immune to actions taken by reload and jump.)  Then we
   scan all insns between that point and our target.  For each CLOBBER (or
   for call-clobbered regs when we pass a CALL_INSN), mark the appropriate
   registers as dead.  For a SET, mark them as live.
0 | 862 |
863 We have to be careful when using REG_DEAD notes because they are not | |
864 updated by such things as find_equiv_reg. So keep track of registers | |
865 marked as dead that haven't been assigned to, and mark them dead at the | |
866 next CODE_LABEL since reload and jump won't propagate values across labels. | |
867 | |
868 If we cannot find the start of a basic block (should be a very rare | |
869 case, if it can happen at all), mark everything as potentially live. | |
870 | |
871 Next, scan forward from TARGET looking for things set or clobbered | |
872 before they are used. These are not live. | |
873 | |
874 Because we can be called many times on the same target, save our results | |
875 in a hash table indexed by INSN_UID. This is only done if the function | |
876 init_resource_info () was invoked before we are called. */ | |
877 | |
void
mark_target_live_regs (rtx_insn *insns, rtx target_maybe_return, struct resources *res)
{
  int b = -1;
  unsigned int i;
  struct target_info *tinfo = NULL;
  rtx_insn *insn;
  rtx jump_target;
  HARD_REG_SET scratch;
  struct resources set, needed;

  /* Handle end of function.  A null target or an explicit (SIMPLE_)RETURN
     rtx both mean "end of function": use the precomputed set.  */
  if (target_maybe_return == 0 || ANY_RETURN_P (target_maybe_return))
    {
      *res = end_of_function_needs;
      return;
    }

  /* We've handled the case of RETURN/SIMPLE_RETURN; we should now have an
     instruction.  */
  rtx_insn *target = as_a <rtx_insn *> (target_maybe_return);

  /* Handle return insn.  */
  if (return_insn_p (target))
    {
      *res = end_of_function_needs;
      mark_referenced_resources (target, res, false);
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->volatil = 0;
  res->cc = 0;

  /* See if we have computed this value already.  */
  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (target))
	  break;

      /* Start by getting the basic block number.  If we have saved
	 information, we can get it from there unless the insn at the
	 start of the basic block has been deleted.  */
      if (tinfo && tinfo->block != -1
	  && ! BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, tinfo->block))->deleted ())
	b = tinfo->block;
    }

  if (b == -1)
    b = find_basic_block (target, param_max_delay_slot_live_search);

  if (target_hash_table != NULL)
    {
      if (tinfo)
	{
	  /* If the information is up-to-date (same block and its tick
	     count unchanged), use it.  Otherwise, we will update it
	     below.  */
	  if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
	    {
	      res->regs = tinfo->live_regs;
	      return;
	    }
	}
      else
	{
	  /* Allocate a place to put our results and chain it into the
	     hash table.  */
	  tinfo = XNEW (struct target_info);
	  tinfo->uid = INSN_UID (target);
	  tinfo->block = b;
	  tinfo->next
	    = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	  target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
	}
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET; this custom life analysis is really about registers so we need
     to use the LR problem.  Otherwise, we must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = DF_LR_IN (BASIC_BLOCK_FOR_FN (cfun, b));
      rtx_insn *start_insn, *stop_insn;
      df_ref def;

      /* Compute hard regs live at start of block, including artificial
	 defs that apply at the top of the block.  */
      REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);
      FOR_EACH_ARTIFICIAL_DEF (def, b)
	if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
	  SET_HARD_REG_BIT (current_live_regs, DF_REF_REGNO (def));

      /* Get starting and ending insn, handling the case where each might
	 be a SEQUENCE.  */
      start_insn = (b == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index ?
		    insns : BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, b)));
      stop_insn = target;

      if (NONJUMP_INSN_P (start_insn)
	  && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
	start_insn = as_a <rtx_sequence *> (PATTERN (start_insn))->insn (0);

      if (NONJUMP_INSN_P (stop_insn)
	  && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
	stop_insn = next_insn (PREV_INSN (stop_insn));

      for (insn = start_insn; insn != stop_insn;
	   insn = next_insn_no_annul (insn))
	{
	  rtx link;
	  rtx_insn *real_insn = insn;
	  enum rtx_code code = GET_CODE (insn);

	  /* Debug insns never affect liveness.  */
	  if (DEBUG_INSN_P (insn))
	    continue;

	  /* If this insn is from the target of a branch, it isn't going to
	     be used in the sequel.  If it is used in both cases, this
	     test will not be true.  */
	  if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
	      && INSN_FROM_TARGET_P (insn))
	    continue;

	  /* If this insn is a USE made by update_block, we care about the
	     underlying insn.  */
	  if (code == INSN
	      && GET_CODE (PATTERN (insn)) == USE
	      && INSN_P (XEXP (PATTERN (insn), 0)))
	    real_insn = as_a <rtx_insn *> (XEXP (PATTERN (insn), 0));

	  if (CALL_P (real_insn))
	    {
	      /* Values in call-clobbered registers survive a COND_EXEC CALL
		 if that is not executed; this matters for resource use because
		 they may be used by a complementarily (or more strictly)
		 predicated instruction, or if the CALL is NORETURN.  */
	      if (GET_CODE (PATTERN (real_insn)) != COND_EXEC)
		{
		  HARD_REG_SET regs_invalidated_by_this_call
		    = insn_callee_abi (real_insn).full_reg_clobbers ();
		  /* CALL clobbers all call-used regs that aren't fixed except
		     sp, ap, and fp.  Do this before setting the result of the
		     call live.  */
		  current_live_regs &= ~regs_invalidated_by_this_call;
		}

	      /* A CALL_INSN sets any global register live, since it may
		 have been modified by the call.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (global_regs[i])
		  SET_HARD_REG_BIT (current_live_regs, i);
	    }

	  /* Mark anything killed in an insn to be deadened at the next
	     label.  Ignore USE insns; the only REG_DEAD notes will be for
	     parameters.  But they might be early.  A CALL_INSN will usually
	     clobber registers used for parameters.  It isn't worth bothering
	     with the unlikely case when it won't.  */
	  if ((NONJUMP_INSN_P (real_insn)
	       && GET_CODE (PATTERN (real_insn)) != USE
	       && GET_CODE (PATTERN (real_insn)) != CLOBBER)
	      || JUMP_P (real_insn)
	      || CALL_P (real_insn))
	    {
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  add_to_hard_reg_set (&pending_dead_regs,
				       GET_MODE (XEXP (link, 0)),
				       REGNO (XEXP (link, 0)));

	      /* update_live_status flips bits in current_live_regs for
		 each store (SET kills pending death; CLOBBER kills).  */
	      note_stores (real_insn, update_live_status, NULL);

	      /* If any registers were unused after this insn, kill them.
		 These notes will always be accurate.  */
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  remove_from_hard_reg_set (&current_live_regs,
					    GET_MODE (XEXP (link, 0)),
					    REGNO (XEXP (link, 0)));
	    }

	  else if (LABEL_P (real_insn))
	    {
	      basic_block bb;

	      /* A label clobbers the pending dead registers since neither
		 reload nor jump will propagate a value across a label.  */
	      current_live_regs &= ~pending_dead_regs;
	      CLEAR_HARD_REG_SET (pending_dead_regs);

	      /* We must conservatively assume that all registers that used
		 to be live here still are.  The fallthrough edge may have
		 left a live register uninitialized.  */
	      bb = BLOCK_FOR_INSN (real_insn);
	      if (bb)
		{
		  HARD_REG_SET extra_live;

		  REG_SET_TO_HARD_REG_SET (extra_live, DF_LR_IN (bb));
		  current_live_regs |= extra_live;
		}
	    }

	  /* The beginning of the epilogue corresponds to the end of the
	     RTL chain when there are no epilogue insns.  Certain resources
	     are implicitly required at that point.  */
	  else if (NOTE_P (real_insn)
		   && NOTE_KIND (real_insn) == NOTE_INSN_EPILOGUE_BEG)
	    current_live_regs |= start_of_epilogue_needs.regs;
	}

      res->regs = current_live_regs;
      if (tinfo != NULL)
	{
	  tinfo->block = b;
	  tinfo->bb_tick = bb_ticks[b];
	}
    }
  else
    /* We didn't find the start of a basic block.  Assume everything
       in use.  This should happen only extremely rarely.  */
    SET_HARD_REG_SET (res->regs);

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);

  rtx_insn *jump_insn = find_dead_or_set_registers (target, res, &jump_target,
						    0, set, needed);

  /* If we hit an unconditional branch, we have another way of finding out
     what is live: we can see what is live at the branch target and include
     anything used but not set before the branch.  We add the live
     resources found using the test below to those found until now.  */

  if (jump_insn)
    {
      struct resources new_resources;
      rtx_insn *stop_insn = next_active_insn (jump_insn);

      if (!ANY_RETURN_P (jump_target))
	jump_target = next_active_insn (as_a<rtx_insn *> (jump_target));
      /* NOTE(review): recursive call — termination relies on the branch
	 structure find_dead_or_set_registers follows being acyclic.  */
      mark_target_live_regs (insns, jump_target, &new_resources);
      CLEAR_RESOURCE (&set);
      CLEAR_RESOURCE (&needed);

      /* Include JUMP_INSN in the needed registers.  */
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
	{
	  mark_referenced_resources (insn, &needed, true);

	  /* Registers needed here but not yet set before the branch are
	     live into the branch target as well.  */
	  scratch = needed.regs & ~set.regs;
	  new_resources.regs |= scratch;

	  mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
	}

      res->regs |= new_resources.regs;
    }

  /* Cache the final answer for subsequent queries on the same target.  */
  if (tinfo != NULL)
    tinfo->live_regs = res->regs;
}
1149 | |
/* Initialize the resources required by mark_target_live_regs ().
   This should be invoked before the first call to mark_target_live_regs.  */

void
init_resource_info (rtx_insn *epilogue_insn)
{
  int i;
  basic_block bb;

  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.
     The stack pointer is needed unless EXIT_IGNORE_STACK is true
     and there is an epilogue that restores the original stack pointer
     from the frame pointer.  Registers used to return the function value
     are needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
      /* On targets where the hard frame pointer is a distinct register,
	 it is needed as well.  */
      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER)
	SET_HARD_REG_BIT (end_of_function_needs.regs,
			  HARD_FRAME_POINTER_REGNUM);
    }
  if (!(frame_pointer_needed
	&& EXIT_IGNORE_STACK
	&& epilogue_insn
	&& !crtl->sp_is_unchanging))
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  /* The function's return value (if any) must survive to the end.  */
  if (crtl->return_rtx != 0)
    mark_referenced_resources (crtl->return_rtx,
			       &end_of_function_needs, true);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i] || EPILOGUE_USES (i))
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  /* Snapshot taken BEFORE the epilogue scan below augments
     end_of_function_needs — the order of these two steps matters.  */
  start_of_epilogue_needs = end_of_function_needs;

  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    {
      mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
			  MARK_SRC_DEST_CALL);
      if (return_insn_p (epilogue_insn))
	break;
    }

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME);
  bb_ticks = XCNEWVEC (int, last_basic_block_for_fn (cfun));

  /* Set the BLOCK_FOR_INSN of each label that starts a basic block.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = bb;
}
1227 | |
1228 /* Free up the resources allocated to mark_target_live_regs (). This | |
1229 should be invoked after the last call to mark_target_live_regs (). */ | |
1230 | |
1231 void | |
1232 free_resource_info (void) | |
1233 { | |
19
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1234 basic_block bb; |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1235 |
0 | 1236 if (target_hash_table != NULL) |
1237 { | |
1238 int i; | |
1239 | |
1240 for (i = 0; i < TARGET_HASH_PRIME; ++i) | |
1241 { | |
1242 struct target_info *ti = target_hash_table[i]; | |
1243 | |
1244 while (ti) | |
1245 { | |
1246 struct target_info *next = ti->next; | |
1247 free (ti); | |
1248 ti = next; | |
1249 } | |
1250 } | |
1251 | |
1252 free (target_hash_table); | |
1253 target_hash_table = NULL; | |
1254 } | |
1255 | |
1256 if (bb_ticks != NULL) | |
1257 { | |
1258 free (bb_ticks); | |
1259 bb_ticks = NULL; | |
1260 } | |
19
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1261 |
111 | 1262 FOR_EACH_BB_FN (bb, cfun) |
19
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1263 if (LABEL_P (BB_HEAD (bb))) |
58ad6c70ea60
update gcc from 4.4.0 to 4.4.1.
kent@firefly.cr.ie.u-ryukyu.ac.jp
parents:
0
diff
changeset
|
1264 BLOCK_FOR_INSN (BB_HEAD (bb)) = NULL; |
0 | 1265 } |
1266 | |
1267 /* Clear any hashed information that we have stored for INSN. */ | |
1268 | |
1269 void | |
111 | 1270 clear_hashed_info_for_insn (rtx_insn *insn) |
0 | 1271 { |
1272 struct target_info *tinfo; | |
1273 | |
1274 if (target_hash_table != NULL) | |
1275 { | |
1276 for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME]; | |
1277 tinfo; tinfo = tinfo->next) | |
1278 if (tinfo->uid == INSN_UID (insn)) | |
1279 break; | |
1280 | |
1281 if (tinfo) | |
1282 tinfo->block = -1; | |
1283 } | |
1284 } | |
1285 | |
1286 /* Increment the tick count for the basic block that contains INSN. */ | |
1287 | |
1288 void | |
111 | 1289 incr_ticks_for_insn (rtx_insn *insn) |
0 | 1290 { |
145 | 1291 int b = find_basic_block (insn, param_max_delay_slot_live_search); |
0 | 1292 |
1293 if (b != -1) | |
1294 bb_ticks[b]++; | |
1295 } | |
1296 | |
/* Add TRIAL to the set of resources used at the end of the current
   function.  */
void
mark_end_of_function_resources (rtx trial, bool include_delayed_effects)
{
  /* Accumulate everything TRIAL references into end_of_function_needs;
     INCLUDE_DELAYED_EFFECTS is forwarded so delay-slot contents can be
     counted as well.  */
  mark_referenced_resources (trial, &end_of_function_needs,
			     include_delayed_effects);
}