comparison gcc/rtlanal.c @ 0:a06113de4d67

first commit
author kent <kent@cr.ie.u-ryukyu.ac.jp>
date Fri, 17 Jul 2009 14:47:48 +0900
parents
children 3bfb6c00c1e0
1 /* Analyze RTL for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "rtl.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "target.h"
33 #include "output.h"
34 #include "tm_p.h"
35 #include "flags.h"
36 #include "real.h"
37 #include "regs.h"
38 #include "function.h"
39 #include "df.h"
40 #include "tree.h"
41
42 /* Information about a subreg of a hard register. */
43 struct subreg_info
44 {
45 /* Offset of first hard register involved in the subreg. */
46 int offset;
47 /* Number of hard registers involved in the subreg. */
48 int nregs;
49 /* Whether this subreg can be represented as a hard reg with the new
50 mode. */
51 bool representable_p;
52 };
53
54 /* Forward declarations */
55 static void set_of_1 (rtx, const_rtx, void *);
56 static bool covers_regno_p (const_rtx, unsigned int);
57 static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
58 static int rtx_referenced_p_1 (rtx *, void *);
59 static int computed_jump_p_1 (const_rtx);
60 static void parms_set (rtx, const_rtx, void *);
61 static void subreg_get_info (unsigned int, enum machine_mode,
62 unsigned int, enum machine_mode,
63 struct subreg_info *);
64
65 static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, enum machine_mode,
66 const_rtx, enum machine_mode,
67 unsigned HOST_WIDE_INT);
68 static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, enum machine_mode,
69 const_rtx, enum machine_mode,
70 unsigned HOST_WIDE_INT);
71 static unsigned int cached_num_sign_bit_copies (const_rtx, enum machine_mode, const_rtx,
72 enum machine_mode,
73 unsigned int);
74 static unsigned int num_sign_bit_copies1 (const_rtx, enum machine_mode, const_rtx,
75 enum machine_mode, unsigned int);
76
77 /* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
78 -1 if a code has no such operand. */
79 static int non_rtx_starting_operands[NUM_RTX_CODE];
80
81 /* Bit flags that specify the machine subtype we are compiling for.
82 Bits are tested using macros TARGET_... defined in the tm.h file
83 and set by `-m...' switches. Must be defined in rtlanal.c. */
84
85 int target_flags;
86
87 /* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
88 If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
89 SIGN_EXTEND then while narrowing we also have to enforce the
90 representation and sign-extend the value to mode DESTINATION_REP.
91
92 If the value is already sign-extended to DESTINATION_REP mode we
93 can just switch to DESTINATION mode on it. For each pair of
94 integral modes SOURCE and DESTINATION, when truncating from SOURCE
95 to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
96 contains the number of high-order bits in SOURCE that have to be
97 copies of the sign-bit so that we can do this mode-switch to
98 DESTINATION. */
99
100 static unsigned int
101 num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
102
103 /* Return 1 if the value of X is unstable
104 (would be different at a different point in the program).
105 The frame pointer, arg pointer, etc. are considered stable
106 (within one function) and so is anything marked `unchanging'. */
107
108 int
109 rtx_unstable_p (const_rtx x)
110 {
111 const RTX_CODE code = GET_CODE (x);
112 int i;
113 const char *fmt;
114
115 switch (code)
116 {
117 case MEM:
118 return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));
119
120 case CONST:
121 case CONST_INT:
122 case CONST_DOUBLE:
123 case CONST_FIXED:
124 case CONST_VECTOR:
125 case SYMBOL_REF:
126 case LABEL_REF:
127 return 0;
128
129 case REG:
130 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
131 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
132 /* The arg pointer varies if it is not a fixed register. */
133 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
134 return 0;
135 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
136 /* ??? When call-clobbered, the value is stable modulo the restore
137 that must happen after a call. This currently screws up local-alloc
138 into believing that the restore is not needed. */
139 if (x == pic_offset_table_rtx)
140 return 0;
141 #endif
142 return 1;
143
144 case ASM_OPERANDS:
145 if (MEM_VOLATILE_P (x))
146 return 1;
147
148 /* Fall through. */
149
150 default:
151 break;
152 }
153
154 fmt = GET_RTX_FORMAT (code);
155 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
156 if (fmt[i] == 'e')
157 {
158 if (rtx_unstable_p (XEXP (x, i)))
159 return 1;
160 }
161 else if (fmt[i] == 'E')
162 {
163 int j;
164 for (j = 0; j < XVECLEN (x, i); j++)
165 if (rtx_unstable_p (XVECEXP (x, i, j)))
166 return 1;
167 }
168
169 return 0;
170 }
171
172 /* Return 1 if X has a value that can vary even between two
173 executions of the program. 0 means X can be compared reliably
174 against certain constants or near-constants.
175 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
176 zero, we are slightly more conservative.
177 The frame pointer and the arg pointer are considered constant. */
178
179 bool
180 rtx_varies_p (const_rtx x, bool for_alias)
181 {
182 RTX_CODE code;
183 int i;
184 const char *fmt;
185
186 if (!x)
187 return 0;
188
189 code = GET_CODE (x);
190 switch (code)
191 {
192 case MEM:
193 return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
194
195 case CONST:
196 case CONST_INT:
197 case CONST_DOUBLE:
198 case CONST_FIXED:
199 case CONST_VECTOR:
200 case SYMBOL_REF:
201 case LABEL_REF:
202 return 0;
203
204 case REG:
205 /* Note that we have to test for the actual rtx used for the frame
206 and arg pointers and not just the register number in case we have
207 eliminated the frame and/or arg pointer and are using it
208 for pseudos. */
209 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
210 /* The arg pointer varies if it is not a fixed register. */
211 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
212 return 0;
213 if (x == pic_offset_table_rtx
214 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
215 /* ??? When call-clobbered, the value is stable modulo the restore
216 that must happen after a call. This currently screws up
217 local-alloc into believing that the restore is not needed, so we
218 must return 0 only if we are called from alias analysis. */
219 && for_alias
220 #endif
221 )
222 return 0;
223 return 1;
224
225 case LO_SUM:
226 /* The operand 0 of a LO_SUM is considered constant
227 (in fact it is related specifically to operand 1)
228 during alias analysis. */
229 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
230 || rtx_varies_p (XEXP (x, 1), for_alias);
231
232 case ASM_OPERANDS:
233 if (MEM_VOLATILE_P (x))
234 return 1;
235
236 /* Fall through. */
237
238 default:
239 break;
240 }
241
242 fmt = GET_RTX_FORMAT (code);
243 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
244 if (fmt[i] == 'e')
245 {
246 if (rtx_varies_p (XEXP (x, i), for_alias))
247 return 1;
248 }
249 else if (fmt[i] == 'E')
250 {
251 int j;
252 for (j = 0; j < XVECLEN (x, i); j++)
253 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
254 return 1;
255 }
256
257 return 0;
258 }
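
/* Illustrative sketch, not part of the original file: one way a caller
   might combine the two predicates above.  rtx_unstable_p asks whether a
   value can change within a single function; rtx_varies_p (with FOR_ALIAS
   false) asks whether it can differ between executions.  Hypothetical
   helper, assuming only the declarations in this file.  */
#if 0
static bool
mem_comparable_to_constant_p (const_rtx mem)
{
  /* A MEM whose address neither varies nor is unstable can be
     compared reliably against constant addresses.  */
  return MEM_P (mem)
         && !rtx_varies_p (XEXP (mem, 0), false)
         && !rtx_unstable_p (XEXP (mem, 0));
}
#endif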
259
260 /* Return nonzero if the use of X as an address in a MEM can cause a trap.
261 MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
262 whether nonzero is returned for unaligned memory accesses on strict
263 alignment machines. */
264
265 static int
266 rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size,
267 enum machine_mode mode, bool unaligned_mems)
268 {
269 enum rtx_code code = GET_CODE (x);
270
271 if (STRICT_ALIGNMENT
272 && unaligned_mems
273 && GET_MODE_SIZE (mode) != 0)
274 {
275 HOST_WIDE_INT actual_offset = offset;
276 #ifdef SPARC_STACK_BOUNDARY_HACK
277 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
278 the real alignment of %sp. However, when it does this, the
279 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
280 if (SPARC_STACK_BOUNDARY_HACK
281 && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
282 actual_offset -= STACK_POINTER_OFFSET;
283 #endif
284
285 if (actual_offset % GET_MODE_SIZE (mode) != 0)
286 return 1;
287 }
288
289 switch (code)
290 {
291 case SYMBOL_REF:
292 if (SYMBOL_REF_WEAK (x))
293 return 1;
294 if (!CONSTANT_POOL_ADDRESS_P (x))
295 {
296 tree decl;
297 HOST_WIDE_INT decl_size;
298
299 if (offset < 0)
300 return 1;
301 if (size == 0)
302 size = GET_MODE_SIZE (mode);
303 if (size == 0)
304 return offset != 0;
305
306 /* If the size of the access or of the symbol is unknown,
307 assume the worst. */
308 decl = SYMBOL_REF_DECL (x);
309
310 /* Else check that the access is in bounds. TODO: restructure
311 expr_size/lhd_expr_size/int_expr_size and just use the latter. */
312 if (!decl)
313 decl_size = -1;
314 else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
315 decl_size = (host_integerp (DECL_SIZE_UNIT (decl), 0)
316 ? tree_low_cst (DECL_SIZE_UNIT (decl), 0)
317 : -1);
318 else if (TREE_CODE (decl) == STRING_CST)
319 decl_size = TREE_STRING_LENGTH (decl);
320 else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
321 decl_size = int_size_in_bytes (TREE_TYPE (decl));
322 else
323 decl_size = -1;
324
325 return (decl_size <= 0 ? offset != 0 : offset + size > decl_size);
326 }
327
328 return 0;
329
330 case LABEL_REF:
331 return 0;
332
333 case REG:
334 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
335 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
336 || x == stack_pointer_rtx
337 /* The arg pointer varies if it is not a fixed register. */
338 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
339 return 0;
340 /* All of the virtual frame registers are stack references. */
341 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
342 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
343 return 0;
344 return 1;
345
346 case CONST:
347 return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
348 mode, unaligned_mems);
349
350 case PLUS:
351 /* An address is assumed not to trap if:
352 - it is the pic register plus a constant. */
353 if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
354 return 0;
355
356 /* - or it is an address that can't trap plus a constant integer,
357 with the proper remainder modulo the mode size if we are
358 considering unaligned memory references. */
359 if (GET_CODE (XEXP (x, 1)) == CONST_INT
360 && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + INTVAL (XEXP (x, 1)),
361 size, mode, unaligned_mems))
362 return 0;
363
364 return 1;
365
366 case LO_SUM:
367 case PRE_MODIFY:
368 return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size,
369 mode, unaligned_mems);
370
371 case PRE_DEC:
372 case PRE_INC:
373 case POST_DEC:
374 case POST_INC:
375 case POST_MODIFY:
376 return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
377 mode, unaligned_mems);
378
379 default:
380 break;
381 }
382
383 /* If it isn't one of the cases above, it can cause a trap. */
384 return 1;
385 }
386
387 /* Return nonzero if the use of X as an address in a MEM can cause a trap. */
388
389 int
390 rtx_addr_can_trap_p (const_rtx x)
391 {
392 return rtx_addr_can_trap_p_1 (x, 0, 0, VOIDmode, false);
393 }
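
/* Illustrative sketch, not part of the original file: a pass that wants
   to move a load speculatively could first ask whether its address can
   fault.  Hypothetical helper, assuming only the declarations above.  */
#if 0
static bool
load_speculatable_p (const_rtx mem)
{
  return MEM_P (mem)
         && !MEM_VOLATILE_P (mem)
         && !rtx_addr_can_trap_p (XEXP (mem, 0));
}
#endif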
394
395 /* Return true if X is an address that is known to not be zero. */
396
397 bool
398 nonzero_address_p (const_rtx x)
399 {
400 const enum rtx_code code = GET_CODE (x);
401
402 switch (code)
403 {
404 case SYMBOL_REF:
405 return !SYMBOL_REF_WEAK (x);
406
407 case LABEL_REF:
408 return true;
409
410 case REG:
411 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
412 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
413 || x == stack_pointer_rtx
414 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
415 return true;
416 /* All of the virtual frame registers are stack references. */
417 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
418 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
419 return true;
420 return false;
421
422 case CONST:
423 return nonzero_address_p (XEXP (x, 0));
424
425 case PLUS:
426 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
427 return nonzero_address_p (XEXP (x, 0));
428 /* Handle PIC references. */
429 else if (XEXP (x, 0) == pic_offset_table_rtx
430 && CONSTANT_P (XEXP (x, 1)))
431 return true;
432 return false;
433
434 case PRE_MODIFY:
435 /* Similar to the above; allow positive offsets. Further, since
436 auto-inc is only allowed in memories, the register must be a
437 pointer. */
438 if (GET_CODE (XEXP (x, 1)) == CONST_INT
439 && INTVAL (XEXP (x, 1)) > 0)
440 return true;
441 return nonzero_address_p (XEXP (x, 0));
442
443 case PRE_INC:
444 /* Similarly. Further, the offset is always positive. */
445 return true;
446
447 case PRE_DEC:
448 case POST_DEC:
449 case POST_INC:
450 case POST_MODIFY:
451 return nonzero_address_p (XEXP (x, 0));
452
453 case LO_SUM:
454 return nonzero_address_p (XEXP (x, 1));
455
456 default:
457 break;
458 }
459
460 /* If it isn't one of the cases above, it might be zero. */
461 return false;
462 }
463
464 /* Return 1 if X refers to a memory location whose address
465 cannot be compared reliably with constant addresses,
466 or if X refers to a BLKmode memory object.
467 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
468 zero, we are slightly more conservative. */
469
470 bool
471 rtx_addr_varies_p (const_rtx x, bool for_alias)
472 {
473 enum rtx_code code;
474 int i;
475 const char *fmt;
476
477 if (x == 0)
478 return 0;
479
480 code = GET_CODE (x);
481 if (code == MEM)
482 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
483
484 fmt = GET_RTX_FORMAT (code);
485 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
486 if (fmt[i] == 'e')
487 {
488 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
489 return 1;
490 }
491 else if (fmt[i] == 'E')
492 {
493 int j;
494 for (j = 0; j < XVECLEN (x, i); j++)
495 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
496 return 1;
497 }
498 return 0;
499 }
500
501 /* Return the value of the integer term in X, if one is apparent;
502 otherwise return 0.
503 Only obvious integer terms are detected.
504 This is used in cse.c with the `related_value' field. */
505
506 HOST_WIDE_INT
507 get_integer_term (const_rtx x)
508 {
509 if (GET_CODE (x) == CONST)
510 x = XEXP (x, 0);
511
512 if (GET_CODE (x) == MINUS
513 && GET_CODE (XEXP (x, 1)) == CONST_INT)
514 return - INTVAL (XEXP (x, 1));
515 if (GET_CODE (x) == PLUS
516 && GET_CODE (XEXP (x, 1)) == CONST_INT)
517 return INTVAL (XEXP (x, 1));
518 return 0;
519 }
520
521 /* If X is a constant, return the value sans apparent integer term;
522 otherwise return 0.
523 Only obvious integer terms are detected. */
524
525 rtx
526 get_related_value (const_rtx x)
527 {
528 if (GET_CODE (x) != CONST)
529 return 0;
530 x = XEXP (x, 0);
531 if (GET_CODE (x) == PLUS
532 && GET_CODE (XEXP (x, 1)) == CONST_INT)
533 return XEXP (x, 0);
534 else if (GET_CODE (x) == MINUS
535 && GET_CODE (XEXP (x, 1)) == CONST_INT)
536 return XEXP (x, 0);
537 return 0;
538 }
539
540 /* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
541 to somewhere in the same object or object_block as SYMBOL. */
542
543 bool
544 offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
545 {
546 tree decl;
547
548 if (GET_CODE (symbol) != SYMBOL_REF)
549 return false;
550
551 if (offset == 0)
552 return true;
553
554 if (offset > 0)
555 {
556 if (CONSTANT_POOL_ADDRESS_P (symbol)
557 && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
558 return true;
559
560 decl = SYMBOL_REF_DECL (symbol);
561 if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
562 return true;
563 }
564
565 if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
566 && SYMBOL_REF_BLOCK (symbol)
567 && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
568 && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
569 < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
570 return true;
571
572 return false;
573 }
574
575 /* Split X into a base and a constant offset, storing them in *BASE_OUT
576 and *OFFSET_OUT respectively. */
577
578 void
579 split_const (rtx x, rtx *base_out, rtx *offset_out)
580 {
581 if (GET_CODE (x) == CONST)
582 {
583 x = XEXP (x, 0);
584 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
585 {
586 *base_out = XEXP (x, 0);
587 *offset_out = XEXP (x, 1);
588 return;
589 }
590 }
591 *base_out = x;
592 *offset_out = const0_rtx;
593 }
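
/* Illustrative sketch, not part of the original file: for an rtx such as
   (const (plus (symbol_ref "x") (const_int 8))), split_const stores the
   SYMBOL_REF in *BASE_OUT and (const_int 8) in *OFFSET_OUT; for any other
   rtx it stores X itself and const0_rtx.  Hypothetical helper.  */
#if 0
static HOST_WIDE_INT
constant_offset_of (rtx x)
{
  rtx base, offset;
  split_const (x, &base, &offset);
  return INTVAL (offset);  /* Zero when there is no apparent offset.  */
}
#endif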
594
595 /* Return the number of places FIND appears within X. If COUNT_DEST is
596 zero, we do not count occurrences inside the destination of a SET. */
597
598 int
599 count_occurrences (const_rtx x, const_rtx find, int count_dest)
600 {
601 int i, j;
602 enum rtx_code code;
603 const char *format_ptr;
604 int count;
605
606 if (x == find)
607 return 1;
608
609 code = GET_CODE (x);
610
611 switch (code)
612 {
613 case REG:
614 case CONST_INT:
615 case CONST_DOUBLE:
616 case CONST_FIXED:
617 case CONST_VECTOR:
618 case SYMBOL_REF:
619 case CODE_LABEL:
620 case PC:
621 case CC0:
622 return 0;
623
624 case EXPR_LIST:
625 count = count_occurrences (XEXP (x, 0), find, count_dest);
626 if (XEXP (x, 1))
627 count += count_occurrences (XEXP (x, 1), find, count_dest);
628 return count;
629
630 case MEM:
631 if (MEM_P (find) && rtx_equal_p (x, find))
632 return 1;
633 break;
634
635 case SET:
636 if (SET_DEST (x) == find && ! count_dest)
637 return count_occurrences (SET_SRC (x), find, count_dest);
638 break;
639
640 default:
641 break;
642 }
643
644 format_ptr = GET_RTX_FORMAT (code);
645 count = 0;
646
647 for (i = 0; i < GET_RTX_LENGTH (code); i++)
648 {
649 switch (*format_ptr++)
650 {
651 case 'e':
652 count += count_occurrences (XEXP (x, i), find, count_dest);
653 break;
654
655 case 'E':
656 for (j = 0; j < XVECLEN (x, i); j++)
657 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
658 break;
659 }
660 }
661 return count;
662 }
663
664
665 /* Nonzero if register REG appears somewhere within IN.
666 Also works if REG is not a register; in this case it checks
667 for a subexpression of IN that is Lisp "equal" to REG. */
668
669 int
670 reg_mentioned_p (const_rtx reg, const_rtx in)
671 {
672 const char *fmt;
673 int i;
674 enum rtx_code code;
675
676 if (in == 0)
677 return 0;
678
679 if (reg == in)
680 return 1;
681
682 if (GET_CODE (in) == LABEL_REF)
683 return reg == XEXP (in, 0);
684
685 code = GET_CODE (in);
686
687 switch (code)
688 {
689 /* Compare registers by number. */
690 case REG:
691 return REG_P (reg) && REGNO (in) == REGNO (reg);
692
693 /* These codes have no constituent expressions
694 and are unique. */
695 case SCRATCH:
696 case CC0:
697 case PC:
698 return 0;
699
700 case CONST_INT:
701 case CONST_VECTOR:
702 case CONST_DOUBLE:
703 case CONST_FIXED:
704 /* These are kept unique for a given value. */
705 return 0;
706
707 default:
708 break;
709 }
710
711 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
712 return 1;
713
714 fmt = GET_RTX_FORMAT (code);
715
716 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
717 {
718 if (fmt[i] == 'E')
719 {
720 int j;
721 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
722 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
723 return 1;
724 }
725 else if (fmt[i] == 'e'
726 && reg_mentioned_p (reg, XEXP (in, i)))
727 return 1;
728 }
729 return 0;
730 }
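
/* Illustrative sketch, not part of the original file: a typical use of
   reg_mentioned_p is to make sure a register does not appear anywhere in
   an insn before rewriting that insn.  Hypothetical helper.  */
#if 0
static bool
reg_unused_in_insn_p (const_rtx reg, const_rtx insn)
{
  return INSN_P (insn) && !reg_mentioned_p (reg, PATTERN (insn));
}
#endif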
731
732 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
733 no CODE_LABEL insn. */
734
735 int
736 no_labels_between_p (const_rtx beg, const_rtx end)
737 {
738 rtx p;
739 if (beg == end)
740 return 0;
741 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
742 if (LABEL_P (p))
743 return 0;
744 return 1;
745 }
746
747 /* Nonzero if register REG is used in an insn between
748 FROM_INSN and TO_INSN (exclusive of those two). */
749
750 int
751 reg_used_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
752 {
753 rtx insn;
754
755 if (from_insn == to_insn)
756 return 0;
757
758 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
759 if (INSN_P (insn)
760 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
761 || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
762 return 1;
763 return 0;
764 }
765
766 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
767 is entirely replaced by a new value and the only use is as a SET_DEST,
768 we do not consider it a reference. */
769
770 int
771 reg_referenced_p (const_rtx x, const_rtx body)
772 {
773 int i;
774
775 switch (GET_CODE (body))
776 {
777 case SET:
778 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
779 return 1;
780
781 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
782 of a REG that occupies all of the REG, the insn references X if
783 it is mentioned in the destination. */
784 if (GET_CODE (SET_DEST (body)) != CC0
785 && GET_CODE (SET_DEST (body)) != PC
786 && !REG_P (SET_DEST (body))
787 && ! (GET_CODE (SET_DEST (body)) == SUBREG
788 && REG_P (SUBREG_REG (SET_DEST (body)))
789 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
790 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
791 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
792 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
793 && reg_overlap_mentioned_p (x, SET_DEST (body)))
794 return 1;
795 return 0;
796
797 case ASM_OPERANDS:
798 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
799 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
800 return 1;
801 return 0;
802
803 case CALL:
804 case USE:
805 case IF_THEN_ELSE:
806 return reg_overlap_mentioned_p (x, body);
807
808 case TRAP_IF:
809 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
810
811 case PREFETCH:
812 return reg_overlap_mentioned_p (x, XEXP (body, 0));
813
814 case UNSPEC:
815 case UNSPEC_VOLATILE:
816 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
817 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
818 return 1;
819 return 0;
820
821 case PARALLEL:
822 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
823 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
824 return 1;
825 return 0;
826
827 case CLOBBER:
828 if (MEM_P (XEXP (body, 0)))
829 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
830 return 1;
831 return 0;
832
833 case COND_EXEC:
834 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
835 return 1;
836 return reg_referenced_p (x, COND_EXEC_CODE (body));
837
838 default:
839 return 0;
840 }
841 }
842
843 /* Nonzero if register REG is set or clobbered in an insn between
844 FROM_INSN and TO_INSN (exclusive of those two). */
845
846 int
847 reg_set_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
848 {
849 const_rtx insn;
850
851 if (from_insn == to_insn)
852 return 0;
853
854 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
855 if (INSN_P (insn) && reg_set_p (reg, insn))
856 return 1;
857 return 0;
858 }
859
860 /* Return nonzero if REG is set or clobbered in INSN (the internals of reg_set_between_p). */
861 int
862 reg_set_p (const_rtx reg, const_rtx insn)
863 {
864 /* We can be passed an insn or part of one. If we are passed an insn,
865 check if a side-effect of the insn clobbers REG. */
866 if (INSN_P (insn)
867 && (FIND_REG_INC_NOTE (insn, reg)
868 || (CALL_P (insn)
869 && ((REG_P (reg)
870 && REGNO (reg) < FIRST_PSEUDO_REGISTER
871 && overlaps_hard_reg_set_p (regs_invalidated_by_call,
872 GET_MODE (reg), REGNO (reg)))
873 || MEM_P (reg)
874 || find_reg_fusage (insn, CLOBBER, reg)))))
875 return 1;
876
877 return set_of (reg, insn) != NULL_RTX;
878 }
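
/* Illustrative sketch, not part of the original file: combining the two
   predicates above to check that REG keeps its value from FROM up to and
   including TO.  reg_set_between_p excludes both endpoints, so TO itself
   is tested separately.  Hypothetical helper.  */
#if 0
static bool
reg_unchanged_through_p (const_rtx reg, const_rtx from, const_rtx to)
{
  return !reg_set_between_p (reg, from, to)
         && !(INSN_P (to) && reg_set_p (reg, to));
}
#endif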
879
880 /* Similar to reg_set_between_p, but check all registers in X. Return 0
881 only if none of them are modified between START and END. Return 1 if
882 X contains a MEM; this routine does use memory aliasing. */
883
884 int
885 modified_between_p (const_rtx x, const_rtx start, const_rtx end)
886 {
887 const enum rtx_code code = GET_CODE (x);
888 const char *fmt;
889 int i, j;
890 rtx insn;
891
892 if (start == end)
893 return 0;
894
895 switch (code)
896 {
897 case CONST_INT:
898 case CONST_DOUBLE:
899 case CONST_FIXED:
900 case CONST_VECTOR:
901 case CONST:
902 case SYMBOL_REF:
903 case LABEL_REF:
904 return 0;
905
906 case PC:
907 case CC0:
908 return 1;
909
910 case MEM:
911 if (modified_between_p (XEXP (x, 0), start, end))
912 return 1;
913 if (MEM_READONLY_P (x))
914 return 0;
915 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
916 if (memory_modified_in_insn_p (x, insn))
917 return 1;
918 return 0;
919 break;
920
921 case REG:
922 return reg_set_between_p (x, start, end);
923
924 default:
925 break;
926 }
927
928 fmt = GET_RTX_FORMAT (code);
929 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
930 {
931 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
932 return 1;
933
934 else if (fmt[i] == 'E')
935 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
936 if (modified_between_p (XVECEXP (x, i, j), start, end))
937 return 1;
938 }
939
940 return 0;
941 }
942
943 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
944 of them are modified in INSN. Return 1 if X contains a MEM; this routine
945 does use memory aliasing. */
946
947 int
948 modified_in_p (const_rtx x, const_rtx insn)
949 {
950 const enum rtx_code code = GET_CODE (x);
951 const char *fmt;
952 int i, j;
953
954 switch (code)
955 {
956 case CONST_INT:
957 case CONST_DOUBLE:
958 case CONST_FIXED:
959 case CONST_VECTOR:
960 case CONST:
961 case SYMBOL_REF:
962 case LABEL_REF:
963 return 0;
964
965 case PC:
966 case CC0:
967 return 1;
968
969 case MEM:
970 if (modified_in_p (XEXP (x, 0), insn))
971 return 1;
972 if (MEM_READONLY_P (x))
973 return 0;
974 if (memory_modified_in_insn_p (x, insn))
975 return 1;
976 return 0;
977 break;
978
979 case REG:
980 return reg_set_p (x, insn);
981
982 default:
983 break;
984 }
985
986 fmt = GET_RTX_FORMAT (code);
987 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
988 {
989 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
990 return 1;
991
992 else if (fmt[i] == 'E')
993 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
994 if (modified_in_p (XVECEXP (x, i, j), insn))
995 return 1;
996 }
997
998 return 0;
999 }
1000
1001 /* Helper function for set_of. */
1002 struct set_of_data
1003 {
1004 const_rtx found;
1005 const_rtx pat;
1006 };
1007
1008 static void
1009 set_of_1 (rtx x, const_rtx pat, void *data1)
1010 {
1011 struct set_of_data *const data = (struct set_of_data *) (data1);
1012 if (rtx_equal_p (x, data->pat)
1013 || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
1014 data->found = pat;
1015 }
1016
1017 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
1018 (either directly or via STRICT_LOW_PART and similar modifiers). */
1019 const_rtx
1020 set_of (const_rtx pat, const_rtx insn)
1021 {
1022 struct set_of_data data;
1023 data.found = NULL_RTX;
1024 data.pat = pat;
1025 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1026 return data.found;
1027 }
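
/* Illustrative sketch, not part of the original file: unlike reg_set_p,
   set_of returns the SET or CLOBBER expression itself, which lets a
   caller distinguish a real assignment from a clobber.  Hypothetical
   helper, assuming only the declarations above.  */
#if 0
static bool
insn_only_clobbers_reg_p (const_rtx reg, const_rtx insn)
{
  const_rtx expr = set_of (reg, insn);
  return expr != NULL_RTX && GET_CODE (expr) == CLOBBER;
}
#endif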
1028
1029 /* Given an INSN, return a SET expression if this insn has only a single SET.
1030 It may also have CLOBBERs, USEs, or SETs whose output
1031 will not be used, which we ignore. */
1032
1033 rtx
1034 single_set_2 (const_rtx insn, const_rtx pat)
1035 {
1036 rtx set = NULL;
1037 int set_verified = 1;
1038 int i;
1039
1040 if (GET_CODE (pat) == PARALLEL)
1041 {
1042 for (i = 0; i < XVECLEN (pat, 0); i++)
1043 {
1044 rtx sub = XVECEXP (pat, 0, i);
1045 switch (GET_CODE (sub))
1046 {
1047 case USE:
1048 case CLOBBER:
1049 break;
1050
1051 case SET:
1052 /* We can consider insns having multiple sets, where all
1053 but one are dead, as single set insns. In the common case
1054 only a single set is present in the pattern, so we want
1055 to avoid checking for REG_UNUSED notes unless necessary.
1056
1057 When we reach a set for the first time, we just expect it is
1058 the single set we are looking for; only when more
1059 sets are found in the insn do we check them. */
1060 if (!set_verified)
1061 {
1062 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1063 && !side_effects_p (set))
1064 set = NULL;
1065 else
1066 set_verified = 1;
1067 }
1068 if (!set)
1069 set = sub, set_verified = 0;
1070 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1071 || side_effects_p (sub))
1072 return NULL_RTX;
1073 break;
1074
1075 default:
1076 return NULL_RTX;
1077 }
1078 }
1079 }
1080 return set;
1081 }
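
/* Illustrative sketch, not part of the original file: most callers go
   through the single_set wrapper in rtl.h and then inspect SET_SRC and
   SET_DEST of the result.  Hypothetical helper, assuming that wrapper.  */
#if 0
static bool
simple_reg_copy_p (const_rtx insn)
{
  rtx set = single_set (insn);
  return set != NULL_RTX
         && REG_P (SET_DEST (set))
         && REG_P (SET_SRC (set));
}
#endif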
1082
1083 /* Given an INSN, return nonzero if it has more than one SET, else return
1084 zero. */
1085
1086 int
1087 multiple_sets (const_rtx insn)
1088 {
1089 int found;
1090 int i;
1091
1092 /* INSN must be an insn. */
1093 if (! INSN_P (insn))
1094 return 0;
1095
1096 /* Only a PARALLEL can have multiple SETs. */
1097 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1098 {
1099 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1100 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1101 {
1102 /* If we have already found a SET, then return now. */
1103 if (found)
1104 return 1;
1105 else
1106 found = 1;
1107 }
1108 }
1109
1110 /* Either zero or one SET. */
1111 return 0;
1112 }
1113
1114 /* Return nonzero if the destination of SET equals the source
1115 and there are no side effects. */
1116
1117 int
1118 set_noop_p (const_rtx set)
1119 {
1120 rtx src = SET_SRC (set);
1121 rtx dst = SET_DEST (set);
1122
1123 if (dst == pc_rtx && src == pc_rtx)
1124 return 1;
1125
1126 if (MEM_P (dst) && MEM_P (src))
1127 return rtx_equal_p (dst, src) && !side_effects_p (dst);
1128
1129 if (GET_CODE (dst) == ZERO_EXTRACT)
1130 return rtx_equal_p (XEXP (dst, 0), src)
1131 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1132 && !side_effects_p (src);
1133
1134 if (GET_CODE (dst) == STRICT_LOW_PART)
1135 dst = XEXP (dst, 0);
1136
1137 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1138 {
1139 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1140 return 0;
1141 src = SUBREG_REG (src);
1142 dst = SUBREG_REG (dst);
1143 }
1144
1145 return (REG_P (src) && REG_P (dst)
1146 && REGNO (src) == REGNO (dst));
1147 }
1148
1149 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1150 value to itself. */
1151
1152 int
1153 noop_move_p (const_rtx insn)
1154 {
1155 rtx pat = PATTERN (insn);
1156
1157 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1158 return 1;
1159
1160 /* Insns carrying these notes are useful later on. */
1161 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1162 return 0;
1163
1164 if (GET_CODE (pat) == SET && set_noop_p (pat))
1165 return 1;
1166
1167 if (GET_CODE (pat) == PARALLEL)
1168 {
1169 int i;
1170 /* If nothing but SETs of registers to themselves,
1171 this insn can also be deleted. */
1172 for (i = 0; i < XVECLEN (pat, 0); i++)
1173 {
1174 rtx tem = XVECEXP (pat, 0, i);
1175
1176 if (GET_CODE (tem) == USE
1177 || GET_CODE (tem) == CLOBBER)
1178 continue;
1179
1180 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1181 return 0;
1182 }
1183
1184 return 1;
1185 }
1186 return 0;
1187 }
1188
1189
1190 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1191 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1192 If the object was modified, if we hit a partial assignment to X, or hit a
1193 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1194 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1195 be the src. */
1196
1197 rtx
1198 find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
1199 {
1200 rtx p;
1201
1202 for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
1203 p = PREV_INSN (p))
1204 if (INSN_P (p))
1205 {
1206 rtx set = single_set (p);
1207 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1208
1209 if (set && rtx_equal_p (x, SET_DEST (set)))
1210 {
1211 rtx src = SET_SRC (set);
1212
1213 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1214 src = XEXP (note, 0);
1215
1216 if ((valid_to == NULL_RTX
1217 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1218 /* Reject hard registers because we don't usually want
1219 to use them; we'd rather use a pseudo. */
1220 && (! (REG_P (src)
1221 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1222 {
1223 *pinsn = p;
1224 return src;
1225 }
1226 }
1227
1228 /* If set in a non-simple way, we don't have a value. */
1229 if (reg_set_p (x, p))
1230 break;
1231 }
1232
1233 return x;
1234 }
1235
1236 /* Return nonzero if register in range [REGNO, ENDREGNO)
1237 appears either explicitly or implicitly in X
1238 other than being stored into.
1239
1240 References contained within the substructure at LOC do not count.
1241 LOC may be zero, meaning don't ignore anything. */
1242
1243 int
1244 refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
1245 rtx *loc)
1246 {
1247 int i;
1248 unsigned int x_regno;
1249 RTX_CODE code;
1250 const char *fmt;
1251
1252 repeat:
1253 /* The contents of a REG_NONNEG note are always zero, so we must come here
1254 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1255 if (x == 0)
1256 return 0;
1257
1258 code = GET_CODE (x);
1259
1260 switch (code)
1261 {
1262 case REG:
1263 x_regno = REGNO (x);
1264
1265 /* If we are modifying the stack, frame, or argument pointer, it will
1266 clobber a virtual register. In fact, we could be more precise,
1267 but it isn't worth it. */
1268 if ((x_regno == STACK_POINTER_REGNUM
1269 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1270 || x_regno == ARG_POINTER_REGNUM
1271 #endif
1272 || x_regno == FRAME_POINTER_REGNUM)
1273 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1274 return 1;
1275
1276 return endregno > x_regno && regno < END_REGNO (x);
1277
1278 case SUBREG:
1279 /* If this is a SUBREG of a hard reg, we can see exactly which
1280 registers are being modified. Otherwise, handle normally. */
1281 if (REG_P (SUBREG_REG (x))
1282 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1283 {
1284 unsigned int inner_regno = subreg_regno (x);
1285 unsigned int inner_endregno
1286 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1287 ? subreg_nregs (x) : 1);
1288
1289 return endregno > inner_regno && regno < inner_endregno;
1290 }
1291 break;
1292
1293 case CLOBBER:
1294 case SET:
1295 if (&SET_DEST (x) != loc
1296 /* Note setting a SUBREG counts as referring to the REG it is in for
1297 a pseudo but not for hard registers since we can
1298 treat each word individually. */
1299 && ((GET_CODE (SET_DEST (x)) == SUBREG
1300 && loc != &SUBREG_REG (SET_DEST (x))
1301 && REG_P (SUBREG_REG (SET_DEST (x)))
1302 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1303 && refers_to_regno_p (regno, endregno,
1304 SUBREG_REG (SET_DEST (x)), loc))
1305 || (!REG_P (SET_DEST (x))
1306 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1307 return 1;
1308
1309 if (code == CLOBBER || loc == &SET_SRC (x))
1310 return 0;
1311 x = SET_SRC (x);
1312 goto repeat;
1313
1314 default:
1315 break;
1316 }
1317
1318 /* X does not match, so try its subexpressions. */
1319
1320 fmt = GET_RTX_FORMAT (code);
1321 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1322 {
1323 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1324 {
1325 if (i == 0)
1326 {
1327 x = XEXP (x, 0);
1328 goto repeat;
1329 }
1330 else
1331 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1332 return 1;
1333 }
1334 else if (fmt[i] == 'E')
1335 {
1336 int j;
1337 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1338 if (loc != &XVECEXP (x, i, j)
1339 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1340 return 1;
1341 }
1342 }
1343 return 0;
1344 }
1345
1346 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1347 we check if any register number in X conflicts with the relevant register
1348 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1349 contains a MEM (we don't bother checking for memory addresses that can't
1350 conflict because we expect this to be a rare case). */
1351
1352 int
1353 reg_overlap_mentioned_p (const_rtx x, const_rtx in)
1354 {
1355 unsigned int regno, endregno;
1356
1357 /* If either argument is a constant, then modifying X cannot
1358 affect IN. Here we look at IN; we can profitably combine
1359 CONSTANT_P (x) with the switch statement below. */
1360 if (CONSTANT_P (in))
1361 return 0;
1362
1363 recurse:
1364 switch (GET_CODE (x))
1365 {
1366 case STRICT_LOW_PART:
1367 case ZERO_EXTRACT:
1368 case SIGN_EXTRACT:
1369 /* Overly conservative. */
1370 x = XEXP (x, 0);
1371 goto recurse;
1372
1373 case SUBREG:
1374 regno = REGNO (SUBREG_REG (x));
1375 if (regno < FIRST_PSEUDO_REGISTER)
1376 regno = subreg_regno (x);
1377 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1378 ? subreg_nregs (x) : 1);
1379 goto do_reg;
1380
1381 case REG:
1382 regno = REGNO (x);
1383 endregno = END_REGNO (x);
1384 do_reg:
1385 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1386
1387 case MEM:
1388 {
1389 const char *fmt;
1390 int i;
1391
1392 if (MEM_P (in))
1393 return 1;
1394
1395 fmt = GET_RTX_FORMAT (GET_CODE (in));
1396 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1397 if (fmt[i] == 'e')
1398 {
1399 if (reg_overlap_mentioned_p (x, XEXP (in, i)))
1400 return 1;
1401 }
1402 else if (fmt[i] == 'E')
1403 {
1404 int j;
1405 for (j = XVECLEN (in, i) - 1; j >= 0; --j)
1406 if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
1407 return 1;
1408 }
1409
1410 return 0;
1411 }
1412
1413 case SCRATCH:
1414 case PC:
1415 case CC0:
1416 return reg_mentioned_p (x, in);
1417
1418 case PARALLEL:
1419 {
1420 int i;
1421
1422 /* If any register in here refers to it we return true. */
1423 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1424 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1425 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1426 return 1;
1427 return 0;
1428 }
1429
1430 default:
1431 gcc_assert (CONSTANT_P (x));
1432 return 0;
1433 }
1434 }
1435
1436 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1437 (X would be the pattern of an insn). DATA is an arbitrary pointer,
1438 ignored by note_stores, but passed to FUN.
1439
1440 FUN receives three arguments:
1441 1. the REG, MEM, CC0 or PC being stored in or clobbered,
1442 2. the SET or CLOBBER rtx that does the store,
1443 3. the pointer DATA provided to note_stores.
1444
1445 If the item being stored in or clobbered is a SUBREG of a hard register,
1446 the SUBREG will be passed. */
1447
1448 void
1449 note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data)
1450 {
1451 int i;
1452
1453 if (GET_CODE (x) == COND_EXEC)
1454 x = COND_EXEC_CODE (x);
1455
1456 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1457 {
1458 rtx dest = SET_DEST (x);
1459
1460 while ((GET_CODE (dest) == SUBREG
1461 && (!REG_P (SUBREG_REG (dest))
1462 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1463 || GET_CODE (dest) == ZERO_EXTRACT
1464 || GET_CODE (dest) == STRICT_LOW_PART)
1465 dest = XEXP (dest, 0);
1466
1467 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1468 each of whose first operand is a register. */
1469 if (GET_CODE (dest) == PARALLEL)
1470 {
1471 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1472 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1473 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1474 }
1475 else
1476 (*fun) (dest, x, data);
1477 }
1478
1479 else if (GET_CODE (x) == PARALLEL)
1480 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1481 note_stores (XVECEXP (x, 0, i), fun, data);
1482 }
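
/* Illustrative sketch, not part of the original file: note_stores is
   normally used with a small callback plus a data pointer, for example
   to collect every hard register written by an insn.  Hypothetical
   callback; assumes the hard-reg-set.h helper add_to_hard_reg_set.  */
#if 0
static void
record_hard_reg_store (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
                       void *data)
{
  HARD_REG_SET *written = (HARD_REG_SET *) data;
  if (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    add_to_hard_reg_set (written, GET_MODE (dest), REGNO (dest));
}
/* Caller:  note_stores (PATTERN (insn), record_hard_reg_store, &written);  */
#endif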
1483
1484 /* Like note_stores, but call FUN for each expression that is being
1485 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1486 FUN for each expression, not any interior subexpressions. FUN receives a
1487 pointer to the expression and the DATA passed to this function.
1488
1489 Note that this is not quite the same test as that done in reg_referenced_p
1490 since that considers something as being referenced if it is being
1491 partially set, while we do not. */
1492
1493 void
1494 note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
1495 {
1496 rtx body = *pbody;
1497 int i;
1498
1499 switch (GET_CODE (body))
1500 {
1501 case COND_EXEC:
1502 (*fun) (&COND_EXEC_TEST (body), data);
1503 note_uses (&COND_EXEC_CODE (body), fun, data);
1504 return;
1505
1506 case PARALLEL:
1507 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1508 note_uses (&XVECEXP (body, 0, i), fun, data);
1509 return;
1510
1511 case SEQUENCE:
1512 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1513 note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
1514 return;
1515
1516 case USE:
1517 (*fun) (&XEXP (body, 0), data);
1518 return;
1519
1520 case ASM_OPERANDS:
1521 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1522 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1523 return;
1524
1525 case TRAP_IF:
1526 (*fun) (&TRAP_CONDITION (body), data);
1527 return;
1528
1529 case PREFETCH:
1530 (*fun) (&XEXP (body, 0), data);
1531 return;
1532
1533 case UNSPEC:
1534 case UNSPEC_VOLATILE:
1535 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1536 (*fun) (&XVECEXP (body, 0, i), data);
1537 return;
1538
1539 case CLOBBER:
1540 if (MEM_P (XEXP (body, 0)))
1541 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1542 return;
1543
1544 case SET:
1545 {
1546 rtx dest = SET_DEST (body);
1547
1548 /* For sets we replace everything in the source plus registers in the memory
1549 expression of the store and the operands of a ZERO_EXTRACT. */
1550 (*fun) (&SET_SRC (body), data);
1551
1552 if (GET_CODE (dest) == ZERO_EXTRACT)
1553 {
1554 (*fun) (&XEXP (dest, 1), data);
1555 (*fun) (&XEXP (dest, 2), data);
1556 }
1557
1558 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1559 dest = XEXP (dest, 0);
1560
1561 if (MEM_P (dest))
1562 (*fun) (&XEXP (dest, 0), data);
1563 }
1564 return;
1565
1566 default:
1567 /* All the other possibilities never store. */
1568 (*fun) (pbody, data);
1569 return;
1570 }
1571 }
1572
1573 /* Return nonzero if X's old contents don't survive after INSN.
1574 This will be true if X is (cc0) or if X is a register and
1575 X dies in INSN or because INSN entirely sets X.
1576
1577 "Entirely set" means set directly and not through a SUBREG, or
1578 ZERO_EXTRACT, so no trace of the old contents remains.
1579 Likewise, REG_INC does not count.
1580
1581 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1582 but for this use that makes no difference, since regs don't overlap
1583 during their lifetimes. Therefore, this function may be used
1584 at any time after deaths have been computed.
1585
1586 If REG is a hard reg that occupies multiple machine registers, this
1587 function will only return 1 if each of those registers will be replaced
1588 by INSN. */
1589
1590 int
1591 dead_or_set_p (const_rtx insn, const_rtx x)
1592 {
1593 unsigned int regno, end_regno;
1594 unsigned int i;
1595
1596 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1597 if (GET_CODE (x) == CC0)
1598 return 1;
1599
1600 gcc_assert (REG_P (x));
1601
1602 regno = REGNO (x);
1603 end_regno = END_REGNO (x);
1604 for (i = regno; i < end_regno; i++)
1605 if (! dead_or_set_regno_p (insn, i))
1606 return 0;
1607
1608 return 1;
1609 }
1610
1611 /* Return TRUE iff DEST is a register or subreg of a register and
1612 doesn't change the number of words of the inner register, and any
1613 part of the register is TEST_REGNO. */
1614
1615 static bool
1616 covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
1617 {
1618 unsigned int regno, endregno;
1619
1620 if (GET_CODE (dest) == SUBREG
1621 && (((GET_MODE_SIZE (GET_MODE (dest))
1622 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1623 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1624 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1625 dest = SUBREG_REG (dest);
1626
1627 if (!REG_P (dest))
1628 return false;
1629
1630 regno = REGNO (dest);
1631 endregno = END_REGNO (dest);
1632 return (test_regno >= regno && test_regno < endregno);
1633 }
1634
1635 /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1636 any member matches the covers_regno_no_parallel_p criteria. */
1637
1638 static bool
1639 covers_regno_p (const_rtx dest, unsigned int test_regno)
1640 {
1641 if (GET_CODE (dest) == PARALLEL)
1642 {
1643 /* Some targets place small structures in registers for return
1644 values of functions, and those registers are wrapped in
1645 PARALLELs that we may see as the destination of a SET. */
1646 int i;
1647
1648 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1649 {
1650 rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
1651 if (inner != NULL_RTX
1652 && covers_regno_no_parallel_p (inner, test_regno))
1653 return true;
1654 }
1655
1656 return false;
1657 }
1658 else
1659 return covers_regno_no_parallel_p (dest, test_regno);
1660 }
1661
1662 /* Utility function for dead_or_set_p to check an individual register. */
1663
1664 int
1665 dead_or_set_regno_p (const_rtx insn, unsigned int test_regno)
1666 {
1667 const_rtx pattern;
1668
1669 /* See if there is a death note for something that includes TEST_REGNO. */
1670 if (find_regno_note (insn, REG_DEAD, test_regno))
1671 return 1;
1672
1673 if (CALL_P (insn)
1674 && find_regno_fusage (insn, CLOBBER, test_regno))
1675 return 1;
1676
1677 pattern = PATTERN (insn);
1678
1679 if (GET_CODE (pattern) == COND_EXEC)
1680 pattern = COND_EXEC_CODE (pattern);
1681
1682 if (GET_CODE (pattern) == SET)
1683 return covers_regno_p (SET_DEST (pattern), test_regno);
1684 else if (GET_CODE (pattern) == PARALLEL)
1685 {
1686 int i;
1687
1688 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1689 {
1690 rtx body = XVECEXP (pattern, 0, i);
1691
1692 if (GET_CODE (body) == COND_EXEC)
1693 body = COND_EXEC_CODE (body);
1694
1695 if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1696 && covers_regno_p (SET_DEST (body), test_regno))
1697 return 1;
1698 }
1699 }
1700
1701 return 0;
1702 }
1703
1704 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1705 If DATUM is nonzero, look for one whose datum is DATUM. */
1706
1707 rtx
1708 find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
1709 {
1710 rtx link;
1711
1712 gcc_assert (insn);
1713
1714 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1715 if (! INSN_P (insn))
1716 return 0;
1717 if (datum == 0)
1718 {
1719 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1720 if (REG_NOTE_KIND (link) == kind)
1721 return link;
1722 return 0;
1723 }
1724
1725 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1726 if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
1727 return link;
1728 return 0;
1729 }
1730
1731 /* Return the reg-note of kind KIND in insn INSN which applies to register
1732 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1733 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1734 it might be the case that the note overlaps REGNO. */
1735
1736 rtx
1737 find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
1738 {
1739 rtx link;
1740
1741 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1742 if (! INSN_P (insn))
1743 return 0;
1744
1745 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1746 if (REG_NOTE_KIND (link) == kind
1747 /* Verify that it is a register, so that scratch and MEM won't cause a
1748 problem here. */
1749 && REG_P (XEXP (link, 0))
1750 && REGNO (XEXP (link, 0)) <= regno
1751 && END_REGNO (XEXP (link, 0)) > regno)
1752 return link;
1753 return 0;
1754 }
1755
1756 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1757 has such a note. */
1758
1759 rtx
1760 find_reg_equal_equiv_note (const_rtx insn)
1761 {
1762 rtx link;
1763
1764 if (!INSN_P (insn))
1765 return 0;
1766
1767 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1768 if (REG_NOTE_KIND (link) == REG_EQUAL
1769 || REG_NOTE_KIND (link) == REG_EQUIV)
1770 {
1771 /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
1772 insns that have multiple sets. Checking single_set to
1773 make sure of this is not the proper check, as explained
1774 in the comment in set_unique_reg_note.
1775
1776 This should be changed into an assert. */
1777 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
1778 return 0;
1779 return link;
1780 }
1781 return NULL;
1782 }
1783
1784 /* Check whether INSN is a single_set whose source is known to be
1785 equivalent to a constant. Return that constant if so, otherwise
1786 return null. */
1787
1788 rtx
1789 find_constant_src (const_rtx insn)
1790 {
1791 rtx note, set, x;
1792
1793 set = single_set (insn);
1794 if (set)
1795 {
1796 x = avoid_constant_pool_reference (SET_SRC (set));
1797 if (CONSTANT_P (x))
1798 return x;
1799 }
1800
1801 note = find_reg_equal_equiv_note (insn);
1802 if (note && CONSTANT_P (XEXP (note, 0)))
1803 return XEXP (note, 0);
1804
1805 return NULL_RTX;
1806 }
1807
1808 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1809 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1810
1811 int
1812 find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
1813 {
1814 /* If it's not a CALL_INSN, it can't possibly have a
1815 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1816 if (!CALL_P (insn))
1817 return 0;
1818
1819 gcc_assert (datum);
1820
1821 if (!REG_P (datum))
1822 {
1823 rtx link;
1824
1825 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1826 link;
1827 link = XEXP (link, 1))
1828 if (GET_CODE (XEXP (link, 0)) == code
1829 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1830 return 1;
1831 }
1832 else
1833 {
1834 unsigned int regno = REGNO (datum);
1835
1836 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1837 to pseudo registers, so don't bother checking. */
1838
1839 if (regno < FIRST_PSEUDO_REGISTER)
1840 {
1841 unsigned int end_regno = END_HARD_REGNO (datum);
1842 unsigned int i;
1843
1844 for (i = regno; i < end_regno; i++)
1845 if (find_regno_fusage (insn, code, i))
1846 return 1;
1847 }
1848 }
1849
1850 return 0;
1851 }
1852
1853 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1854 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1855
1856 int
1857 find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
1858 {
1859 rtx link;
1860
1861 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1862 to pseudo registers, so don't bother checking. */
1863
1864 if (regno >= FIRST_PSEUDO_REGISTER
1865 || !CALL_P (insn) )
1866 return 0;
1867
1868 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1869 {
1870 rtx op, reg;
1871
1872 if (GET_CODE (op = XEXP (link, 0)) == code
1873 && REG_P (reg = XEXP (op, 0))
1874 && REGNO (reg) <= regno
1875 && END_HARD_REGNO (reg) > regno)
1876 return 1;
1877 }
1878
1879 return 0;
1880 }
1881
1882
1883 /* Add register note with kind KIND and datum DATUM to INSN. */
1884
1885 void
1886 add_reg_note (rtx insn, enum reg_note kind, rtx datum)
1887 {
1888 rtx note;
1889
1890 switch (kind)
1891 {
1892 case REG_CC_SETTER:
1893 case REG_CC_USER:
1894 case REG_LABEL_TARGET:
1895 case REG_LABEL_OPERAND:
1896 /* These types of register notes use an INSN_LIST rather than an
1897 EXPR_LIST, so that copying is done right and dumps look
1898 better. */
1899 note = alloc_INSN_LIST (datum, REG_NOTES (insn));
1900 PUT_REG_NOTE_KIND (note, kind);
1901 break;
1902
1903 default:
1904 note = alloc_EXPR_LIST (kind, datum, REG_NOTES (insn));
1905 break;
1906 }
1907
1908 REG_NOTES (insn) = note;
1909 }
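
/* Illustrative sketch, not part of the original file: a pass that emits
   an insn computing a known constant can record that fact for later
   passes.  Hypothetical helper built on add_reg_note; the note can be
   retrieved with find_reg_note and deleted with remove_note below.  */
#if 0
static void
mark_known_constant (rtx insn, HOST_WIDE_INT value)
{
  add_reg_note (insn, REG_EQUAL, GEN_INT (value));
}
#endif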
1910
1911 /* Remove register note NOTE from the REG_NOTES of INSN. */
1912
1913 void
1914 remove_note (rtx insn, const_rtx note)
1915 {
1916 rtx link;
1917
1918 if (note == NULL_RTX)
1919 return;
1920
1921 if (REG_NOTES (insn) == note)
1922 REG_NOTES (insn) = XEXP (note, 1);
1923 else
1924 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1925 if (XEXP (link, 1) == note)
1926 {
1927 XEXP (link, 1) = XEXP (note, 1);
1928 break;
1929 }
1930
1931 switch (REG_NOTE_KIND (note))
1932 {
1933 case REG_EQUAL:
1934 case REG_EQUIV:
1935 df_notes_rescan (insn);
1936 break;
1937 default:
1938 break;
1939 }
1940 }
1941
1942 /* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes. */
1943
1944 void
1945 remove_reg_equal_equiv_notes (rtx insn)
1946 {
1947 rtx *loc;
1948
1949 loc = &REG_NOTES (insn);
1950 while (*loc)
1951 {
1952 enum reg_note kind = REG_NOTE_KIND (*loc);
1953 if (kind == REG_EQUAL || kind == REG_EQUIV)
1954 *loc = XEXP (*loc, 1);
1955 else
1956 loc = &XEXP (*loc, 1);
1957 }
1958 }
1959
1960 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1961 return 1 if it is found. A simple equality test is used to determine if
1962 NODE matches. */
1963
1964 int
1965 in_expr_list_p (const_rtx listp, const_rtx node)
1966 {
1967 const_rtx x;
1968
1969 for (x = listp; x; x = XEXP (x, 1))
1970 if (node == XEXP (x, 0))
1971 return 1;
1972
1973 return 0;
1974 }
1975
1976 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1977 remove that entry from the list if it is found.
1978
1979 A simple equality test is used to determine if NODE matches. */
1980
1981 void
1982 remove_node_from_expr_list (const_rtx node, rtx *listp)
1983 {
1984 rtx temp = *listp;
1985 rtx prev = NULL_RTX;
1986
1987 while (temp)
1988 {
1989 if (node == XEXP (temp, 0))
1990 {
1991 /* Splice the node out of the list. */
1992 if (prev)
1993 XEXP (prev, 1) = XEXP (temp, 1);
1994 else
1995 *listp = XEXP (temp, 1);
1996
1997 return;
1998 }
1999
2000 prev = temp;
2001 temp = XEXP (temp, 1);
2002 }
2003 }
2004
2005 /* Nonzero if X contains any volatile instructions. These are instructions
2006 which may cause unpredictable machine state, and thus no
2007 instructions should be moved or combined across them. This includes
2008 only volatile asms and UNSPEC_VOLATILE instructions. */
2009
2010 int
2011 volatile_insn_p (const_rtx x)
2012 {
2013 const RTX_CODE code = GET_CODE (x);
2014 switch (code)
2015 {
2016 case LABEL_REF:
2017 case SYMBOL_REF:
2018 case CONST_INT:
2019 case CONST:
2020 case CONST_DOUBLE:
2021 case CONST_FIXED:
2022 case CONST_VECTOR:
2023 case CC0:
2024 case PC:
2025 case REG:
2026 case SCRATCH:
2027 case CLOBBER:
2028 case ADDR_VEC:
2029 case ADDR_DIFF_VEC:
2030 case CALL:
2031 case MEM:
2032 return 0;
2033
2034 case UNSPEC_VOLATILE:
2035 /* case TRAP_IF: This isn't clear yet. */
2036 return 1;
2037
2038 case ASM_INPUT:
2039 case ASM_OPERANDS:
2040 if (MEM_VOLATILE_P (x))
2041 return 1;
2042
2043 default:
2044 break;
2045 }
2046
2047 /* Recursively scan the operands of this expression. */
2048
2049 {
2050 const char *const fmt = GET_RTX_FORMAT (code);
2051 int i;
2052
2053 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2054 {
2055 if (fmt[i] == 'e')
2056 {
2057 if (volatile_insn_p (XEXP (x, i)))
2058 return 1;
2059 }
2060 else if (fmt[i] == 'E')
2061 {
2062 int j;
2063 for (j = 0; j < XVECLEN (x, i); j++)
2064 if (volatile_insn_p (XVECEXP (x, i, j)))
2065 return 1;
2066 }
2067 }
2068 }
2069 return 0;
2070 }
2071
2072 /* Nonzero if X contains any volatile memory references
2073 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2074
2075 int
2076 volatile_refs_p (const_rtx x)
2077 {
2078 const RTX_CODE code = GET_CODE (x);
2079 switch (code)
2080 {
2081 case LABEL_REF:
2082 case SYMBOL_REF:
2083 case CONST_INT:
2084 case CONST:
2085 case CONST_DOUBLE:
2086 case CONST_FIXED:
2087 case CONST_VECTOR:
2088 case CC0:
2089 case PC:
2090 case REG:
2091 case SCRATCH:
2092 case CLOBBER:
2093 case ADDR_VEC:
2094 case ADDR_DIFF_VEC:
2095 return 0;
2096
2097 case UNSPEC_VOLATILE:
2098 return 1;
2099
2100 case MEM:
2101 case ASM_INPUT:
2102 case ASM_OPERANDS:
2103 if (MEM_VOLATILE_P (x))
2104 return 1;
2105
2106 default:
2107 break;
2108 }
2109
2110 /* Recursively scan the operands of this expression. */
2111
2112 {
2113 const char *const fmt = GET_RTX_FORMAT (code);
2114 int i;
2115
2116 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2117 {
2118 if (fmt[i] == 'e')
2119 {
2120 if (volatile_refs_p (XEXP (x, i)))
2121 return 1;
2122 }
2123 else if (fmt[i] == 'E')
2124 {
2125 int j;
2126 for (j = 0; j < XVECLEN (x, i); j++)
2127 if (volatile_refs_p (XVECEXP (x, i, j)))
2128 return 1;
2129 }
2130 }
2131 }
2132 return 0;
2133 }
2134
2135 /* Similar to above, except that it also rejects register pre- and post-
2136 incrementing. */
2137
2138 int
2139 side_effects_p (const_rtx x)
2140 {
2141 const RTX_CODE code = GET_CODE (x);
2142 switch (code)
2143 {
2144 case LABEL_REF:
2145 case SYMBOL_REF:
2146 case CONST_INT:
2147 case CONST:
2148 case CONST_DOUBLE:
2149 case CONST_FIXED:
2150 case CONST_VECTOR:
2151 case CC0:
2152 case PC:
2153 case REG:
2154 case SCRATCH:
2155 case ADDR_VEC:
2156 case ADDR_DIFF_VEC:
2157 return 0;
2158
2159 case CLOBBER:
2160 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2161 when some combination can't be done. If we see one, don't think
2162 that we can simplify the expression. */
2163 return (GET_MODE (x) != VOIDmode);
2164
2165 case PRE_INC:
2166 case PRE_DEC:
2167 case POST_INC:
2168 case POST_DEC:
2169 case PRE_MODIFY:
2170 case POST_MODIFY:
2171 case CALL:
2172 case UNSPEC_VOLATILE:
2173 /* case TRAP_IF: This isn't clear yet. */
2174 return 1;
2175
2176 case MEM:
2177 case ASM_INPUT:
2178 case ASM_OPERANDS:
2179 if (MEM_VOLATILE_P (x))
2180 return 1;
2181
2182 default:
2183 break;
2184 }
2185
2186 /* Recursively scan the operands of this expression. */
2187
2188 {
2189 const char *fmt = GET_RTX_FORMAT (code);
2190 int i;
2191
2192 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2193 {
2194 if (fmt[i] == 'e')
2195 {
2196 if (side_effects_p (XEXP (x, i)))
2197 return 1;
2198 }
2199 else if (fmt[i] == 'E')
2200 {
2201 int j;
2202 for (j = 0; j < XVECLEN (x, i); j++)
2203 if (side_effects_p (XVECEXP (x, i, j)))
2204 return 1;
2205 }
2206 }
2207 }
2208 return 0;
2209 }
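
/* To illustrate how these three predicates differ (the rtl fragments
   below are hypothetical examples, not taken from any target):

     (mem/v:SI (reg:SI 100))                volatile_insn_p: 0
       a volatile memory reference          volatile_refs_p: 1
                                            side_effects_p:  1

     (mem:SI (post_inc:SI (reg:SI 101)))    volatile_insn_p: 0
       a non-volatile auto-increment load   volatile_refs_p: 0
                                            side_effects_p:  1

     (unspec_volatile [(const_int 0)] 0)    volatile_insn_p: 1
                                            volatile_refs_p: 1
                                            side_effects_p:  1  */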
2210
2211 /* Return nonzero if evaluating rtx X might cause a trap.
2212 FLAGS controls how to consider MEMs. A nonzero value means the context
2213 of the access may have changed from the original, such that the
2214 address may have become invalid. */
2215
2216 int
2217 may_trap_p_1 (const_rtx x, unsigned flags)
2218 {
2219 int i;
2220 enum rtx_code code;
2221 const char *fmt;
2222
2223 /* We make no distinction currently, but this function is part of
2224 the internal target-hooks ABI so we keep the parameter as
2225 "unsigned flags". */
2226 bool code_changed = flags != 0;
2227
2228 if (x == 0)
2229 return 0;
2230 code = GET_CODE (x);
2231 switch (code)
2232 {
2233 /* Handle these cases quickly. */
2234 case CONST_INT:
2235 case CONST_DOUBLE:
2236 case CONST_FIXED:
2237 case CONST_VECTOR:
2238 case SYMBOL_REF:
2239 case LABEL_REF:
2240 case CONST:
2241 case PC:
2242 case CC0:
2243 case REG:
2244 case SCRATCH:
2245 return 0;
2246
2247 case UNSPEC:
2248 case UNSPEC_VOLATILE:
2249 return targetm.unspec_may_trap_p (x, flags);
2250
2251 case ASM_INPUT:
2252 case TRAP_IF:
2253 return 1;
2254
2255 case ASM_OPERANDS:
2256 return MEM_VOLATILE_P (x);
2257
2258 /* Memory ref can trap unless it's a static var or a stack slot. */
2259 case MEM:
2260 if (/* MEM_NOTRAP_P only relates to the actual position of the memory
2261 reference; moving it out of context, such as when moving code
2262 during optimization, might cause its address to become invalid. */
2263 code_changed
2264 || !MEM_NOTRAP_P (x))
2265 {
2266 HOST_WIDE_INT size = MEM_SIZE (x) ? INTVAL (MEM_SIZE (x)) : 0;
2267 return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size,
2268 GET_MODE (x), code_changed);
2269 }
2270
2271 return 0;
2272
2273 /* Division by a non-constant might trap. */
2274 case DIV:
2275 case MOD:
2276 case UDIV:
2277 case UMOD:
2278 if (HONOR_SNANS (GET_MODE (x)))
2279 return 1;
2280 if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
2281 return flag_trapping_math;
2282 if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
2283 return 1;
2284 break;
2285
2286 case EXPR_LIST:
2287 /* An EXPR_LIST is used to represent a function call. This
2288 certainly may trap. */
2289 return 1;
2290
2291 case GE:
2292 case GT:
2293 case LE:
2294 case LT:
2295 case LTGT:
2296 case COMPARE:
2297 /* Some floating point comparisons may trap. */
2298 if (!flag_trapping_math)
2299 break;
2300 /* ??? There is no machine independent way to check for tests that trap
2301 when COMPARE is used, though many targets do make this distinction.
2302 For instance, sparc uses CCFPE for compares which generate exceptions
2303 and CCFP for compares which do not generate exceptions. */
2304 if (HONOR_NANS (GET_MODE (x)))
2305 return 1;
2306 /* But often the compare has some CC mode, so check operand
2307 modes as well. */
2308 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2309 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2310 return 1;
2311 break;
2312
2313 case EQ:
2314 case NE:
2315 if (HONOR_SNANS (GET_MODE (x)))
2316 return 1;
2317 /* Often comparison is CC mode, so check operand modes. */
2318 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2319 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2320 return 1;
2321 break;
2322
2323 case FIX:
2324 /* Conversion of floating point might trap. */
2325 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2326 return 1;
2327 break;
2328
2329 case NEG:
2330 case ABS:
2331 case SUBREG:
2332 /* These operations don't trap even with floating point. */
2333 break;
2334
2335 default:
2336 /* Any floating arithmetic may trap. */
2337 if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
2338 && flag_trapping_math)
2339 return 1;
2340 }
2341
2342 fmt = GET_RTX_FORMAT (code);
2343 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2344 {
2345 if (fmt[i] == 'e')
2346 {
2347 if (may_trap_p_1 (XEXP (x, i), flags))
2348 return 1;
2349 }
2350 else if (fmt[i] == 'E')
2351 {
2352 int j;
2353 for (j = 0; j < XVECLEN (x, i); j++)
2354 if (may_trap_p_1 (XVECEXP (x, i, j), flags))
2355 return 1;
2356 }
2357 }
2358 return 0;
2359 }
2360
2361 /* Return nonzero if evaluating rtx X might cause a trap. */
2362
2363 int
2364 may_trap_p (const_rtx x)
2365 {
2366 return may_trap_p_1 (x, 0);
2367 }
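
/* For instance, assuming SImode pseudos 100 and 101,
   may_trap_p ((div:SI (reg:SI 100) (reg:SI 101))) is nonzero because
   the divisor is not a constant, whereas
   may_trap_p ((plus:SI (reg:SI 100) (reg:SI 101))) is zero, since an
   integer addition cannot trap.  */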
2368
2369 /* Same as above, but additionally return nonzero if evaluating rtx X might
2370 cause a fault. We define a fault for the purpose of this function as an
2371 erroneous execution condition that cannot be encountered during the normal
2372 execution of a valid program; the typical example is an unaligned memory
2373 access on a strict alignment machine. The compiler guarantees that it
2374 doesn't generate code that will fault from a valid program, but this
2375 guarantee doesn't mean anything for individual instructions. Consider
2376 the following example:
2377
2378 struct S { int d; union { char *cp; int *ip; }; };
2379
2380 int foo(struct S *s)
2381 {
2382 if (s->d == 1)
2383 return *s->ip;
2384 else
2385 return *s->cp;
2386 }
2387
2388 on a strict alignment machine. In a valid program, foo will never be
2389 invoked on a structure for which d is equal to 1 and the underlying
2390 unique field of the union not aligned on a 4-byte boundary, but the
2391 expression *s->ip might cause a fault if considered individually.
2392
2393 At the RTL level, potentially problematic expressions will almost always
2394 satisfy may_trap_p; for example, the above dereference can be emitted as
2395 (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
2396 However, suppose that foo is inlined in a caller that causes s->cp to
2397 point to a local character variable and guarantees that s->d is not set
2398 to 1; foo may have been effectively translated into pseudo-RTL as:
2399
2400 if ((reg:SI) == 1)
2401 (set (reg:SI) (mem:SI (%fp - 7)))
2402 else
2403 (set (reg:QI) (mem:QI (%fp - 7)))
2404
2405 Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
2406 memory reference to a stack slot, but it will certainly cause a fault
2407 on a strict alignment machine. */
2408
2409 int
2410 may_trap_or_fault_p (const_rtx x)
2411 {
2412 return may_trap_p_1 (x, 1);
2413 }
2414
2415 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2416 i.e., an inequality. */
2417
2418 int
2419 inequality_comparisons_p (const_rtx x)
2420 {
2421 const char *fmt;
2422 int len, i;
2423 const enum rtx_code code = GET_CODE (x);
2424
2425 switch (code)
2426 {
2427 case REG:
2428 case SCRATCH:
2429 case PC:
2430 case CC0:
2431 case CONST_INT:
2432 case CONST_DOUBLE:
2433 case CONST_FIXED:
2434 case CONST_VECTOR:
2435 case CONST:
2436 case LABEL_REF:
2437 case SYMBOL_REF:
2438 return 0;
2439
2440 case LT:
2441 case LTU:
2442 case GT:
2443 case GTU:
2444 case LE:
2445 case LEU:
2446 case GE:
2447 case GEU:
2448 return 1;
2449
2450 default:
2451 break;
2452 }
2453
2454 len = GET_RTX_LENGTH (code);
2455 fmt = GET_RTX_FORMAT (code);
2456
2457 for (i = 0; i < len; i++)
2458 {
2459 if (fmt[i] == 'e')
2460 {
2461 if (inequality_comparisons_p (XEXP (x, i)))
2462 return 1;
2463 }
2464 else if (fmt[i] == 'E')
2465 {
2466 int j;
2467 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2468 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2469 return 1;
2470 }
2471 }
2472
2473 return 0;
2474 }
2475
2476 /* Replace any occurrence of FROM in X with TO. The function does
2477 not descend into CONST_DOUBLE expressions when doing the replacement.
2478
2479 Note that copying is not done so X must not be shared unless all copies
2480 are to be modified. */
2481
2482 rtx
2483 replace_rtx (rtx x, rtx from, rtx to)
2484 {
2485 int i, j;
2486 const char *fmt;
2487
2488 /* The following prevents infinite loops when we change a MEM inside
2489 a CONST_DOUBLE into the same CONST_DOUBLE. */
2490 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2491 return x;
2492
2493 if (x == from)
2494 return to;
2495
2496 /* Allow this function to make replacements in EXPR_LISTs. */
2497 if (x == 0)
2498 return 0;
2499
2500 if (GET_CODE (x) == SUBREG)
2501 {
2502 rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to);
2503
2504 if (GET_CODE (new_rtx) == CONST_INT)
2505 {
2506 x = simplify_subreg (GET_MODE (x), new_rtx,
2507 GET_MODE (SUBREG_REG (x)),
2508 SUBREG_BYTE (x));
2509 gcc_assert (x);
2510 }
2511 else
2512 SUBREG_REG (x) = new_rtx;
2513
2514 return x;
2515 }
2516 else if (GET_CODE (x) == ZERO_EXTEND)
2517 {
2518 rtx new_rtx = replace_rtx (XEXP (x, 0), from, to);
2519
2520 if (GET_CODE (new_rtx) == CONST_INT)
2521 {
2522 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2523 new_rtx, GET_MODE (XEXP (x, 0)));
2524 gcc_assert (x);
2525 }
2526 else
2527 XEXP (x, 0) = new_rtx;
2528
2529 return x;
2530 }
2531
2532 fmt = GET_RTX_FORMAT (GET_CODE (x));
2533 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2534 {
2535 if (fmt[i] == 'e')
2536 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2537 else if (fmt[i] == 'E')
2538 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2539 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2540 }
2541
2542 return x;
2543 }
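
/* A minimal usage sketch (the insn and pseudo number are hypothetical):
   replace every use of pseudo 100 in an insn pattern by the constant 42.
   Because the replacement is done in place and the result may be a new
   rtx, the caller must use the return value and must not pass a shared
   pattern:

     PATTERN (insn) = replace_rtx (PATTERN (insn),
                                   regno_reg_rtx[100], GEN_INT (42));  */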
2544
2545 /* Replace occurrences of the old label in *X with the new one.
2546 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2547
2548 int
2549 replace_label (rtx *x, void *data)
2550 {
2551 rtx l = *x;
2552 rtx old_label = ((replace_label_data *) data)->r1;
2553 rtx new_label = ((replace_label_data *) data)->r2;
2554 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2555
2556 if (l == NULL_RTX)
2557 return 0;
2558
2559 if (GET_CODE (l) == SYMBOL_REF
2560 && CONSTANT_POOL_ADDRESS_P (l))
2561 {
2562 rtx c = get_pool_constant (l);
2563 if (rtx_referenced_p (old_label, c))
2564 {
2565 rtx new_c, new_l;
2566 replace_label_data *d = (replace_label_data *) data;
2567
2568 /* Create a copy of constant C; replace the label inside
2569 but do not update LABEL_NUSES because uses in constant pool
2570 are not counted. */
2571 new_c = copy_rtx (c);
2572 d->update_label_nuses = false;
2573 for_each_rtx (&new_c, replace_label, data);
2574 d->update_label_nuses = update_label_nuses;
2575
2576 /* Add the new constant NEW_C to constant pool and replace
2577 the old reference to constant by new reference. */
2578 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2579 *x = replace_rtx (l, l, new_l);
2580 }
2581 return 0;
2582 }
2583
2584 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2585 field. This is not handled by for_each_rtx because it doesn't
2586 handle unprinted ('0') fields. */
2587 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2588 JUMP_LABEL (l) = new_label;
2589
2590 if ((GET_CODE (l) == LABEL_REF
2591 || GET_CODE (l) == INSN_LIST)
2592 && XEXP (l, 0) == old_label)
2593 {
2594 XEXP (l, 0) = new_label;
2595 if (update_label_nuses)
2596 {
2597 ++LABEL_NUSES (new_label);
2598 --LABEL_NUSES (old_label);
2599 }
2600 return 0;
2601 }
2602
2603 return 0;
2604 }
2605
2606 /* When *BODY is equal to X or X is directly referenced by *BODY,
2607 return nonzero, so that FOR_EACH_RTX stops traversing and returns
2608 nonzero too; otherwise FOR_EACH_RTX continues traversing *BODY. */
2609
2610 static int
2611 rtx_referenced_p_1 (rtx *body, void *x)
2612 {
2613 rtx y = (rtx) x;
2614
2615 if (*body == NULL_RTX)
2616 return y == NULL_RTX;
2617
2618 /* Return true if a label_ref *BODY refers to label Y. */
2619 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2620 return XEXP (*body, 0) == y;
2621
2622 /* If *BODY is a reference to pool constant traverse the constant. */
2623 if (GET_CODE (*body) == SYMBOL_REF
2624 && CONSTANT_POOL_ADDRESS_P (*body))
2625 return rtx_referenced_p (y, get_pool_constant (*body));
2626
2627 /* By default, compare the RTL expressions. */
2628 return rtx_equal_p (*body, y);
2629 }
2630
2631 /* Return true if X is referenced in BODY. */
2632
2633 int
2634 rtx_referenced_p (rtx x, rtx body)
2635 {
2636 return for_each_rtx (&body, rtx_referenced_p_1, x);
2637 }
2638
2639 /* If INSN is a tablejump, return true and store the label (which precedes the
2640 jump table) in *LABELP and the jump table itself in *TABLEP. LABELP and TABLEP may be NULL. */
2641
2642 bool
2643 tablejump_p (const_rtx insn, rtx *labelp, rtx *tablep)
2644 {
2645 rtx label, table;
2646
2647 if (JUMP_P (insn)
2648 && (label = JUMP_LABEL (insn)) != NULL_RTX
2649 && (table = next_active_insn (label)) != NULL_RTX
2650 && JUMP_P (table)
2651 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2652 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2653 {
2654 if (labelp)
2655 *labelp = label;
2656 if (tablep)
2657 *tablep = table;
2658 return true;
2659 }
2660 return false;
2661 }
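
/* A minimal usage sketch (INSN is a hypothetical jump insn): fetch the
   dispatch table behind a tablejump and find how many entries it has.

     rtx table;
     if (tablejump_p (insn, NULL, &table))
       {
         rtx body = PATTERN (table);
         int n = XVECLEN (body, GET_CODE (body) == ADDR_DIFF_VEC);
         ...
       }                                                               */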
2662
2663 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2664 constant that is not in the constant pool and not in the condition
2665 of an IF_THEN_ELSE. */
2666
2667 static int
2668 computed_jump_p_1 (const_rtx x)
2669 {
2670 const enum rtx_code code = GET_CODE (x);
2671 int i, j;
2672 const char *fmt;
2673
2674 switch (code)
2675 {
2676 case LABEL_REF:
2677 case PC:
2678 return 0;
2679
2680 case CONST:
2681 case CONST_INT:
2682 case CONST_DOUBLE:
2683 case CONST_FIXED:
2684 case CONST_VECTOR:
2685 case SYMBOL_REF:
2686 case REG:
2687 return 1;
2688
2689 case MEM:
2690 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2691 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2692
2693 case IF_THEN_ELSE:
2694 return (computed_jump_p_1 (XEXP (x, 1))
2695 || computed_jump_p_1 (XEXP (x, 2)));
2696
2697 default:
2698 break;
2699 }
2700
2701 fmt = GET_RTX_FORMAT (code);
2702 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2703 {
2704 if (fmt[i] == 'e'
2705 && computed_jump_p_1 (XEXP (x, i)))
2706 return 1;
2707
2708 else if (fmt[i] == 'E')
2709 for (j = 0; j < XVECLEN (x, i); j++)
2710 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2711 return 1;
2712 }
2713
2714 return 0;
2715 }
2716
2717 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2718
2719 Tablejumps and casesi insns are not considered indirect jumps;
2720 we can recognize them by a (use (label_ref)). */
2721
2722 int
2723 computed_jump_p (const_rtx insn)
2724 {
2725 int i;
2726 if (JUMP_P (insn))
2727 {
2728 rtx pat = PATTERN (insn);
2729
2730 /* If we have a JUMP_LABEL set, we're not a computed jump. */
2731 if (JUMP_LABEL (insn) != NULL)
2732 return 0;
2733
2734 if (GET_CODE (pat) == PARALLEL)
2735 {
2736 int len = XVECLEN (pat, 0);
2737 int has_use_labelref = 0;
2738
2739 for (i = len - 1; i >= 0; i--)
2740 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2741 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2742 == LABEL_REF))
2743 has_use_labelref = 1;
2744
2745 if (! has_use_labelref)
2746 for (i = len - 1; i >= 0; i--)
2747 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2748 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2749 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2750 return 1;
2751 }
2752 else if (GET_CODE (pat) == SET
2753 && SET_DEST (pat) == pc_rtx
2754 && computed_jump_p_1 (SET_SRC (pat)))
2755 return 1;
2756 }
2757 return 0;
2758 }
2759
2760 /* Optimized loop of for_each_rtx, trying to avoid useless recursive
2761 calls. Processes the subexpressions of EXP and passes them to F. */
2762 static int
2763 for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2764 {
2765 int result, i, j;
2766 const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2767 rtx *x;
2768
2769 for (; format[n] != '\0'; n++)
2770 {
2771 switch (format[n])
2772 {
2773 case 'e':
2774 /* Call F on X. */
2775 x = &XEXP (exp, n);
2776 result = (*f) (x, data);
2777 if (result == -1)
2778 /* Do not traverse sub-expressions. */
2779 continue;
2780 else if (result != 0)
2781 /* Stop the traversal. */
2782 return result;
2783
2784 if (*x == NULL_RTX)
2785 /* There are no sub-expressions. */
2786 continue;
2787
2788 i = non_rtx_starting_operands[GET_CODE (*x)];
2789 if (i >= 0)
2790 {
2791 result = for_each_rtx_1 (*x, i, f, data);
2792 if (result != 0)
2793 return result;
2794 }
2795 break;
2796
2797 case 'V':
2798 case 'E':
2799 if (XVEC (exp, n) == 0)
2800 continue;
2801 for (j = 0; j < XVECLEN (exp, n); ++j)
2802 {
2803 /* Call F on X. */
2804 x = &XVECEXP (exp, n, j);
2805 result = (*f) (x, data);
2806 if (result == -1)
2807 /* Do not traverse sub-expressions. */
2808 continue;
2809 else if (result != 0)
2810 /* Stop the traversal. */
2811 return result;
2812
2813 if (*x == NULL_RTX)
2814 /* There are no sub-expressions. */
2815 continue;
2816
2817 i = non_rtx_starting_operands[GET_CODE (*x)];
2818 if (i >= 0)
2819 {
2820 result = for_each_rtx_1 (*x, i, f, data);
2821 if (result != 0)
2822 return result;
2823 }
2824 }
2825 break;
2826
2827 default:
2828 /* Nothing to do. */
2829 break;
2830 }
2831 }
2832
2833 return 0;
2834 }
2835
2836 /* Traverse X via depth-first search, calling F for each
2837 sub-expression (including X itself). F is also passed the DATA.
2838 If F returns -1, do not traverse sub-expressions, but continue
2839 traversing the rest of the tree. If F ever returns any other
2840 nonzero value, stop the traversal, and return the value returned
2841 by F. Otherwise, return 0. This function does not traverse inside
2842 tree structure that contains RTX_EXPRs, or into sub-expressions
2843 whose format code is `0' since it is not known whether or not those
2844 codes are actually RTL.
2845
2846 This routine is very general, and could (should?) be used to
2847 implement many of the other routines in this file. */
2848
2849 int
2850 for_each_rtx (rtx *x, rtx_function f, void *data)
2851 {
2852 int result;
2853 int i;
2854
2855 /* Call F on X. */
2856 result = (*f) (x, data);
2857 if (result == -1)
2858 /* Do not traverse sub-expressions. */
2859 return 0;
2860 else if (result != 0)
2861 /* Stop the traversal. */
2862 return result;
2863
2864 if (*x == NULL_RTX)
2865 /* There are no sub-expressions. */
2866 return 0;
2867
2868 i = non_rtx_starting_operands[GET_CODE (*x)];
2869 if (i < 0)
2870 return 0;
2871
2872 return for_each_rtx_1 (*x, i, f, data);
2873 }
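
/* As a usage sketch of the protocol described above: a hypothetical
   helper that counts the MEM subexpressions of an rtx.  The names
   count_mems_1 and count_mems are illustrative only; they are not
   used elsewhere in this file.  */

static int
count_mems_1 (rtx *x, void *data)
{
  if (*x != NULL_RTX && MEM_P (*x))
    ++*(int *) data;
  /* Return 0 to keep traversing; -1 would skip the operands of *X,
     and any other nonzero value would stop the traversal.  */
  return 0;
}

static int ATTRIBUTE_UNUSED
count_mems (rtx x)
{
  int n = 0;
  for_each_rtx (&x, count_mems_1, &n);
  return n;
}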
2874
2875
2876 /* Searches X for any reference to REGNO, returning the rtx of the
2877 reference found if any. Otherwise, returns NULL_RTX. */
2878
2879 rtx
2880 regno_use_in (unsigned int regno, rtx x)
2881 {
2882 const char *fmt;
2883 int i, j;
2884 rtx tem;
2885
2886 if (REG_P (x) && REGNO (x) == regno)
2887 return x;
2888
2889 fmt = GET_RTX_FORMAT (GET_CODE (x));
2890 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2891 {
2892 if (fmt[i] == 'e')
2893 {
2894 if ((tem = regno_use_in (regno, XEXP (x, i))))
2895 return tem;
2896 }
2897 else if (fmt[i] == 'E')
2898 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2899 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2900 return tem;
2901 }
2902
2903 return NULL_RTX;
2904 }
2905
2906 /* Return a value indicating whether OP, an operand of a commutative
2907 operation, is preferred as the first or second operand. The higher
2908 the value, the stronger the preference for being the first operand.
2909 We use negative values to indicate a preference for the second operand
2910 and positive values for the first operand. */
2911
2912 int
2913 commutative_operand_precedence (rtx op)
2914 {
2915 enum rtx_code code = GET_CODE (op);
2916
2917 /* Constants always come second. Prefer "nice" constants. */
2918 if (code == CONST_INT)
2919 return -8;
2920 if (code == CONST_DOUBLE)
2921 return -7;
2922 if (code == CONST_FIXED)
2923 return -7;
2924 op = avoid_constant_pool_reference (op);
2925 code = GET_CODE (op);
2926
2927 switch (GET_RTX_CLASS (code))
2928 {
2929 case RTX_CONST_OBJ:
2930 if (code == CONST_INT)
2931 return -6;
2932 if (code == CONST_DOUBLE)
2933 return -5;
2934 if (code == CONST_FIXED)
2935 return -5;
2936 return -4;
2937
2938 case RTX_EXTRA:
2939 /* SUBREGs of objects should come second. */
2940 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2941 return -3;
2942 return 0;
2943
2944 case RTX_OBJ:
2945 /* Complex expressions should come first, so decrease the priority
2946 of objects. Prefer pointer objects over non-pointer objects. */
2947 if ((REG_P (op) && REG_POINTER (op))
2948 || (MEM_P (op) && MEM_POINTER (op)))
2949 return -1;
2950 return -2;
2951
2952 case RTX_COMM_ARITH:
2953 /* Prefer operands that are themselves commutative to be first.
2954 This helps to make things linear. In particular,
2955 (and (and (reg) (reg)) (not (reg))) is canonical. */
2956 return 4;
2957
2958 case RTX_BIN_ARITH:
2959 /* If only one operand is a binary expression, it will be the first
2960 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2961 is canonical, although it will usually be further simplified. */
2962 return 2;
2963
2964 case RTX_UNARY:
2965 /* Then prefer NEG and NOT. */
2966 if (code == NEG || code == NOT)
2967 return 1;
2968
2969 default:
2970 return 0;
2971 }
2972 }
2973
2974 /* Return 1 iff it is necessary to swap the operands of a commutative
2975 operation in order to canonicalize the expression. */
2976
2977 bool
2978 swap_commutative_operands_p (rtx x, rtx y)
2979 {
2980 return (commutative_operand_precedence (x)
2981 < commutative_operand_precedence (y));
2982 }
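
/* For example, with the precedences above a plain (reg) scores -2
   (-1 if REG_POINTER) while a (const_int) scores -8, so
   swap_commutative_operands_p (const_int, reg) is true and
   canonicalizing (plus (const_int 4) (reg)) puts the register first,
   yielding (plus (reg) (const_int 4)).  */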
2983
2984 /* Return 1 if X is an autoincrement side effect and the register is
2985 not the stack pointer. */
2986 int
2987 auto_inc_p (const_rtx x)
2988 {
2989 switch (GET_CODE (x))
2990 {
2991 case PRE_INC:
2992 case POST_INC:
2993 case PRE_DEC:
2994 case POST_DEC:
2995 case PRE_MODIFY:
2996 case POST_MODIFY:
2997 /* There are no REG_INC notes for SP. */
2998 if (XEXP (x, 0) != stack_pointer_rtx)
2999 return 1;
3000 default:
3001 break;
3002 }
3003 return 0;
3004 }
3005
3006 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
3007 int
3008 loc_mentioned_in_p (rtx *loc, const_rtx in)
3009 {
3010 enum rtx_code code;
3011 const char *fmt;
3012 int i, j;
3013
3014 if (!in)
3015 return 0;
3016
3017 code = GET_CODE (in);
3018 fmt = GET_RTX_FORMAT (code);
3019 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3020 {
3021 if (fmt[i] == 'e')
3022 {
3023 if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i)))
3024 return 1;
3025 }
3026 else if (fmt[i] == 'E')
3027 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3028 if (loc == &XVECEXP (in, i, j)
3029 || loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3030 return 1;
3031 }
3032 return 0;
3033 }
3034
3035 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3036 and SUBREG_BYTE, return the bit offset where the subreg begins
3037 (counting from the least significant bit of the operand). */
3038
3039 unsigned int
3040 subreg_lsb_1 (enum machine_mode outer_mode,
3041 enum machine_mode inner_mode,
3042 unsigned int subreg_byte)
3043 {
3044 unsigned int bitpos;
3045 unsigned int byte;
3046 unsigned int word;
3047
3048 /* A paradoxical subreg begins at bit position 0. */
3049 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
3050 return 0;
3051
3052 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3053 /* If the subreg crosses a word boundary ensure that
3054 it also begins and ends on a word boundary. */
3055 gcc_assert (!((subreg_byte % UNITS_PER_WORD
3056 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3057 && (subreg_byte % UNITS_PER_WORD
3058 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
3059
3060 if (WORDS_BIG_ENDIAN)
3061 word = (GET_MODE_SIZE (inner_mode)
3062 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3063 else
3064 word = subreg_byte / UNITS_PER_WORD;
3065 bitpos = word * BITS_PER_WORD;
3066
3067 if (BYTES_BIG_ENDIAN)
3068 byte = (GET_MODE_SIZE (inner_mode)
3069 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3070 else
3071 byte = subreg_byte % UNITS_PER_WORD;
3072 bitpos += byte * BITS_PER_UNIT;
3073
3074 return bitpos;
3075 }
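
/* As a worked example, assume 4-byte words on a target where neither
   WORDS_BIG_ENDIAN nor BYTES_BIG_ENDIAN holds (a typical 32-bit
   little-endian machine).  For (subreg:QI (reg:SI) 3) we get word 0
   and byte 3, so the subreg starts at bit 24.  With both flags set
   (big-endian), subreg_byte 3 names the least significant byte:
   byte (4 - (3 + 1)) % 4 = 0, so the subreg starts at bit 0.  */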
3076
3077 /* Given a subreg X, return the bit offset where the subreg begins
3078 (counting from the least significant bit of the reg). */
3079
3080 unsigned int
3081 subreg_lsb (const_rtx x)
3082 {
3083 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3084 SUBREG_BYTE (x));
3085 }
3086
3087 /* Fill in information about a subreg of a hard register.
3088 xregno - A regno of an inner hard subreg_reg (or what will become one).
3089 xmode - The mode of xregno.
3090 offset - The byte offset.
3091 ymode - The mode of a top level SUBREG (or what may become one).
3092 info - Pointer to structure to fill in. */
3093 static void
3094 subreg_get_info (unsigned int xregno, enum machine_mode xmode,
3095 unsigned int offset, enum machine_mode ymode,
3096 struct subreg_info *info)
3097 {
3098 int nregs_xmode, nregs_ymode;
3099 int mode_multiple, nregs_multiple;
3100 int offset_adj, y_offset, y_offset_adj;
3101 int regsize_xmode, regsize_ymode;
3102 bool rknown;
3103
3104 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3105
3106 rknown = false;
3107
3108 /* If there are holes in a non-scalar mode in registers, we expect
3109 that it is made up of its units concatenated together. */
3110 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
3111 {
3112 enum machine_mode xmode_unit;
3113
3114 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3115 if (GET_MODE_INNER (xmode) == VOIDmode)
3116 xmode_unit = xmode;
3117 else
3118 xmode_unit = GET_MODE_INNER (xmode);
3119 gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3120 gcc_assert (nregs_xmode
3121 == (GET_MODE_NUNITS (xmode)
3122 * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3123 gcc_assert (hard_regno_nregs[xregno][xmode]
3124 == (hard_regno_nregs[xregno][xmode_unit]
3125 * GET_MODE_NUNITS (xmode)));
3126
3127 /* You can only ask for a SUBREG of a value with holes in the middle
3128 if you don't cross the holes. (Such a SUBREG should be done by
3129 picking a different register class, or doing it in memory if
3130 necessary.) An example of a value with holes is XCmode on 32-bit
3131 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3132 3 for each part, but in memory it's two 128-bit parts.
3133 Padding is assumed to be at the end (not necessarily the 'high part')
3134 of each unit. */
3135 if ((offset / GET_MODE_SIZE (xmode_unit) + 1
3136 < GET_MODE_NUNITS (xmode))
3137 && (offset / GET_MODE_SIZE (xmode_unit)
3138 != ((offset + GET_MODE_SIZE (ymode) - 1)
3139 / GET_MODE_SIZE (xmode_unit))))
3140 {
3141 info->representable_p = false;
3142 rknown = true;
3143 }
3144 }
3145 else
3146 nregs_xmode = hard_regno_nregs[xregno][xmode];
3147
3148 nregs_ymode = hard_regno_nregs[xregno][ymode];
3149
3150 /* Paradoxical subregs are otherwise valid. */
3151 if (!rknown
3152 && offset == 0
3153 && GET_MODE_SIZE (ymode) > GET_MODE_SIZE (xmode))
3154 {
3155 info->representable_p = true;
3156 /* If this is a big endian paradoxical subreg, which uses more
3157 actual hard registers than the original register, we must
3158 return a negative offset so that we find the proper highpart
3159 of the register. */
3160 if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3161 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3162 info->offset = nregs_xmode - nregs_ymode;
3163 else
3164 info->offset = 0;
3165 info->nregs = nregs_ymode;
3166 return;
3167 }
3168
3169 /* If registers store different numbers of bits in the different
3170 modes, we cannot generally form this subreg. */
3171 if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
3172 && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
3173 && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0
3174 && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0)
3175 {
3176 regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
3177 regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
3178 if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1)
3179 {
3180 info->representable_p = false;
3181 info->nregs
3182 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3183 info->offset = offset / regsize_xmode;
3184 return;
3185 }
3186 if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1)
3187 {
3188 info->representable_p = false;
3189 info->nregs
3190 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3191 info->offset = offset / regsize_xmode;
3192 return;
3193 }
3194 }
3195
3196 /* Lowpart subregs are otherwise valid. */
3197 if (!rknown && offset == subreg_lowpart_offset (ymode, xmode))
3198 {
3199 info->representable_p = true;
3200 rknown = true;
3201
3202 if (offset == 0 || nregs_xmode == nregs_ymode)
3203 {
3204 info->offset = 0;
3205 info->nregs = nregs_ymode;
3206 return;
3207 }
3208 }
3209
3210 /* This should always pass, otherwise we don't know how to verify
3211 the constraint. These conditions may be relaxed but
3212 subreg_regno_offset would need to be redesigned. */
3213 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3214 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3215
3216 /* The XMODE value can be seen as a vector of NREGS_XMODE
3217 values. The subreg must represent a lowpart of given field.
3218 Compute what field it is. */
3219 offset_adj = offset;
3220 offset_adj -= subreg_lowpart_offset (ymode,
3221 mode_for_size (GET_MODE_BITSIZE (xmode)
3222 / nregs_xmode,
3223 MODE_INT, 0));
3224
3225 /* Size of ymode must not be greater than the size of xmode. */
3226 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3227 gcc_assert (mode_multiple != 0);
3228
3229 y_offset = offset / GET_MODE_SIZE (ymode);
3230 y_offset_adj = offset_adj / GET_MODE_SIZE (ymode);
3231 nregs_multiple = nregs_xmode / nregs_ymode;
3232
3233 gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0);
3234 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3235
3236 if (!rknown)
3237 {
3238 info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple)));
3239 rknown = true;
3240 }
3241 info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3242 info->nregs = nregs_ymode;
3243 }
3244
3245 /* This function returns the regno offset of a subreg expression.
3246 xregno - A regno of an inner hard subreg_reg (or what will become one).
3247 xmode - The mode of xregno.
3248 offset - The byte offset.
3249 ymode - The mode of a top level SUBREG (or what may become one).
3250 RETURN - The regno offset which would be used. */
3251 unsigned int
3252 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3253 unsigned int offset, enum machine_mode ymode)
3254 {
3255 struct subreg_info info;
3256 subreg_get_info (xregno, xmode, offset, ymode, &info);
3257 return info.offset;
3258 }
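
/* For example, assuming 4-byte hard registers on a little-endian
   target where a DImode value occupies two consecutive registers,
   subreg_regno_offset (REGNO, DImode, 4, SImode) returns 1: byte
   offset 4 selects the high word, which lives in hard register
   REGNO + 1.  */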
3259
3260 /* This function returns true when the offset is representable via
3261 subreg_offset in the given regno.
3262 xregno - A regno of an inner hard subreg_reg (or what will become one).
3263 xmode - The mode of xregno.
3264 offset - The byte offset.
3265 ymode - The mode of a top level SUBREG (or what may become one).
3266 RETURN - Whether the offset is representable. */
3267 bool
3268 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3269 unsigned int offset, enum machine_mode ymode)
3270 {
3271 struct subreg_info info;
3272 subreg_get_info (xregno, xmode, offset, ymode, &info);
3273 return info.representable_p;
3274 }
3275
3276 /* Return the number of a YMODE register to which
3277
3278 (subreg:YMODE (reg:XMODE XREGNO) OFFSET)
3279
3280 can be simplified. Return -1 if the subreg can't be simplified.
3281
3282 XREGNO is a hard register number. */
3283
3284 int
3285 simplify_subreg_regno (unsigned int xregno, enum machine_mode xmode,
3286 unsigned int offset, enum machine_mode ymode)
3287 {
3288 struct subreg_info info;
3289 unsigned int yregno;
3290
3291 #ifdef CANNOT_CHANGE_MODE_CLASS
3292 /* Give the backend a chance to disallow the mode change. */
3293 if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT
3294 && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT
3295 && REG_CANNOT_CHANGE_MODE_P (xregno, xmode, ymode))
3296 return -1;
3297 #endif
3298
3299 /* We shouldn't simplify stack-related registers. */
3300 if ((!reload_completed || frame_pointer_needed)
3301 && (xregno == FRAME_POINTER_REGNUM
3302 || xregno == HARD_FRAME_POINTER_REGNUM))
3303 return -1;
3304
3305 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3306 && xregno == ARG_POINTER_REGNUM)
3307 return -1;
3308
3309 if (xregno == STACK_POINTER_REGNUM)
3310 return -1;
3311
3312 /* Try to get the register offset. */
3313 subreg_get_info (xregno, xmode, offset, ymode, &info);
3314 if (!info.representable_p)
3315 return -1;
3316
3317 /* Make sure that the offsetted register value is in range. */
3318 yregno = xregno + info.offset;
3319 if (!HARD_REGISTER_NUM_P (yregno))
3320 return -1;
3321
3322 /* See whether (reg:YMODE YREGNO) is valid.
3323
3324 ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid.
3325 This is a kludge to work around how float/complex arguments are passed
3326 on 32-bit SPARC and should be fixed. */
3327 if (!HARD_REGNO_MODE_OK (yregno, ymode)
3328 && HARD_REGNO_MODE_OK (xregno, xmode))
3329 return -1;
3330
3331 return (int) yregno;
3332 }
3333
3334 /* Return the final regno that a subreg expression refers to. */
3335 unsigned int
3336 subreg_regno (const_rtx x)
3337 {
3338 unsigned int ret;
3339 rtx subreg = SUBREG_REG (x);
3340 int regno = REGNO (subreg);
3341
3342 ret = regno + subreg_regno_offset (regno,
3343 GET_MODE (subreg),
3344 SUBREG_BYTE (x),
3345 GET_MODE (x));
3346 return ret;
3347
3348 }
3349
3350 /* Return the number of registers that a subreg expression refers
3351 to. */
3352 unsigned int
3353 subreg_nregs (const_rtx x)
3354 {
3355 return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
3356 }
3357
3358 /* Return the number of registers that a subreg REG with REGNO
3359 expression refers to. This is a copy of rtlanal.c:subreg_nregs,
3360 changed so that the regno can be passed in. */
3361
3362 unsigned int
3363 subreg_nregs_with_regno (unsigned int regno, const_rtx x)
3364 {
3365 struct subreg_info info;
3366 rtx subreg = SUBREG_REG (x);
3367
3368 subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
3369 &info);
3370 return info.nregs;
3371 }
3372
3373
3374 struct parms_set_data
3375 {
3376 int nregs;
3377 HARD_REG_SET regs;
3378 };
3379
3380 /* Helper function for noticing stores to parameter registers. */
3381 static void
3382 parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
3383 {
3384 struct parms_set_data *const d = (struct parms_set_data *) data;
3385 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3386 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3387 {
3388 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3389 d->nregs--;
3390 }
3391 }
3392
3393 /* Look backward for first parameter to be loaded.
3394 Note that loads of all parameters will not necessarily be
3395 found if CSE has eliminated some of them (e.g., an argument
3396 to the outer function is passed down as a parameter).
3397 Do not skip BOUNDARY. */
3398 rtx
3399 find_first_parameter_load (rtx call_insn, rtx boundary)
3400 {
3401 struct parms_set_data parm;
3402 rtx p, before, first_set;
3403
3404 /* Since different machines initialize their parameter registers
3405 in different orders, assume nothing. Collect the set of all
3406 parameter registers. */
3407 CLEAR_HARD_REG_SET (parm.regs);
3408 parm.nregs = 0;
3409 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3410 if (GET_CODE (XEXP (p, 0)) == USE
3411 && REG_P (XEXP (XEXP (p, 0), 0)))
3412 {
3413 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3414
3415 /* We only care about registers which can hold function
3416 arguments. */
3417 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3418 continue;
3419
3420 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3421 parm.nregs++;
3422 }
3423 before = call_insn;
3424 first_set = call_insn;
3425
3426 /* Search backward for the first set of a register in this set. */
3427 while (parm.nregs && before != boundary)
3428 {
3429 before = PREV_INSN (before);
3430
3431 /* It is possible that some loads got CSEed from one call to
3432 another. Stop in that case. */
3433 if (CALL_P (before))
3434 break;
3435
3436 /* Our caller must either ensure that we will find all sets
3437 (in case code has not been optimized yet), or take care of
3438 possible labels by setting BOUNDARY to the preceding
3439 CODE_LABEL. */
3440 if (LABEL_P (before))
3441 {
3442 gcc_assert (before == boundary);
3443 break;
3444 }
3445
3446 if (INSN_P (before))
3447 {
3448 int nregs_old = parm.nregs;
3449 note_stores (PATTERN (before), parms_set, &parm);
3450 /* If we found something that did not set a parameter reg,
3451 we're done. Do not keep going, as that might result
3452 in hoisting an insn before the setting of a pseudo
3453 that is used by the hoisted insn. */
3454 if (nregs_old != parm.nregs)
3455 first_set = before;
3456 else
3457 break;
3458 }
3459 }
3460 return first_set;
3461 }
3462
3463 /* Return true if we should avoid inserting code between INSN and preceding
3464 call instruction. */
3465
3466 bool
3467 keep_with_call_p (const_rtx insn)
3468 {
3469 rtx set;
3470
3471 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3472 {
3473 if (REG_P (SET_DEST (set))
3474 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3475 && fixed_regs[REGNO (SET_DEST (set))]
3476 && general_operand (SET_SRC (set), VOIDmode))
3477 return true;
3478 if (REG_P (SET_SRC (set))
3479 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3480 && REG_P (SET_DEST (set))
3481 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3482 return true;
3483 /* There may be a stack pop just after the call and before the store
3484 of the return register. Search for the actual store when deciding
3485 if we can break or not. */
3486 if (SET_DEST (set) == stack_pointer_rtx)
3487 {
3488 /* This CONST_CAST is okay because next_nonnote_insn just
3489 returns its argument and we assign it to a const_rtx
3490 variable. */
3491 const_rtx i2 = next_nonnote_insn (CONST_CAST_RTX(insn));
3492 if (i2 && keep_with_call_p (i2))
3493 return true;
3494 }
3495 }
3496 return false;
3497 }
3498
3499 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3500 to non-complex jumps. That is, direct unconditional, conditional,
3501 and tablejumps, but not computed jumps or returns. It also does
3502 not apply to the fallthru case of a conditional jump. */
3503
3504 bool
3505 label_is_jump_target_p (const_rtx label, const_rtx jump_insn)
3506 {
3507 rtx tmp = JUMP_LABEL (jump_insn);
3508
3509 if (label == tmp)
3510 return true;
3511
3512 if (tablejump_p (jump_insn, NULL, &tmp))
3513 {
3514 rtvec vec = XVEC (PATTERN (tmp),
3515 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3516 int i, veclen = GET_NUM_ELEM (vec);
3517
3518 for (i = 0; i < veclen; ++i)
3519 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3520 return true;
3521 }
3522
3523 if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
3524 return true;
3525
3526 return false;
3527 }
3528
3529
3530 /* Return an estimate of the cost of computing rtx X.
3531 One use is in cse, to decide which expression to keep in the hash table.
3532 Another is in rtl generation, to pick the cheapest way to multiply.
3533 Other uses like the latter are expected in the future.
3534
3535 The SPEED parameter specifies whether costs optimized for speed or size
3536 should be returned. */
3537
3538 int
3539 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED, bool speed)
3540 {
3541 int i, j;
3542 enum rtx_code code;
3543 const char *fmt;
3544 int total;
3545
3546 if (x == 0)
3547 return 0;
3548
3549 /* Compute the default costs of certain things.
3550 Note that targetm.rtx_costs can override the defaults. */
3551
3552 code = GET_CODE (x);
3553 switch (code)
3554 {
3555 case MULT:
3556 total = COSTS_N_INSNS (5);
3557 break;
3558 case DIV:
3559 case UDIV:
3560 case MOD:
3561 case UMOD:
3562 total = COSTS_N_INSNS (7);
3563 break;
3564 case USE:
3565 /* Used in combine.c as a marker. */
3566 total = 0;
3567 break;
3568 default:
3569 total = COSTS_N_INSNS (1);
3570 }
3571
3572 switch (code)
3573 {
3574 case REG:
3575 return 0;
3576
3577 case SUBREG:
3578 total = 0;
3579 /* If we can't tie these modes, make this expensive. The larger
3580 the mode, the more expensive it is. */
3581 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3582 return COSTS_N_INSNS (2
3583 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3584 break;
3585
3586 default:
3587 if (targetm.rtx_costs (x, code, outer_code, &total, speed))
3588 return total;
3589 break;
3590 }
3591
3592 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3593 which is already in total. */
3594
3595 fmt = GET_RTX_FORMAT (code);
3596 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3597 if (fmt[i] == 'e')
3598 total += rtx_cost (XEXP (x, i), code, speed);
3599 else if (fmt[i] == 'E')
3600 for (j = 0; j < XVECLEN (x, i); j++)
3601 total += rtx_cost (XVECEXP (x, i, j), code, speed);
3602
3603 return total;
3604 }
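
/* As a rough illustration using only the default costs above (real
   targets will normally override them through targetm.rtx_costs):
   rtx_cost of (plus:SI (mult:SI (reg) (reg)) (reg)) is
   COSTS_N_INSNS (1) for the PLUS plus COSTS_N_INSNS (5) for the MULT;
   the REG operands contribute nothing.  */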
3605
3606 /* Return cost of address expression X.
3607 Expect that X is a properly formed address reference.
3608
3609 The SPEED parameter specifies whether costs optimized for speed or size
3610 should be returned. */
3611
3612 int
3613 address_cost (rtx x, enum machine_mode mode, bool speed)
3614 {
3615 /* We may be asked for the cost of various unusual addresses, such as the
3616 operands of a push instruction. It is not worthwhile to complicate the
3617 target hook to handle such cases. */
3618
3619 if (!memory_address_p (mode, x))
3620 return 1000;
3621
3622 return targetm.address_cost (x, speed);
3623 }
3624
3625 /* If the target doesn't override, compute the cost as with arithmetic. */
3626
3627 int
3628 default_address_cost (rtx x, bool speed)
3629 {
3630 return rtx_cost (x, MEM, speed);
3631 }
3632
3633
3634 unsigned HOST_WIDE_INT
3635 nonzero_bits (const_rtx x, enum machine_mode mode)
3636 {
3637 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3638 }
3639
3640 unsigned int
3641 num_sign_bit_copies (const_rtx x, enum machine_mode mode)
3642 {
3643 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3644 }
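
/* For instance, on a target where SImode is 32 bits and QImode 8 bits,
   nonzero_bits ((and:SI (reg:SI) (const_int 255)), SImode) is 255,
   nonzero_bits ((ashift:SI (reg:SI) (const_int 24)), SImode) is
   0xff000000, and num_sign_bit_copies ((sign_extend:SI (reg:QI)),
   SImode) is at least 25, since the extension copies the sign bit
   into the 24 high-order bits.  */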
3645
3646 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3647 It avoids exponential behavior in nonzero_bits1 when X has
3648 identical subexpressions on the first or the second level. */
3649
3650 static unsigned HOST_WIDE_INT
3651 cached_nonzero_bits (const_rtx x, enum machine_mode mode, const_rtx known_x,
3652 enum machine_mode known_mode,
3653 unsigned HOST_WIDE_INT known_ret)
3654 {
3655 if (x == known_x && mode == known_mode)
3656 return known_ret;
3657
3658 /* Try to find identical subexpressions. If found call
3659 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3660 precomputed value for the subexpression as KNOWN_RET. */
3661
3662 if (ARITHMETIC_P (x))
3663 {
3664 rtx x0 = XEXP (x, 0);
3665 rtx x1 = XEXP (x, 1);
3666
3667 /* Check the first level. */
3668 if (x0 == x1)
3669 return nonzero_bits1 (x, mode, x0, mode,
3670 cached_nonzero_bits (x0, mode, known_x,
3671 known_mode, known_ret));
3672
3673 /* Check the second level. */
3674 if (ARITHMETIC_P (x0)
3675 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3676 return nonzero_bits1 (x, mode, x1, mode,
3677 cached_nonzero_bits (x1, mode, known_x,
3678 known_mode, known_ret));
3679
3680 if (ARITHMETIC_P (x1)
3681 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3682 return nonzero_bits1 (x, mode, x0, mode,
3683 cached_nonzero_bits (x0, mode, known_x,
3684 known_mode, known_ret));
3685 }
3686
3687 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3688 }
3689
3690 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3691 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3692 is less useful. We can't allow both, because that results in exponential
3693 run time recursion. There is a nullstone testcase that triggered
3694 this. This macro avoids accidental uses of num_sign_bit_copies. */
3695 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3696
3697 /* Given an expression, X, compute which bits in X can be nonzero.
3698 We don't care about bits outside of those defined in MODE.
3699
3700 For most X this is simply GET_MODE_MASK (MODE), but if X is
3701 an arithmetic operation, we can do better. */
3702
3703 static unsigned HOST_WIDE_INT
3704 nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
3705 enum machine_mode known_mode,
3706 unsigned HOST_WIDE_INT known_ret)
3707 {
3708 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3709 unsigned HOST_WIDE_INT inner_nz;
3710 enum rtx_code code;
3711 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3712
3713 /* For floating-point and vector values, assume all bits are needed. */
3714 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode)
3715 || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
3716 return nonzero;
3717
3718 /* If X is wider than MODE, use its mode instead. */
3719 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3720 {
3721 mode = GET_MODE (x);
3722 nonzero = GET_MODE_MASK (mode);
3723 mode_width = GET_MODE_BITSIZE (mode);
3724 }
3725
3726 if (mode_width > HOST_BITS_PER_WIDE_INT)
3727 /* Our only callers in this case look for single bit values. So
3728 just return the mode mask. Those tests will then be false. */
3729 return nonzero;
3730
3731 #ifndef WORD_REGISTER_OPERATIONS
3732 /* If MODE is wider than X, but both are a single word for both the host
3733 and target machines, we can compute this from which bits of the
3734 object might be nonzero in its own mode, taking into account the fact
3735 that on many CISC machines, accessing an object in a wider mode
3736 causes the high-order bits to become undefined. So they are
3737 not known to be zero. */
3738
3739 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3740 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3741 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3742 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3743 {
3744 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3745 known_x, known_mode, known_ret);
3746 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3747 return nonzero;
3748 }
3749 #endif
3750
3751 code = GET_CODE (x);
3752 switch (code)
3753 {
3754 case REG:
3755 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3756 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3757 all the bits above ptr_mode are known to be zero. */
3758 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3759 && REG_POINTER (x))
3760 nonzero &= GET_MODE_MASK (ptr_mode);
3761 #endif
3762
3763 /* Include declared information about alignment of pointers. */
3764 /* ??? We don't properly preserve REG_POINTER changes across
3765 pointer-to-integer casts, so we can't trust it except for
3766 things that we know must be pointers. See execute/960116-1.c. */
3767 if ((x == stack_pointer_rtx
3768 || x == frame_pointer_rtx
3769 || x == arg_pointer_rtx)
3770 && REGNO_POINTER_ALIGN (REGNO (x)))
3771 {
3772 unsigned HOST_WIDE_INT alignment
3773 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3774
3775 #ifdef PUSH_ROUNDING
3776 /* If PUSH_ROUNDING is defined, it is possible for the
3777 stack to be momentarily aligned only to that amount,
3778 so we pick the least alignment. */
3779 if (x == stack_pointer_rtx && PUSH_ARGS)
3780 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3781 alignment);
3782 #endif
3783
3784 nonzero &= ~(alignment - 1);
3785 }
3786
3787 {
3788 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3789 rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3790 known_mode, known_ret,
3791 &nonzero_for_hook);
3792
3793 if (new_rtx)
3794 nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x,
3795 known_mode, known_ret);
3796
3797 return nonzero_for_hook;
3798 }
3799
3800 case CONST_INT:
3801 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3802 /* If X is negative in MODE, sign-extend the value. */
3803 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3804 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3805 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3806 #endif
3807
3808 return INTVAL (x);
3809
3810 case MEM:
3811 #ifdef LOAD_EXTEND_OP
3812 /* In many, if not most, RISC machines, reading a byte from memory
3813 zeros the rest of the register. Noticing that fact saves a lot
3814 of extra zero-extends. */
3815 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3816 nonzero &= GET_MODE_MASK (GET_MODE (x));
3817 #endif
3818 break;
3819
3820 case EQ: case NE:
3821 case UNEQ: case LTGT:
3822 case GT: case GTU: case UNGT:
3823 case LT: case LTU: case UNLT:
3824 case GE: case GEU: case UNGE:
3825 case LE: case LEU: case UNLE:
3826 case UNORDERED: case ORDERED:
3827 /* If this produces an integer result, we know which bits are set.
3828 Code here used to clear bits outside the mode of X, but that is
3829 now done above. */
3830 /* Mind that MODE is the mode the caller wants to look at this
3831 operation in, and not the actual operation mode. We can wind
3832 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3833 that describes the results of a vector compare. */
3834 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
3835 && mode_width <= HOST_BITS_PER_WIDE_INT)
3836 nonzero = STORE_FLAG_VALUE;
3837 break;
3838
3839 case NEG:
3840 #if 0
3841 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3842 and num_sign_bit_copies. */
3843 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3844 == GET_MODE_BITSIZE (GET_MODE (x)))
3845 nonzero = 1;
3846 #endif
3847
3848 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3849 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3850 break;
3851
3852 case ABS:
3853 #if 0
3854 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3855 and num_sign_bit_copies. */
3856 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3857 == GET_MODE_BITSIZE (GET_MODE (x)))
3858 nonzero = 1;
3859 #endif
3860 break;
3861
3862 case TRUNCATE:
3863 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3864 known_x, known_mode, known_ret)
3865 & GET_MODE_MASK (mode));
3866 break;
3867
3868 case ZERO_EXTEND:
3869 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3870 known_x, known_mode, known_ret);
3871 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3872 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3873 break;
3874
3875 case SIGN_EXTEND:
3876 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3877 Otherwise, show that all the bits in the outer mode but not in the
3878 inner mode may be nonzero. */
3879 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3880 known_x, known_mode, known_ret);
3881 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3882 {
3883 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3884 if (inner_nz
3885 & (((HOST_WIDE_INT) 1
3886 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3887 inner_nz |= (GET_MODE_MASK (mode)
3888 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3889 }
3890
3891 nonzero &= inner_nz;
3892 break;
3893
3894 case AND:
3895 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3896 known_x, known_mode, known_ret)
3897 & cached_nonzero_bits (XEXP (x, 1), mode,
3898 known_x, known_mode, known_ret);
3899 break;
3900
3901 case XOR: case IOR:
3902 case UMIN: case UMAX: case SMIN: case SMAX:
3903 {
3904 unsigned HOST_WIDE_INT nonzero0 =
3905 cached_nonzero_bits (XEXP (x, 0), mode,
3906 known_x, known_mode, known_ret);
3907
3908 /* Don't call nonzero_bits for the second time if it cannot change
3909 anything. */
3910 if ((nonzero & nonzero0) != nonzero)
3911 nonzero &= nonzero0
3912 | cached_nonzero_bits (XEXP (x, 1), mode,
3913 known_x, known_mode, known_ret);
3914 }
3915 break;
3916
3917 case PLUS: case MINUS:
3918 case MULT:
3919 case DIV: case UDIV:
3920 case MOD: case UMOD:
3921 /* We can apply the rules of arithmetic to compute the number of
3922 high- and low-order zero bits of these operations. We start by
3923 computing the width (position of the highest-order nonzero bit)
3924 and the number of low-order zero bits for each value. */
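/* For example, if each operand is known to fit in 8 bits
   (width0 = width1 = 8) with no known low-order zero bits, a
   MULT can set at most the low 16 bits (result_width = 16),
   while a PLUS can set at most the low 9 bits, allowing for a
   carry out of bit 7.  */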
3925 {
3926 unsigned HOST_WIDE_INT nz0 =
3927 cached_nonzero_bits (XEXP (x, 0), mode,
3928 known_x, known_mode, known_ret);
3929 unsigned HOST_WIDE_INT nz1 =
3930 cached_nonzero_bits (XEXP (x, 1), mode,
3931 known_x, known_mode, known_ret);
3932 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3933 int width0 = floor_log2 (nz0) + 1;
3934 int width1 = floor_log2 (nz1) + 1;
3935 int low0 = floor_log2 (nz0 & -nz0);
3936 int low1 = floor_log2 (nz1 & -nz1);
3937 HOST_WIDE_INT op0_maybe_minusp
3938 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3939 HOST_WIDE_INT op1_maybe_minusp
3940 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3941 unsigned int result_width = mode_width;
3942 int result_low = 0;
3943
3944 switch (code)
3945 {
3946 case PLUS:
3947 result_width = MAX (width0, width1) + 1;
3948 result_low = MIN (low0, low1);
3949 break;
3950 case MINUS:
3951 result_low = MIN (low0, low1);
3952 break;
3953 case MULT:
3954 result_width = width0 + width1;
3955 result_low = low0 + low1;
3956 break;
3957 case DIV:
3958 if (width1 == 0)
3959 break;
3960 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3961 result_width = width0;
3962 break;
3963 case UDIV:
3964 if (width1 == 0)
3965 break;
3966 result_width = width0;
3967 break;
3968 case MOD:
3969 if (width1 == 0)
3970 break;
3971 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3972 result_width = MIN (width0, width1);
3973 result_low = MIN (low0, low1);
3974 break;
3975 case UMOD:
3976 if (width1 == 0)
3977 break;
3978 result_width = MIN (width0, width1);
3979 result_low = MIN (low0, low1);
3980 break;
3981 default:
3982 gcc_unreachable ();
3983 }
3984
3985 if (result_width < mode_width)
3986 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3987
3988 if (result_low > 0)
3989 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3990
3991 #ifdef POINTERS_EXTEND_UNSIGNED
3992 /* If pointers extend unsigned and this is an addition or subtraction
3993 to a pointer in Pmode, all the bits above ptr_mode are known to be
3994 zero. */
3995 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3996 && (code == PLUS || code == MINUS)
3997 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3998 nonzero &= GET_MODE_MASK (ptr_mode);
3999 #endif
4000 }
4001 break;
4002
4003 case ZERO_EXTRACT:
4004 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4005 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
4006 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
4007 break;
4008
4009 case SUBREG:
4010 /* If this is a SUBREG formed for a promoted variable that has
4011 been zero-extended, we know that at least the high-order bits
4012 are zero, though others might be too. */
4013
4014 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
4015 nonzero = GET_MODE_MASK (GET_MODE (x))
4016 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
4017 known_x, known_mode, known_ret);
4018
4019 /* If the inner mode is a single word for both the host and target
4020 machines, we can compute this from which bits of the inner
4021 object might be nonzero. */
4022 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
4023 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4024 <= HOST_BITS_PER_WIDE_INT))
4025 {
4026 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
4027 known_x, known_mode, known_ret);
4028
4029 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
4030 /* If this is a typical RISC machine, we only have to worry
4031 about the way loads are extended. */
4032 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4033 ? (((nonzero
4034 & (((unsigned HOST_WIDE_INT) 1
4035 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
4036 != 0))
4037 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
4038 || !MEM_P (SUBREG_REG (x)))
4039 #endif
4040 {
4041 /* On many CISC machines, accessing an object in a wider mode
4042 causes the high-order bits to become undefined. So they are
4043 not known to be zero. */
4044 if (GET_MODE_SIZE (GET_MODE (x))
4045 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4046 nonzero |= (GET_MODE_MASK (GET_MODE (x))
4047 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
4048 }
4049 }
4050 break;
4051
4052 case ASHIFTRT:
4053 case LSHIFTRT:
4054 case ASHIFT:
4055 case ROTATE:
4056 /* The nonzero bits are in two classes: any bits within MODE
4057 that aren't in GET_MODE (x) are always significant. The rest of the
4058 nonzero bits are those that are significant in the operand of
4059 the shift when shifted the appropriate number of bits. This
4060 shows that high-order bits are cleared by the right shift and
4061 low-order bits by left shifts. */
4062 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4063 && INTVAL (XEXP (x, 1)) >= 0
4064 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
4065 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (GET_MODE (x)))
4066 {
4067 enum machine_mode inner_mode = GET_MODE (x);
4068 unsigned int width = GET_MODE_BITSIZE (inner_mode);
4069 int count = INTVAL (XEXP (x, 1));
4070 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
4071 unsigned HOST_WIDE_INT op_nonzero =
4072 cached_nonzero_bits (XEXP (x, 0), mode,
4073 known_x, known_mode, known_ret);
4074 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
4075 unsigned HOST_WIDE_INT outer = 0;
4076
4077 if (mode_width > width)
4078 outer = (op_nonzero & nonzero & ~mode_mask);
4079
4080 if (code == LSHIFTRT)
4081 inner >>= count;
4082 else if (code == ASHIFTRT)
4083 {
4084 inner >>= count;
4085
4086 /* If the sign bit may have been nonzero before the shift, we
4087 need to mark all the places it could have been copied to
4088 by the shift as possibly nonzero. */
4089 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
4090 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
4091 }
4092 else if (code == ASHIFT)
4093 inner <<= count;
4094 else
4095 inner = ((inner << (count % width)
4096 | (inner >> (width - (count % width)))) & mode_mask);
4097
4098 nonzero &= (outer | inner);
4099 }
4100 break;
4101
4102 case FFS:
4103 case POPCOUNT:
4104 /* This is at most the number of bits in the mode. */
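/* For example, with mode_width == 32 this is (2 << floor_log2 (32)) - 1
== (2 << 5) - 1 == 0x3f, enough to represent any count in [0, 32]. */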
4105 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
4106 break;
4107
4108 case CLZ:
4109 /* If CLZ has a known value at zero, then the nonzero bits are
4110 that value, plus the number of bits in the mode minus one. */
4111 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4112 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4113 else
4114 nonzero = -1;
4115 break;
4116
4117 case CTZ:
4118 /* If CTZ has a known value at zero, then the nonzero bits are
4119 that value, plus the number of bits in the mode minus one. */
4120 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4121 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4122 else
4123 nonzero = -1;
4124 break;
4125
4126 case PARITY:
4127 nonzero = 1;
4128 break;
4129
4130 case IF_THEN_ELSE:
4131 {
4132 unsigned HOST_WIDE_INT nonzero_true =
4133 cached_nonzero_bits (XEXP (x, 1), mode,
4134 known_x, known_mode, known_ret);
4135
4136 /* Don't call nonzero_bits a second time if it cannot change
4137 anything. */
4138 if ((nonzero & nonzero_true) != nonzero)
4139 nonzero &= nonzero_true
4140 | cached_nonzero_bits (XEXP (x, 2), mode,
4141 known_x, known_mode, known_ret);
4142 }
4143 break;
4144
4145 default:
4146 break;
4147 }
4148
4149 return nonzero;
4150 }
4151
4152 /* See the macro definition above. */
4153 #undef cached_num_sign_bit_copies
4154
4155
4156 /* The function cached_num_sign_bit_copies is a wrapper around
4157 num_sign_bit_copies1. It avoids exponential behavior in
4158 num_sign_bit_copies1 when X has identical subexpressions on the
4159 first or the second level. */
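/* For example, for (plus Y Y) where both operands are the same rtx Y,
the copies for Y are computed once and then passed to
num_sign_bit_copies1 as KNOWN_X/KNOWN_RET instead of being recomputed
along both branches of the recursion. */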
4160
4161 static unsigned int
4162 cached_num_sign_bit_copies (const_rtx x, enum machine_mode mode, const_rtx known_x,
4163 enum machine_mode known_mode,
4164 unsigned int known_ret)
4165 {
4166 if (x == known_x && mode == known_mode)
4167 return known_ret;
4168
4169 /* Try to find identical subexpressions. If found call
4170 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4171 the precomputed value for the subexpression as KNOWN_RET. */
4172
4173 if (ARITHMETIC_P (x))
4174 {
4175 rtx x0 = XEXP (x, 0);
4176 rtx x1 = XEXP (x, 1);
4177
4178 /* Check the first level. */
4179 if (x0 == x1)
4180 return
4181 num_sign_bit_copies1 (x, mode, x0, mode,
4182 cached_num_sign_bit_copies (x0, mode, known_x,
4183 known_mode,
4184 known_ret));
4185
4186 /* Check the second level. */
4187 if (ARITHMETIC_P (x0)
4188 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4189 return
4190 num_sign_bit_copies1 (x, mode, x1, mode,
4191 cached_num_sign_bit_copies (x1, mode, known_x,
4192 known_mode,
4193 known_ret));
4194
4195 if (ARITHMETIC_P (x1)
4196 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4197 return
4198 num_sign_bit_copies1 (x, mode, x0, mode,
4199 cached_num_sign_bit_copies (x0, mode, known_x,
4200 known_mode,
4201 known_ret));
4202 }
4203
4204 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4205 }
4206
4207 /* Return the number of bits at the high-order end of X that are known to
4208 be equal to the sign bit. X will be used in mode MODE; if MODE is
4209 VOIDmode, X will be used in its own mode. The returned value will always
4210 be between 1 and the number of bits in MODE. */
4211
4212 static unsigned int
4213 num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
4214 enum machine_mode known_mode,
4215 unsigned int known_ret)
4216 {
4217 enum rtx_code code = GET_CODE (x);
4218 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
4219 int num0, num1, result;
4220 unsigned HOST_WIDE_INT nonzero;
4221
4222 /* If we weren't given a mode, use the mode of X. If the mode is still
4223 VOIDmode, we don't know anything. Likewise if one of the modes is
4224 floating-point. */
4225
4226 if (mode == VOIDmode)
4227 mode = GET_MODE (x);
4228
4229 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x))
4230 || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
4231 return 1;
4232
4233 /* For a smaller object, just ignore the high bits. */
4234 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
4235 {
4236 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4237 known_x, known_mode, known_ret);
4238 return MAX (1,
4239 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
4240 }
4241
4242 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
4243 {
4244 #ifndef WORD_REGISTER_OPERATIONS
4245 /* If this machine does not do all register operations on the entire
4246 register and MODE is wider than the mode of X, we can say nothing
4247 at all about the high-order bits. */
4248 return 1;
4249 #else
4250 /* Likewise on machines that do, if the mode of the object is smaller
4251 than a word and loads of that size don't sign extend, we can say
4252 nothing about the high order bits. */
4253 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
4254 #ifdef LOAD_EXTEND_OP
4255 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4256 #endif
4257 )
4258 return 1;
4259 #endif
4260 }
4261
4262 switch (code)
4263 {
4264 case REG:
4265
4266 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4267 /* If pointers extend signed and this is a pointer in Pmode, say that
4268 all the bits above ptr_mode are known to be sign bit copies. */
4269 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
4270 && REG_POINTER (x))
4271 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
4272 #endif
4273
4274 {
4275 unsigned int copies_for_hook = 1, copies = 1;
4276 rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4277 known_mode, known_ret,
4278 &copies_for_hook);
4279
4280 if (new_rtx)
4281 copies = cached_num_sign_bit_copies (new_rtx, mode, known_x,
4282 known_mode, known_ret);
4283
4284 if (copies > 1 || copies_for_hook > 1)
4285 return MAX (copies, copies_for_hook);
4286
4287 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4288 }
4289 break;
4290
4291 case MEM:
4292 #ifdef LOAD_EXTEND_OP
4293 /* Some RISC machines sign-extend all loads smaller than a word. */
4294 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4295 return MAX (1, ((int) bitwidth
4296 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4297 #endif
4298 break;
4299
4300 case CONST_INT:
4301 /* If the constant is negative, take its 1's complement and remask.
4302 Then see how many zero bits we have. */
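/* For example, in a 32-bit mode the constant -4 is 0xfffffffc; its
complement under the mode mask is 3, floor_log2 (3) == 1, so we
return 32 - 1 - 1 == 30: bits 31 down to 2 all equal the sign bit. */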
4303 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4304 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4305 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4306 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4307
4308 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4309
4310 case SUBREG:
4311 /* If this is a SUBREG for a promoted object that is sign-extended
4312 and we are looking at it in a wider mode, we know that at least the
4313 high-order bits are sign bit copies. */
4314
4315 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4316 {
4317 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4318 known_x, known_mode, known_ret);
4319 return MAX ((int) bitwidth
4320 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4321 num0);
4322 }
4323
4324 /* For a smaller object, just ignore the high bits. */
4325 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4326 {
4327 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4328 known_x, known_mode, known_ret);
4329 return MAX (1, (num0
4330 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4331 - bitwidth)));
4332 }
4333
4334 #ifdef WORD_REGISTER_OPERATIONS
4335 #ifdef LOAD_EXTEND_OP
4336 /* For paradoxical SUBREGs on machines where all register operations
4337 affect the entire register, just look inside. Note that we are
4338 passing MODE to the recursive call, so the number of sign bit copies
4339 will remain relative to that mode, not the inner mode. */
4340
4341 /* This works only if loads sign extend. Otherwise, if we get a
4342 reload for the inner part, it may be loaded from the stack, and
4343 then we lose all sign bit copies that existed before the store
4344 to the stack. */
4345
4346 if ((GET_MODE_SIZE (GET_MODE (x))
4347 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4348 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4349 && MEM_P (SUBREG_REG (x)))
4350 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4351 known_x, known_mode, known_ret);
4352 #endif
4353 #endif
4354 break;
4355
4356 case SIGN_EXTRACT:
4357 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4358 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4359 break;
4360
4361 case SIGN_EXTEND:
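/* For example, (sign_extend:SI (reg:QI)) used in a 32-bit mode yields
at least 32 - 8 + 1 == 25 copies (assuming the usual 8-bit QImode),
plus whatever extra copies the QImode operand already had. */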
4362 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4363 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4364 known_x, known_mode, known_ret));
4365
4366 case TRUNCATE:
4367 /* For a smaller object, just ignore the high bits. */
4368 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4369 known_x, known_mode, known_ret);
4370 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4371 - bitwidth)));
4372
4373 case NOT:
4374 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4375 known_x, known_mode, known_ret);
4376
4377 case ROTATE: case ROTATERT:
4378 /* If we are rotating left by a number of bits less than the number
4379 of sign bit copies, we can just subtract that amount from the
4380 number. */
4381 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4382 && INTVAL (XEXP (x, 1)) >= 0
4383 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4384 {
4385 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4386 known_x, known_mode, known_ret);
4387 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4388 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4389 }
4390 break;
4391
4392 case NEG:
4393 /* In general, this subtracts one sign bit copy. But if the value
4394 is known to be positive, the number of sign bit copies is the
4395 same as that of the input. Finally, if the input has just one bit
4396 that might be nonzero, all the bits are copies of the sign bit. */
4397 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4398 known_x, known_mode, known_ret);
4399 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4400 return num0 > 1 ? num0 - 1 : 1;
4401
4402 nonzero = nonzero_bits (XEXP (x, 0), mode);
4403 if (nonzero == 1)
4404 return bitwidth;
4405
4406 if (num0 > 1
4407 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4408 num0--;
4409
4410 return num0;
4411
4412 case IOR: case AND: case XOR:
4413 case SMIN: case SMAX: case UMIN: case UMAX:
4414 /* Logical operations will preserve the number of sign-bit copies.
4415 MIN and MAX operations always return one of the operands. */
4416 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4417 known_x, known_mode, known_ret);
4418 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4419 known_x, known_mode, known_ret);
4420
4421 /* If the second operand is a constant that clears the top bits, then
4422 regardless of the other term we are guaranteed to have at least
4423 num1 high-order zero bits. */
4424 if (code == AND
4425 && num1 > 1
4426 && bitwidth <= HOST_BITS_PER_WIDE_INT
4427 && GET_CODE (XEXP (x, 1)) == CONST_INT
4428 && !(INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4429 return num1;
4430
4431 /* Similarly for IOR when setting high-order bits. */
4432 if (code == IOR
4433 && num1 > 1
4434 && bitwidth <= HOST_BITS_PER_WIDE_INT
4435 && GET_CODE (XEXP (x, 1)) == CONST_INT
4436 && (INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4437 return num1;
4438
4439 return MIN (num0, num1);
4440
4441 case PLUS: case MINUS:
4442 /* For addition and subtraction, we can have a 1-bit carry. However,
4443 if we are subtracting 1 from a positive number, there will not
4444 be such a carry. Furthermore, if the positive number is known to
4445 be 0 or 1, we know the result is either -1 or 0. */
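/* For example, if nonzero_bits of XEXP (x, 0) is 1 the operand is 0
or 1, so x is -1 or 0 and we return the full bitwidth below. */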
4446
4447 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4448 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4449 {
4450 nonzero = nonzero_bits (XEXP (x, 0), mode);
4451 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4452 return (nonzero == 1 || nonzero == 0 ? bitwidth
4453 : bitwidth - floor_log2 (nonzero) - 1);
4454 }
4455
4456 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4457 known_x, known_mode, known_ret);
4458 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4459 known_x, known_mode, known_ret);
4460 result = MAX (1, MIN (num0, num1) - 1);
4461
4462 #ifdef POINTERS_EXTEND_UNSIGNED
4463 /* If pointers extend signed and this is an addition or subtraction
4464 to a pointer in Pmode, all the bits above ptr_mode are known to be
4465 sign bit copies. */
4466 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4467 && (code == PLUS || code == MINUS)
4468 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4469 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4470 - GET_MODE_BITSIZE (ptr_mode) + 1),
4471 result);
4472 #endif
4473 return result;
4474
4475 case MULT:
4476 /* The number of bits of the product is the sum of the number of
4477 bits of both terms. However, unless one of the terms is known
4478 to be positive, we must allow for an additional bit since negating
4479 a negative number can remove one sign bit copy. */
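/* For example, with bitwidth == 32, num0 == 20 and num1 == 15 the
operands need at most 12 and 17 value bits, so result == 32 - 12 - 17
== 3; if both operands might be negative we drop one more copy and
return 2. */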
4480
4481 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4482 known_x, known_mode, known_ret);
4483 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4484 known_x, known_mode, known_ret);
4485
4486 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4487 if (result > 0
4488 && (bitwidth > HOST_BITS_PER_WIDE_INT
4489 || (((nonzero_bits (XEXP (x, 0), mode)
4490 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4491 && ((nonzero_bits (XEXP (x, 1), mode)
4492 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4493 result--;
4494
4495 return MAX (1, result);
4496
4497 case UDIV:
4498 /* The result must be <= the first operand. If the first operand
4499 has the high bit set, we know nothing about the number of sign
4500 bit copies. */
4501 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4502 return 1;
4503 else if ((nonzero_bits (XEXP (x, 0), mode)
4504 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4505 return 1;
4506 else
4507 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4508 known_x, known_mode, known_ret);
4509
4510 case UMOD:
4511 /* The result must be <= the second operand. */
4512 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4513 known_x, known_mode, known_ret);
4514
4515 case DIV:
4516 /* Similar to unsigned division, except that we have to worry about
4517 the case where the divisor is negative, in which case the result
4518 may need one extra bit, so we drop one sign bit copy. */
4519 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4520 known_x, known_mode, known_ret);
4521 if (result > 1
4522 && (bitwidth > HOST_BITS_PER_WIDE_INT
4523 || (nonzero_bits (XEXP (x, 1), mode)
4524 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4525 result--;
4526
4527 return result;
4528
4529 case MOD:
4530 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4531 known_x, known_mode, known_ret);
4532 if (result > 1
4533 && (bitwidth > HOST_BITS_PER_WIDE_INT
4534 || (nonzero_bits (XEXP (x, 1), mode)
4535 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4536 result--;
4537
4538 return result;
4539
4540 case ASHIFTRT:
4541 /* An arithmetic right shift by a constant adds that many bits
4542 equal to the sign bit. */
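/* For example, a 32-bit value with 5 known sign bit copies shifted
right arithmetically by 10 has MIN (32, 5 + 10) == 15 copies. */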
4543 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4544 known_x, known_mode, known_ret);
4545 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4546 && INTVAL (XEXP (x, 1)) > 0
4547 && INTVAL (XEXP (x, 1)) < GET_MODE_BITSIZE (GET_MODE (x)))
4548 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4549
4550 return num0;
4551
4552 case ASHIFT:
4553 /* Left shifts destroy copies. */
4554 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4555 || INTVAL (XEXP (x, 1)) < 0
4556 || INTVAL (XEXP (x, 1)) >= (int) bitwidth
4557 || INTVAL (XEXP (x, 1)) >= GET_MODE_BITSIZE (GET_MODE (x)))
4558 return 1;
4559
4560 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4561 known_x, known_mode, known_ret);
4562 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4563
4564 case IF_THEN_ELSE:
4565 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4566 known_x, known_mode, known_ret);
4567 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4568 known_x, known_mode, known_ret);
4569 return MIN (num0, num1);
4570
4571 case EQ: case NE: case GE: case GT: case LE: case LT:
4572 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4573 case GEU: case GTU: case LEU: case LTU:
4574 case UNORDERED: case ORDERED:
4575 /* If STORE_FLAG_VALUE is negative, take its 1's complement and remask.
4576 Then see how many zero bits we have. */
4577 nonzero = STORE_FLAG_VALUE;
4578 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4579 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4580 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4581
4582 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4583
4584 default:
4585 break;
4586 }
4587
4588 /* If we haven't been able to figure it out by one of the above rules,
4589 see if some of the high-order bits are known to be zero. If so,
4590 count those bits; they are all copies of the (zero) sign bit. If we
4591 can't safely compute the mask for this mode, return 1. */
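/* For example, if nonzero_bits returns 0xff for a 32-bit MODE, the sign
bit is zero and we return 32 - floor_log2 (0xff) - 1 == 24 copies. */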
4592
4593 bitwidth = GET_MODE_BITSIZE (mode);
4594 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4595 return 1;
4596
4597 nonzero = nonzero_bits (x, mode);
4598 return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4599 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4600 }
4601
4602 /* Calculate the rtx_cost of a single instruction. A return value of
4603 zero indicates an instruction pattern without a known cost. */
4604
4605 int
4606 insn_rtx_cost (rtx pat, bool speed)
4607 {
4608 int i, cost;
4609 rtx set;
4610
4611 /* Extract the single set rtx from the instruction pattern.
4612 We can't use single_set since we only have the pattern. */
4613 if (GET_CODE (pat) == SET)
4614 set = pat;
4615 else if (GET_CODE (pat) == PARALLEL)
4616 {
4617 set = NULL_RTX;
4618 for (i = 0; i < XVECLEN (pat, 0); i++)
4619 {
4620 rtx x = XVECEXP (pat, 0, i);
4621 if (GET_CODE (x) == SET)
4622 {
4623 if (set)
4624 return 0;
4625 set = x;
4626 }
4627 }
4628 if (!set)
4629 return 0;
4630 }
4631 else
4632 return 0;
4633
4634 cost = rtx_cost (SET_SRC (set), SET, speed);
4635 return cost > 0 ? cost : COSTS_N_INSNS (1);
4636 }
4637
4638 /* Given an insn INSN and condition COND, return the condition in a
4639 canonical form to simplify testing by callers. Specifically:
4640
4641 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4642 (2) Both operands will be machine operands; (cc0) will have been replaced.
4643 (3) If an operand is a constant, it will be the second operand.
4644 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4645 for GE, GEU, and LEU.
4646
4647 If the condition cannot be understood, or is an inequality floating-point
4648 comparison which needs to be reversed, 0 will be returned.
4649
4650 If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4651
4652 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4653 insn used in locating the condition was found. If a replacement test
4654 of the condition is desired, it should be placed in front of that
4655 insn and we will be sure that the inputs are still valid.
4656
4657 If WANT_REG is nonzero, we wish the condition to be relative to that
4658 register, if possible. Therefore, do not canonicalize the condition
4659 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
4660 to be a compare to a CC mode register.
4661
4662 If VALID_AT_INSN_P is nonzero, the condition must be valid at both
4663 *EARLIEST and INSN. */
4664
4665 rtx
4666 canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4667 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4668 {
4669 enum rtx_code code;
4670 rtx prev = insn;
4671 const_rtx set;
4672 rtx tem;
4673 rtx op0, op1;
4674 int reverse_code = 0;
4675 enum machine_mode mode;
4676 basic_block bb = BLOCK_FOR_INSN (insn);
4677
4678 code = GET_CODE (cond);
4679 mode = GET_MODE (cond);
4680 op0 = XEXP (cond, 0);
4681 op1 = XEXP (cond, 1);
4682
4683 if (reverse)
4684 code = reversed_comparison_code (cond, insn);
4685 if (code == UNKNOWN)
4686 return 0;
4687
4688 if (earliest)
4689 *earliest = insn;
4690
4691 /* If we are comparing a register with zero, see if the register is set
4692 in the previous insn to a COMPARE or a comparison operation. Perform
4693 the same tests, as a function of STORE_FLAG_VALUE, that
4694 find_comparison_args in cse.c performs. */
4695
4696 while ((GET_RTX_CLASS (code) == RTX_COMPARE
4697 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4698 && op1 == CONST0_RTX (GET_MODE (op0))
4699 && op0 != want_reg)
4700 {
4701 /* Set nonzero when we find something of interest. */
4702 rtx x = 0;
4703
4704 #ifdef HAVE_cc0
4705 /* If comparison with cc0, import actual comparison from compare
4706 insn. */
4707 if (op0 == cc0_rtx)
4708 {
4709 if ((prev = prev_nonnote_insn (prev)) == 0
4710 || !NONJUMP_INSN_P (prev)
4711 || (set = single_set (prev)) == 0
4712 || SET_DEST (set) != cc0_rtx)
4713 return 0;
4714
4715 op0 = SET_SRC (set);
4716 op1 = CONST0_RTX (GET_MODE (op0));
4717 if (earliest)
4718 *earliest = prev;
4719 }
4720 #endif
4721
4722 /* If this is a COMPARE, pick up the two things being compared. */
4723 if (GET_CODE (op0) == COMPARE)
4724 {
4725 op1 = XEXP (op0, 1);
4726 op0 = XEXP (op0, 0);
4727 continue;
4728 }
4729 else if (!REG_P (op0))
4730 break;
4731
4732 /* Go back to the previous insn. Stop if it is not an INSN. We also
4733 stop if it isn't a single set or if it has a REG_INC note because
4734 we don't want to bother dealing with it. */
4735
4736 if ((prev = prev_nonnote_insn (prev)) == 0
4737 || !NONJUMP_INSN_P (prev)
4738 || FIND_REG_INC_NOTE (prev, NULL_RTX)
4739 /* In cfglayout mode, there do not have to be labels at the
4740 beginning of a block, or jumps at the end, so the previous
4741 conditions would not stop us when we reach the bb boundary. */
4742 || BLOCK_FOR_INSN (prev) != bb)
4743 break;
4744
4745 set = set_of (op0, prev);
4746
4747 if (set
4748 && (GET_CODE (set) != SET
4749 || !rtx_equal_p (SET_DEST (set), op0)))
4750 break;
4751
4752 /* If this is setting OP0, get what it sets it to if it looks
4753 relevant. */
4754 if (set)
4755 {
4756 enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4757 #ifdef FLOAT_STORE_FLAG_VALUE
4758 REAL_VALUE_TYPE fsfv;
4759 #endif
4760
4761 /* ??? We may not combine comparisons done in a CCmode with
4762 comparisons not done in a CCmode. This is to aid targets
4763 like Alpha that have an IEEE compliant EQ instruction, and
4764 a non-IEEE compliant BEQ instruction. The use of CCmode is
4765 actually artificial, simply to prevent the combination, but
4766 should not affect other platforms.
4767
4768 However, we must allow VOIDmode comparisons to match either
4769 CCmode or non-CCmode comparison, because some ports have
4770 modeless comparisons inside branch patterns.
4771
4772 ??? This mode check should perhaps look more like the mode check
4773 in simplify_comparison in combine. */
4774
4775 if ((GET_CODE (SET_SRC (set)) == COMPARE
4776 || (((code == NE
4777 || (code == LT
4778 && GET_MODE_CLASS (inner_mode) == MODE_INT
4779 && (GET_MODE_BITSIZE (inner_mode)
4780 <= HOST_BITS_PER_WIDE_INT)
4781 && (STORE_FLAG_VALUE
4782 & ((HOST_WIDE_INT) 1
4783 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4784 #ifdef FLOAT_STORE_FLAG_VALUE
4785 || (code == LT
4786 && SCALAR_FLOAT_MODE_P (inner_mode)
4787 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4788 REAL_VALUE_NEGATIVE (fsfv)))
4789 #endif
4790 ))
4791 && COMPARISON_P (SET_SRC (set))))
4792 && (((GET_MODE_CLASS (mode) == MODE_CC)
4793 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4794 || mode == VOIDmode || inner_mode == VOIDmode))
4795 x = SET_SRC (set);
4796 else if (((code == EQ
4797 || (code == GE
4798 && (GET_MODE_BITSIZE (inner_mode)
4799 <= HOST_BITS_PER_WIDE_INT)
4800 && GET_MODE_CLASS (inner_mode) == MODE_INT
4801 && (STORE_FLAG_VALUE
4802 & ((HOST_WIDE_INT) 1
4803 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4804 #ifdef FLOAT_STORE_FLAG_VALUE
4805 || (code == GE
4806 && SCALAR_FLOAT_MODE_P (inner_mode)
4807 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4808 REAL_VALUE_NEGATIVE (fsfv)))
4809 #endif
4810 ))
4811 && COMPARISON_P (SET_SRC (set))
4812 && (((GET_MODE_CLASS (mode) == MODE_CC)
4813 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4814 || mode == VOIDmode || inner_mode == VOIDmode))
4815
4816 {
4817 reverse_code = 1;
4818 x = SET_SRC (set);
4819 }
4820 else
4821 break;
4822 }
4823
4824 else if (reg_set_p (op0, prev))
4825 /* If this sets OP0, but not directly, we have to give up. */
4826 break;
4827
4828 if (x)
4829 {
4830 /* If the caller is expecting the condition to be valid at INSN,
4831 make sure X doesn't change before INSN. */
4832 if (valid_at_insn_p)
4833 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
4834 break;
4835 if (COMPARISON_P (x))
4836 code = GET_CODE (x);
4837 if (reverse_code)
4838 {
4839 code = reversed_comparison_code (x, prev);
4840 if (code == UNKNOWN)
4841 return 0;
4842 reverse_code = 0;
4843 }
4844
4845 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4846 if (earliest)
4847 *earliest = prev;
4848 }
4849 }
4850
4851 /* If the constant is first, put it last. */
4852 if (CONSTANT_P (op0))
4853 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
4854
4855 /* If OP0 is the result of a comparison, we weren't able to find what
4856 was really being compared, so fail. */
4857 if (!allow_cc_mode
4858 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
4859 return 0;
4860
4861 /* Canonicalize any ordered comparison with integers involving equality
4862 if we can do computations in the relevant mode and we do not
4863 overflow. */
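/* For example, (le x 5) becomes (lt x 6) and (geu x 1) becomes
(gtu x 0), provided the adjusted constant does not wrap around in the
mode of op0. */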
4864
4865 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
4866 && GET_CODE (op1) == CONST_INT
4867 && GET_MODE (op0) != VOIDmode
4868 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
4869 {
4870 HOST_WIDE_INT const_val = INTVAL (op1);
4871 unsigned HOST_WIDE_INT uconst_val = const_val;
4872 unsigned HOST_WIDE_INT max_val
4873 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
4874
4875 switch (code)
4876 {
4877 case LE:
4878 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
4879 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
4880 break;
4881
4882 /* When cross-compiling, const_val might be sign-extended from
4883 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT. */
4884 case GE:
4885 if ((HOST_WIDE_INT) (const_val & max_val)
4886 != (((HOST_WIDE_INT) 1
4887 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
4888 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
4889 break;
4890
4891 case LEU:
4892 if (uconst_val < max_val)
4893 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
4894 break;
4895
4896 case GEU:
4897 if (uconst_val != 0)
4898 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
4899 break;
4900
4901 default:
4902 break;
4903 }
4904 }
4905
4906 /* Never return CC0; return zero instead. */
4907 if (CC0_P (op0))
4908 return 0;
4909
4910 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
4911 }
4912
4913 /* Given a jump insn JUMP, return the condition that will cause it to branch
4914 to its JUMP_LABEL. If the condition cannot be understood, or is an
4915 inequality floating-point comparison which needs to be reversed, 0 will
4916 be returned.
4917
4918 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4919 insn used in locating the condition was found. If a replacement test
4920 of the condition is desired, it should be placed in front of that
4921 insn and we will be sure that the inputs are still valid. If EARLIEST
4922 is null, the returned condition will be valid at INSN.
4923
4924 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
4925 compare CC mode register.
4926
4927 VALID_AT_INSN_P is the same as for canonicalize_condition. */
4928
4929 rtx
4930 get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
4931 {
4932 rtx cond;
4933 int reverse;
4934 rtx set;
4935
4936 /* If this is not a standard conditional jump, we can't parse it. */
4937 if (!JUMP_P (jump)
4938 || ! any_condjump_p (jump))
4939 return 0;
4940 set = pc_set (jump);
4941
4942 cond = XEXP (SET_SRC (set), 0);
4943
4944 /* If this branches to JUMP_LABEL when the condition is false, reverse
4945 the condition. */
4946 reverse
4947 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4948 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
4949
4950 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
4951 allow_cc_mode, valid_at_insn_p);
4952 }
4953
4954 /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
4955 TARGET_MODE_REP_EXTENDED.
4956
4957 Note that we assume that the property of
4958 TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
4959 narrower than mode B. I.e., if A is a mode narrower than B then in
4960 order to be able to operate on it in mode B, mode A needs to
4961 satisfy the requirements set by the representation of mode B. */
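/* For example, on a target where QImode values are kept sign-extended
when held in HImode (targetm.mode_rep_extended (QImode, HImode) ==
SIGN_EXTEND, with the usual 8-bit and 16-bit sizes), the loops below
record num_sign_bit_copies_in_rep[HImode][QImode] == 8. */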
4962
4963 static void
4964 init_num_sign_bit_copies_in_rep (void)
4965 {
4966 enum machine_mode mode, in_mode;
4967
4968 for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
4969 in_mode = GET_MODE_WIDER_MODE (in_mode))
4970 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
4971 mode = GET_MODE_WIDER_MODE (mode))
4972 {
4973 enum machine_mode i;
4974
4975 /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
4976 extends to the next widest mode. */
4977 gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
4978 || GET_MODE_WIDER_MODE (mode) == in_mode);
4979
4980 /* We are in in_mode. Count how many bits outside of mode
4981 have to be copies of the sign-bit. */
4982 for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
4983 {
4984 enum machine_mode wider = GET_MODE_WIDER_MODE (i);
4985
4986 if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
4987 /* We can only check sign-bit copies starting from the
4988 top-bit. In order to be able to check the bits we
4989 have already seen we pretend that subsequent bits
4990 have to be sign-bit copies too. */
4991 || num_sign_bit_copies_in_rep [in_mode][mode])
4992 num_sign_bit_copies_in_rep [in_mode][mode]
4993 += GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i);
4994 }
4995 }
4996 }
4997
4998 /* Suppose that truncation from the machine mode of X to MODE is not a
4999 no-op. See if there is anything special about X so that we can
5000 assume it already contains a truncated value of MODE. */
5001
5002 bool
5003 truncated_to_mode (enum machine_mode mode, const_rtx x)
5004 {
5005 /* This register has already been used in MODE without explicit
5006 truncation. */
5007 if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
5008 return true;
5009
5010 /* See if we already satisfy the requirements of MODE. If yes we
5011 can just switch to MODE. */
5012 if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
5013 && (num_sign_bit_copies (x, GET_MODE (x))
5014 >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
5015 return true;
5016
5017 return false;
5018 }
5019
5020 /* Initialize non_rtx_starting_operands, which is used to speed up
5021 for_each_rtx. */
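/* For example, PLUS has format "ee", so its first rtx operand is at
position 0, while CONST_INT has format "w" and gets -1 because it has
no rtx operands. */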
5022 void
5023 init_rtlanal (void)
5024 {
5025 int i;
5026 for (i = 0; i < NUM_RTX_CODE; i++)
5027 {
5028 const char *format = GET_RTX_FORMAT (i);
5029 const char *first = strpbrk (format, "eEV");
5030 non_rtx_starting_operands[i] = first ? first - format : -1;
5031 }
5032
5033 init_num_sign_bit_copies_in_rep ();
5034 }
5035
5036 /* Check whether this is a constant pool constant. */
5037 bool
5038 constant_pool_constant_p (rtx x)
5039 {
5040 x = avoid_constant_pool_reference (x);
5041 return GET_CODE (x) == CONST_DOUBLE;
5042 }
5043