comparison gcc/rtlanal.c @ 131:84e7813d76e9

gcc-8.2
author mir3636
date Thu, 25 Oct 2018 07:37:49 +0900
parents 04ced10e8804
children 1830386684a0
1 /* Analyze RTL for GNU compiler. 1 /* Analyze RTL for GNU compiler.
2 Copyright (C) 1987-2017 Free Software Foundation, Inc. 2 Copyright (C) 1987-2018 Free Software Foundation, Inc.
3 3
4 This file is part of GCC. 4 This file is part of GCC.
5 5
6 GCC is free software; you can redistribute it and/or modify it under 6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free 7 the terms of the GNU General Public License as published by the Free
33 #include "regs.h" 33 #include "regs.h"
34 #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */ 34 #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
35 #include "recog.h" 35 #include "recog.h"
36 #include "addresses.h" 36 #include "addresses.h"
37 #include "rtl-iter.h" 37 #include "rtl-iter.h"
38 #include "hard-reg-set.h"
38 39
39 /* Forward declarations */ 40 /* Forward declarations */
40 static void set_of_1 (rtx, const_rtx, void *); 41 static void set_of_1 (rtx, const_rtx, void *);
41 static bool covers_regno_p (const_rtx, unsigned int); 42 static bool covers_regno_p (const_rtx, unsigned int);
42 static bool covers_regno_no_parallel_p (const_rtx, unsigned int); 43 static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
342 343
343 /* Compute an approximation for the offset between the register 344 /* Compute an approximation for the offset between the register
344 FROM and TO for the current function, as it was at the start 345 FROM and TO for the current function, as it was at the start
345 of the routine. */ 346 of the routine. */
346 347
347 static HOST_WIDE_INT 348 static poly_int64
348 get_initial_register_offset (int from, int to) 349 get_initial_register_offset (int from, int to)
349 { 350 {
350 static const struct elim_table_t 351 static const struct elim_table_t
351 { 352 {
352 const int from; 353 const int from;
353 const int to; 354 const int to;
354 } table[] = ELIMINABLE_REGS; 355 } table[] = ELIMINABLE_REGS;
355 HOST_WIDE_INT offset1, offset2; 356 poly_int64 offset1, offset2;
356 unsigned int i, j; 357 unsigned int i, j;
357 358
358 if (to == from) 359 if (to == from)
359 return 0; 360 return 0;
360 361
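The hunk above is part of the HOST_WIDE_INT-to-poly_int64 conversion for variable-length vector targets such as AArch64 SVE: a poly_int64 is conceptually a degree-1 polynomial c0 + c1*N in a runtime-only indeterminate N, so sizes and frame offsets can depend on the vector length. Comparisons therefore come in "known" (true for every N) and "maybe" (true for some N) forms. Below is a minimal standalone model of that idea; all toy_* names are invented for illustration, and GCC's real poly-int.h is a C++ template with a target-defined coefficient count.

#include <stdbool.h>
#include <stdio.h>

/* Toy poly_int: value = c0 + c1 * N for an unknown runtime N >= 0.  */
typedef struct { long c0, c1; } toy_poly;

/* known_eq: equal for every possible N.  */
static bool toy_known_eq (toy_poly a, toy_poly b)
{
  return a.c0 == b.c0 && a.c1 == b.c1;
}

/* maybe_ne: unequal for at least one N.  */
static bool toy_maybe_ne (toy_poly a, toy_poly b)
{
  return !toy_known_eq (a, b);
}

/* known_le: a <= b for every N >= 0, i.e. both coefficients ordered.  */
static bool toy_known_le (toy_poly a, toy_poly b)
{
  return a.c0 <= b.c0 && a.c1 <= b.c1;
}

int main (void)
{
  toy_poly four = { 4, 0 };    /* compile-time constant 4 */
  toy_poly vec = { 16, 16 };   /* e.g. an SVE vector: 16 + 16N bytes */
  printf ("known_le (4, vec) = %d\n", toy_known_le (four, vec));  /* 1 */
  printf ("known_le (vec, 4) = %d\n", toy_known_le (vec, four));  /* 0 */
  printf ("maybe_ne (4, vec) = %d\n", toy_maybe_ne (four, vec));  /* 1 */
  return 0;
}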
455 bytes can cause a trap. MODE is the mode of the MEM (not that of X) and 456 bytes can cause a trap. MODE is the mode of the MEM (not that of X) and
456 UNALIGNED_MEMS controls whether nonzero is returned for unaligned memory 457 UNALIGNED_MEMS controls whether nonzero is returned for unaligned memory
457 references on strict alignment machines. */ 458 references on strict alignment machines. */
458 459
459 static int 460 static int
460 rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size, 461 rtx_addr_can_trap_p_1 (const_rtx x, poly_int64 offset, poly_int64 size,
461 machine_mode mode, bool unaligned_mems) 462 machine_mode mode, bool unaligned_mems)
462 { 463 {
463 enum rtx_code code = GET_CODE (x); 464 enum rtx_code code = GET_CODE (x);
465 gcc_checking_assert (mode == BLKmode || known_size_p (size));
466 poly_int64 const_x1;
464 467
465 /* The offset must be a multiple of the mode size if we are considering 468 /* The offset must be a multiple of the mode size if we are considering
466 unaligned memory references on strict alignment machines. */ 469 unaligned memory references on strict alignment machines. */
467 if (STRICT_ALIGNMENT && unaligned_mems && GET_MODE_SIZE (mode) != 0) 470 if (STRICT_ALIGNMENT && unaligned_mems && mode != BLKmode)
468 { 471 {
469 HOST_WIDE_INT actual_offset = offset; 472 poly_int64 actual_offset = offset;
470 473
471 #ifdef SPARC_STACK_BOUNDARY_HACK 474 #ifdef SPARC_STACK_BOUNDARY_HACK
472 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than 475 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
473 the real alignment of %sp. However, when it does this, the 476 the real alignment of %sp. However, when it does this, the
474 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */ 477 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
475 if (SPARC_STACK_BOUNDARY_HACK 478 if (SPARC_STACK_BOUNDARY_HACK
476 && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx)) 479 && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
477 actual_offset -= STACK_POINTER_OFFSET; 480 actual_offset -= STACK_POINTER_OFFSET;
478 #endif 481 #endif
479 482
480 if (actual_offset % GET_MODE_SIZE (mode) != 0) 483 if (!multiple_p (actual_offset, GET_MODE_SIZE (mode)))
481 return 1; 484 return 1;
482 } 485 }
483 486
484 switch (code) 487 switch (code)
485 { 488 {
487 if (SYMBOL_REF_WEAK (x)) 490 if (SYMBOL_REF_WEAK (x))
488 return 1; 491 return 1;
489 if (!CONSTANT_POOL_ADDRESS_P (x) && !SYMBOL_REF_FUNCTION_P (x)) 492 if (!CONSTANT_POOL_ADDRESS_P (x) && !SYMBOL_REF_FUNCTION_P (x))
490 { 493 {
491 tree decl; 494 tree decl;
492 HOST_WIDE_INT decl_size; 495 poly_int64 decl_size;
493 496
494 if (offset < 0) 497 if (maybe_lt (offset, 0))
495 return 1; 498 return 1;
496 if (size == 0) 499 if (!known_size_p (size))
497 size = GET_MODE_SIZE (mode); 500 return maybe_ne (offset, 0);
498 if (size == 0)
499 return offset != 0;
500 501
501 /* If the size of the access or of the symbol is unknown, 502 /* If the size of the access or of the symbol is unknown,
502 assume the worst. */ 503 assume the worst. */
503 decl = SYMBOL_REF_DECL (x); 504 decl = SYMBOL_REF_DECL (x);
504 505
505 /* Else check that the access is in bounds. TODO: restructure 506 /* Else check that the access is in bounds. TODO: restructure
506 expr_size/tree_expr_size/int_expr_size and just use the latter. */ 507 expr_size/tree_expr_size/int_expr_size and just use the latter. */
507 if (!decl) 508 if (!decl)
508 decl_size = -1; 509 decl_size = -1;
509 else if (DECL_P (decl) && DECL_SIZE_UNIT (decl)) 510 else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
510 decl_size = (tree_fits_shwi_p (DECL_SIZE_UNIT (decl)) 511 {
511 ? tree_to_shwi (DECL_SIZE_UNIT (decl)) 512 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &decl_size))
512 : -1); 513 decl_size = -1;
514 }
513 else if (TREE_CODE (decl) == STRING_CST) 515 else if (TREE_CODE (decl) == STRING_CST)
514 decl_size = TREE_STRING_LENGTH (decl); 516 decl_size = TREE_STRING_LENGTH (decl);
515 else if (TYPE_SIZE_UNIT (TREE_TYPE (decl))) 517 else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
516 decl_size = int_size_in_bytes (TREE_TYPE (decl)); 518 decl_size = int_size_in_bytes (TREE_TYPE (decl));
517 else 519 else
518 decl_size = -1; 520 decl_size = -1;
519 521
520 return (decl_size <= 0 ? offset != 0 : offset + size > decl_size); 522 return (!known_size_p (decl_size) || known_eq (decl_size, 0)
523 ? maybe_ne (offset, 0)
524 : maybe_gt (offset + size, decl_size));
521 } 525 }
522 526
523 return 0; 527 return 0;
524 528
525 case LABEL_REF: 529 case LABEL_REF:
532 || x == stack_pointer_rtx 536 || x == stack_pointer_rtx
533 /* The arg pointer varies if it is not a fixed register. */ 537 /* The arg pointer varies if it is not a fixed register. */
534 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])) 538 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
535 { 539 {
536 #ifdef RED_ZONE_SIZE 540 #ifdef RED_ZONE_SIZE
537 HOST_WIDE_INT red_zone_size = RED_ZONE_SIZE; 541 poly_int64 red_zone_size = RED_ZONE_SIZE;
538 #else 542 #else
539 HOST_WIDE_INT red_zone_size = 0; 543 poly_int64 red_zone_size = 0;
540 #endif 544 #endif
541 HOST_WIDE_INT stack_boundary = PREFERRED_STACK_BOUNDARY 545 poly_int64 stack_boundary = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
542 / BITS_PER_UNIT; 546 poly_int64 low_bound, high_bound;
543 HOST_WIDE_INT low_bound, high_bound; 547
544 548 if (!known_size_p (size))
545 if (size == 0)
546 size = GET_MODE_SIZE (mode);
547 if (size == 0)
548 return 1; 549 return 1;
549 550
550 if (x == frame_pointer_rtx) 551 if (x == frame_pointer_rtx)
551 { 552 {
552 if (FRAME_GROWS_DOWNWARD) 553 if (FRAME_GROWS_DOWNWARD)
560 high_bound = low_bound + get_frame_size (); 561 high_bound = low_bound + get_frame_size ();
561 } 562 }
562 } 563 }
563 else if (x == hard_frame_pointer_rtx) 564 else if (x == hard_frame_pointer_rtx)
564 { 565 {
565 HOST_WIDE_INT sp_offset 566 poly_int64 sp_offset
566 = get_initial_register_offset (STACK_POINTER_REGNUM, 567 = get_initial_register_offset (STACK_POINTER_REGNUM,
567 HARD_FRAME_POINTER_REGNUM); 568 HARD_FRAME_POINTER_REGNUM);
568 HOST_WIDE_INT ap_offset 569 poly_int64 ap_offset
569 = get_initial_register_offset (ARG_POINTER_REGNUM, 570 = get_initial_register_offset (ARG_POINTER_REGNUM,
570 HARD_FRAME_POINTER_REGNUM); 571 HARD_FRAME_POINTER_REGNUM);
571 572
572 #if STACK_GROWS_DOWNWARD 573 #if STACK_GROWS_DOWNWARD
573 low_bound = sp_offset - red_zone_size - stack_boundary; 574 low_bound = sp_offset - red_zone_size - stack_boundary;
587 - stack_boundary; 588 - stack_boundary;
588 #endif 589 #endif
589 } 590 }
590 else if (x == stack_pointer_rtx) 591 else if (x == stack_pointer_rtx)
591 { 592 {
592 HOST_WIDE_INT ap_offset 593 poly_int64 ap_offset
593 = get_initial_register_offset (ARG_POINTER_REGNUM, 594 = get_initial_register_offset (ARG_POINTER_REGNUM,
594 STACK_POINTER_REGNUM); 595 STACK_POINTER_REGNUM);
595 596
596 #if STACK_GROWS_DOWNWARD 597 #if STACK_GROWS_DOWNWARD
597 low_bound = - red_zone_size - stack_boundary; 598 low_bound = - red_zone_size - stack_boundary;
627 high_bound = FIRST_PARM_OFFSET (current_function_decl) 628 high_bound = FIRST_PARM_OFFSET (current_function_decl)
628 + crtl->args.size + stack_boundary; 629 + crtl->args.size + stack_boundary;
629 #endif 630 #endif
630 } 631 }
631 632
632 if (offset >= low_bound && offset <= high_bound - size) 633 if (known_ge (offset, low_bound)
634 && known_le (offset, high_bound - size))
633 return 0; 635 return 0;
634 return 1; 636 return 1;
635 } 637 }
636 /* All of the virtual frame registers are stack references. */ 638 /* All of the virtual frame registers are stack references. */
637 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER 639 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
647 /* An address is assumed not to trap if: 649 /* An address is assumed not to trap if:
648 - it is the pic register plus a const unspec without offset. */ 650 - it is the pic register plus a const unspec without offset. */
649 if (XEXP (x, 0) == pic_offset_table_rtx 651 if (XEXP (x, 0) == pic_offset_table_rtx
650 && GET_CODE (XEXP (x, 1)) == CONST 652 && GET_CODE (XEXP (x, 1)) == CONST
651 && GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC 653 && GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
652 && offset == 0) 654 && known_eq (offset, 0))
653 return 0; 655 return 0;
654 656
655 /* - or it is an address that can't trap plus a constant integer. */ 657 /* - or it is an address that can't trap plus a constant integer. */
656 if (CONST_INT_P (XEXP (x, 1)) 658 if (poly_int_rtx_p (XEXP (x, 1), &const_x1)
657 && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + INTVAL (XEXP (x, 1)), 659 && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + const_x1,
658 size, mode, unaligned_mems)) 660 size, mode, unaligned_mems))
659 return 0; 661 return 0;
660 662
661 return 1; 663 return 1;
662 664
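In the rewritten trap checks above, an unknown size is now encoded as -1 (rejected by known_size_p) rather than 0, and every comparison is phrased so that the conservative answer wins: maybe_lt/maybe_gt report a possible trap if any runtime value could be out of bounds, while known_ge/known_le prove safety for all of them. A constant-size sketch of the SYMBOL_REF bounds test follows; symbol_access_can_trap is a hypothetical helper written for illustration, not a GCC function.

/* decl_size <= 0 stands for "unknown object size", size < 0 for
   "unknown access size", mirroring the -1 sentinels used above.  */
static int
symbol_access_can_trap (long offset, long size, long decl_size)
{
  if (offset < 0)
    return 1;                        /* starts before the object */
  if (size < 0)
    return offset != 0;              /* unknown size: only offset 0 is safe */
  if (decl_size <= 0)
    return offset != 0;              /* unknown object size: same rule */
  return offset + size > decl_size;  /* must end within the object */
}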
684 /* Return nonzero if the use of X as an address in a MEM can cause a trap. */ 686 /* Return nonzero if the use of X as an address in a MEM can cause a trap. */
685 687
686 int 688 int
687 rtx_addr_can_trap_p (const_rtx x) 689 rtx_addr_can_trap_p (const_rtx x)
688 { 690 {
689 return rtx_addr_can_trap_p_1 (x, 0, 0, VOIDmode, false); 691 return rtx_addr_can_trap_p_1 (x, 0, -1, BLKmode, false);
690 } 692 }
691 693
692 /* Return true if X contains a MEM subrtx. */ 694 /* Return true if X contains a MEM subrtx. */
693 695
694 bool 696 bool
913 } 915 }
914 } 916 }
915 *base_out = x; 917 *base_out = x;
916 *offset_out = const0_rtx; 918 *offset_out = const0_rtx;
917 } 919 }
920
921 /* Express integer value X as some value Y plus a polynomial offset,
922 where Y is either const0_rtx, X or something within X (as opposed
923 to a new rtx). Return the Y and store the offset in *OFFSET_OUT. */
924
925 rtx
926 strip_offset (rtx x, poly_int64_pod *offset_out)
927 {
928 rtx base = const0_rtx;
929 rtx test = x;
930 if (GET_CODE (test) == CONST)
931 test = XEXP (test, 0);
932 if (GET_CODE (test) == PLUS)
933 {
934 base = XEXP (test, 0);
935 test = XEXP (test, 1);
936 }
937 if (poly_int_rtx_p (test, offset_out))
938 return base;
939 *offset_out = 0;
940 return x;
941 }
942
943 /* Return the argument size in REG_ARGS_SIZE note X. */
944
945 poly_int64
946 get_args_size (const_rtx x)
947 {
948 gcc_checking_assert (REG_NOTE_KIND (x) == REG_ARGS_SIZE);
949 return rtx_to_poly_int64 (XEXP (x, 0));
950 }
918 951
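strip_offset above peels one trailing constant (possibly wrapped in CONST) off a PLUS and reports it as a polynomial offset, while get_args_size simply decodes the operand of a REG_ARGS_SIZE note. Here is a toy version of the strip_offset contract over a minimal stand-in rtx type; the toy_* names are invented for illustration, and real callers pass rtx and receive a poly_int64.

#include <stdio.h>

enum toy_code { TOY_REG, TOY_CONST_INT, TOY_PLUS };

typedef struct toy_rtx
{
  enum toy_code code;
  long value;                        /* TOY_CONST_INT only */
  struct toy_rtx *op0, *op1;         /* TOY_PLUS only */
} toy_rtx;

/* Return the base of X and store the stripped constant in *OFFSET_OUT;
   NULL stands in for const0_rtx when X is itself a constant.  */
static toy_rtx *
toy_strip_offset (toy_rtx *x, long *offset_out)
{
  if (x->code == TOY_PLUS && x->op1->code == TOY_CONST_INT)
    {
      *offset_out = x->op1->value;
      return x->op0;
    }
  if (x->code == TOY_CONST_INT)
    {
      *offset_out = x->value;
      return NULL;
    }
  *offset_out = 0;
  return x;
}

int main (void)
{
  toy_rtx reg = { TOY_REG, 0, 0, 0 };
  toy_rtx cst = { TOY_CONST_INT, 16, 0, 0 };
  toy_rtx sum = { TOY_PLUS, 0, &reg, &cst };
  long off;
  toy_rtx *base = toy_strip_offset (&sum, &off);
  printf ("base == reg: %d, offset = %ld\n", base == &reg, off); /* 1, 16 */
  return 0;
}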
919 /* Return the number of places FIND appears within X. If COUNT_DEST is 952 /* Return the number of places FIND appears within X. If COUNT_DEST is
920 zero, we do not count occurrences inside the destination of a SET. */ 953 zero, we do not count occurrences inside the destination of a SET. */
921 954
922 int 955 int
1163 if (MEM_P (XEXP (body, 0))) 1196 if (MEM_P (XEXP (body, 0)))
1164 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0))) 1197 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
1165 return 1; 1198 return 1;
1166 return 0; 1199 return 0;
1167 1200
1201 case CLOBBER_HIGH:
1202 gcc_assert (REG_P (XEXP (body, 0)));
1203 return 0;
1204
1168 case COND_EXEC: 1205 case COND_EXEC:
1169 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body))) 1206 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
1170 return 1; 1207 return 1;
1171 return reg_referenced_p (x, COND_EXEC_CODE (body)); 1208 return reg_referenced_p (x, COND_EXEC_CODE (body));
1172 1209
1362 preserve the other half. */ 1399 preserve the other half. */
1363 1400
1364 bool 1401 bool
1365 read_modify_subreg_p (const_rtx x) 1402 read_modify_subreg_p (const_rtx x)
1366 { 1403 {
1367 unsigned int isize, osize;
1368 if (GET_CODE (x) != SUBREG) 1404 if (GET_CODE (x) != SUBREG)
1369 return false; 1405 return false;
1370 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))); 1406 poly_uint64 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
1371 osize = GET_MODE_SIZE (GET_MODE (x)); 1407 poly_uint64 osize = GET_MODE_SIZE (GET_MODE (x));
1372 return isize > osize 1408 poly_uint64 regsize = REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
1373 && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x))); 1409 /* The inner and outer modes of a subreg must be ordered, so that we
1410 can tell whether they're paradoxical or partial. */
1411 gcc_checking_assert (ordered_p (isize, osize));
1412 return (maybe_gt (isize, osize) && maybe_gt (isize, regsize));
1374 } 1413 }
1375 1414
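read_modify_subreg_p above now compares three possibly-variable sizes, asserting only that the inner and outer sizes are ordered. The underlying test is unchanged: storing to the subreg is a read-modify-write when the outer mode is strictly smaller than the inner one and the inner value is wider than a natural register, so untouched bytes must be preserved. A constant-size sketch (illustrative helper only):

#include <stdbool.h>

static bool
toy_read_modify_subreg (unsigned isize, unsigned osize, unsigned regsize)
{
  return isize > osize && isize > regsize;
}
/* Example: (subreg:SI (reg:DI d) 0) with 4-byte natural registers has
   isize 8, osize 4, regsize 4 -> true; the untouched high word of D
   must survive the store.  */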
1376 /* Helper function for set_of. */ 1415 /* Helper function for set_of. */
1377 struct set_of_data 1416 struct set_of_data
1378 { 1417 {
1383 static void 1422 static void
1384 set_of_1 (rtx x, const_rtx pat, void *data1) 1423 set_of_1 (rtx x, const_rtx pat, void *data1)
1385 { 1424 {
1386 struct set_of_data *const data = (struct set_of_data *) (data1); 1425 struct set_of_data *const data = (struct set_of_data *) (data1);
1387 if (rtx_equal_p (x, data->pat) 1426 if (rtx_equal_p (x, data->pat)
1388 || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x))) 1427 || (GET_CODE (pat) == CLOBBER_HIGH
1428 && REGNO(data->pat) == REGNO(XEXP (pat, 0))
1429 && reg_is_clobbered_by_clobber_high (data->pat, XEXP (pat, 0)))
1430 || (GET_CODE (pat) != CLOBBER_HIGH && !MEM_P (x)
1431 && reg_overlap_mentioned_p (data->pat, x)))
1389 data->found = pat; 1432 data->found = pat;
1390 } 1433 }
1391 1434
1392 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT 1435 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
1393 (either directly or via STRICT_LOW_PART and similar modifiers). */ 1436 (either directly or via STRICT_LOW_PART and similar modifiers). */
1472 rtx sub = XVECEXP (pat, 0, i); 1515 rtx sub = XVECEXP (pat, 0, i);
1473 switch (GET_CODE (sub)) 1516 switch (GET_CODE (sub))
1474 { 1517 {
1475 case USE: 1518 case USE:
1476 case CLOBBER: 1519 case CLOBBER:
1520 case CLOBBER_HIGH:
1477 break; 1521 break;
1478 1522
1479 case SET: 1523 case SET:
1480 /* We can consider insns having multiple sets, where all 1524 /* We can consider insns having multiple sets, where all
1481 but one are dead as single set insns. In common case 1525 but one are dead as single set insns. In common case
1562 if (GET_CODE (dst) == STRICT_LOW_PART) 1606 if (GET_CODE (dst) == STRICT_LOW_PART)
1563 dst = XEXP (dst, 0); 1607 dst = XEXP (dst, 0);
1564 1608
1565 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG) 1609 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1566 { 1610 {
1567 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst)) 1611 if (maybe_ne (SUBREG_BYTE (src), SUBREG_BYTE (dst)))
1568 return 0; 1612 return 0;
1569 src = SUBREG_REG (src); 1613 src = SUBREG_REG (src);
1570 dst = SUBREG_REG (dst); 1614 dst = SUBREG_REG (dst);
1571 } 1615 }
1572 1616
1578 && HARD_REGISTER_P (dst)) 1622 && HARD_REGISTER_P (dst))
1579 { 1623 {
1580 int i; 1624 int i;
1581 rtx par = XEXP (src, 1); 1625 rtx par = XEXP (src, 1);
1582 rtx src0 = XEXP (src, 0); 1626 rtx src0 = XEXP (src, 0);
1583 int c0 = INTVAL (XVECEXP (par, 0, 0)); 1627 poly_int64 c0 = rtx_to_poly_int64 (XVECEXP (par, 0, 0));
1584 HOST_WIDE_INT offset = GET_MODE_UNIT_SIZE (GET_MODE (src0)) * c0; 1628 poly_int64 offset = GET_MODE_UNIT_SIZE (GET_MODE (src0)) * c0;
1585 1629
1586 for (i = 1; i < XVECLEN (par, 0); i++) 1630 for (i = 1; i < XVECLEN (par, 0); i++)
1587 if (INTVAL (XVECEXP (par, 0, i)) != c0 + i) 1631 if (maybe_ne (rtx_to_poly_int64 (XVECEXP (par, 0, i)), c0 + i))
1588 return 0; 1632 return 0;
1589 return 1633 return
1590 simplify_subreg_regno (REGNO (src0), GET_MODE (src0), 1634 REG_CAN_CHANGE_MODE_P (REGNO (dst), GET_MODE (src0), GET_MODE (dst))
1591 offset, GET_MODE (dst)) == (int) REGNO (dst); 1635 && simplify_subreg_regno (REGNO (src0), GET_MODE (src0),
1636 offset, GET_MODE (dst)) == (int) REGNO (dst);
1592 } 1637 }
1593 1638
1594 return (REG_P (src) && REG_P (dst) 1639 return (REG_P (src) && REG_P (dst)
1595 && REGNO (src) == REGNO (dst)); 1640 && REGNO (src) == REGNO (dst));
1596 } 1641 }
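The VEC_SELECT path above gains two things: the selector constants are read with rtx_to_poly_int64 and compared with maybe_ne, and the hard-register case additionally requires REG_CAN_CHANGE_MODE_P, so the selection only counts as a no-op move when the target allows DST's registers to be accessed in the selected mode. The lane test itself is that the selector picks consecutive elements starting at c0; a sketch for constant selectors (toy helper, for illustration):

#include <stdbool.h>

/* True if SEL[0..n-1] selects the consecutive lanes c0, c0+1, ...  */
static bool
toy_consecutive_lanes (const long *sel, int n)
{
  for (int i = 1; i < n; i++)
    if (sel[i] != sel[0] + i)
      return false;
  return true;
}
/* { 2, 3 } -> true (a contiguous half); { 0, 2 } -> false.  */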
1625 for (i = 0; i < XVECLEN (pat, 0); i++) 1670 for (i = 0; i < XVECLEN (pat, 0); i++)
1626 { 1671 {
1627 rtx tem = XVECEXP (pat, 0, i); 1672 rtx tem = XVECEXP (pat, 0, i);
1628 1673
1629 if (GET_CODE (tem) == USE 1674 if (GET_CODE (tem) == USE
1630 || GET_CODE (tem) == CLOBBER) 1675 || GET_CODE (tem) == CLOBBER
1676 || GET_CODE (tem) == CLOBBER_HIGH)
1631 continue; 1677 continue;
1632 1678
1633 if (GET_CODE (tem) != SET || ! set_noop_p (tem)) 1679 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1634 return 0; 1680 return 0;
1635 } 1681 }
1767 return 0; 1813 return 0;
1768 1814
1769 recurse: 1815 recurse:
1770 switch (GET_CODE (x)) 1816 switch (GET_CODE (x))
1771 { 1817 {
1818 case CLOBBER:
1772 case STRICT_LOW_PART: 1819 case STRICT_LOW_PART:
1773 case ZERO_EXTRACT: 1820 case ZERO_EXTRACT:
1774 case SIGN_EXTRACT: 1821 case SIGN_EXTRACT:
1775 /* Overly conservative. */ 1822 /* Overly conservative. */
1776 x = XEXP (x, 0); 1823 x = XEXP (x, 0);
1857 int i; 1904 int i;
1858 1905
1859 if (GET_CODE (x) == COND_EXEC) 1906 if (GET_CODE (x) == COND_EXEC)
1860 x = COND_EXEC_CODE (x); 1907 x = COND_EXEC_CODE (x);
1861 1908
1862 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER) 1909 if (GET_CODE (x) == SET
1910 || GET_CODE (x) == CLOBBER
1911 || GET_CODE (x) == CLOBBER_HIGH)
1863 { 1912 {
1864 rtx dest = SET_DEST (x); 1913 rtx dest = SET_DEST (x);
1865 1914
1866 while ((GET_CODE (dest) == SUBREG 1915 while ((GET_CODE (dest) == SUBREG
1867 && (!REG_P (SUBREG_REG (dest)) 1916 && (!REG_P (SUBREG_REG (dest))
2080 2129
2081 /* If a COND_EXEC is not executed, the value survives. */ 2130 /* If a COND_EXEC is not executed, the value survives. */
2082 if (GET_CODE (pattern) == COND_EXEC) 2131 if (GET_CODE (pattern) == COND_EXEC)
2083 return 0; 2132 return 0;
2084 2133
2085 if (GET_CODE (pattern) == SET) 2134 if (GET_CODE (pattern) == SET || GET_CODE (pattern) == CLOBBER)
2086 return covers_regno_p (SET_DEST (pattern), test_regno); 2135 return covers_regno_p (SET_DEST (pattern), test_regno);
2087 else if (GET_CODE (pattern) == PARALLEL) 2136 else if (GET_CODE (pattern) == PARALLEL)
2088 { 2137 {
2089 int i; 2138 int i;
2090 2139
2336 add_int_reg_note (rtx_insn *insn, enum reg_note kind, int datum) 2385 add_int_reg_note (rtx_insn *insn, enum reg_note kind, int datum)
2337 { 2386 {
2338 gcc_checking_assert (int_reg_note_p (kind)); 2387 gcc_checking_assert (int_reg_note_p (kind));
2339 REG_NOTES (insn) = gen_rtx_INT_LIST ((machine_mode) kind, 2388 REG_NOTES (insn) = gen_rtx_INT_LIST ((machine_mode) kind,
2340 datum, REG_NOTES (insn)); 2389 datum, REG_NOTES (insn));
2390 }
2391
2392 /* Add a REG_ARGS_SIZE note to INSN with value VALUE. */
2393
2394 void
2395 add_args_size_note (rtx_insn *insn, poly_int64 value)
2396 {
2397 gcc_checking_assert (!find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX));
2398 add_reg_note (insn, REG_ARGS_SIZE, gen_int_mode (value, Pmode));
2341 } 2399 }
2342 2400
2343 /* Add a register note like NOTE to INSN. */ 2401 /* Add a register note like NOTE to INSN. */
2344 2402
2345 void 2403 void
2772 reference; moving it out of context such as when moving code 2830 reference; moving it out of context such as when moving code
2773 when optimizing, might cause its address to become invalid. */ 2831 when optimizing, might cause its address to become invalid. */
2774 code_changed 2832 code_changed
2775 || !MEM_NOTRAP_P (x)) 2833 || !MEM_NOTRAP_P (x))
2776 { 2834 {
2777 HOST_WIDE_INT size = MEM_SIZE_KNOWN_P (x) ? MEM_SIZE (x) : 0; 2835 poly_int64 size = MEM_SIZE_KNOWN_P (x) ? MEM_SIZE (x) : -1;
2778 return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size, 2836 return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size,
2779 GET_MODE (x), code_changed); 2837 GET_MODE (x), code_changed);
2780 } 2838 }
2781 2839
2782 return 0; 2840 return 0;
3302 switch (GET_CODE (x)) 3360 switch (GET_CODE (x))
3303 { 3361 {
3304 case PRE_INC: 3362 case PRE_INC:
3305 case POST_INC: 3363 case POST_INC:
3306 { 3364 {
3307 int size = GET_MODE_SIZE (GET_MODE (mem)); 3365 poly_int64 size = GET_MODE_SIZE (GET_MODE (mem));
3308 rtx r1 = XEXP (x, 0); 3366 rtx r1 = XEXP (x, 0);
3309 rtx c = gen_int_mode (size, GET_MODE (r1)); 3367 rtx c = gen_int_mode (size, GET_MODE (r1));
3310 return fn (mem, x, r1, r1, c, data); 3368 return fn (mem, x, r1, r1, c, data);
3311 } 3369 }
3312 3370
3313 case PRE_DEC: 3371 case PRE_DEC:
3314 case POST_DEC: 3372 case POST_DEC:
3315 { 3373 {
3316 int size = GET_MODE_SIZE (GET_MODE (mem)); 3374 poly_int64 size = GET_MODE_SIZE (GET_MODE (mem));
3317 rtx r1 = XEXP (x, 0); 3375 rtx r1 = XEXP (x, 0);
3318 rtx c = gen_int_mode (-size, GET_MODE (r1)); 3376 rtx c = gen_int_mode (-size, GET_MODE (r1));
3319 return fn (mem, x, r1, r1, c, data); 3377 return fn (mem, x, r1, r1, c, data);
3320 } 3378 }
3321 3379
3404 { 3462 {
3405 enum rtx_code code = GET_CODE (op); 3463 enum rtx_code code = GET_CODE (op);
3406 3464
3407 /* Constants always become the second operand. Prefer "nice" constants. */ 3465 /* Constants always become the second operand. Prefer "nice" constants. */
3408 if (code == CONST_INT) 3466 if (code == CONST_INT)
3467 return -10;
3468 if (code == CONST_WIDE_INT)
3469 return -9;
3470 if (code == CONST_POLY_INT)
3409 return -8; 3471 return -8;
3410 if (code == CONST_WIDE_INT)
3411 return -7;
3412 if (code == CONST_DOUBLE) 3472 if (code == CONST_DOUBLE)
3413 return -7; 3473 return -8;
3414 if (code == CONST_FIXED) 3474 if (code == CONST_FIXED)
3415 return -7; 3475 return -8;
3416 op = avoid_constant_pool_reference (op); 3476 op = avoid_constant_pool_reference (op);
3417 code = GET_CODE (op); 3477 code = GET_CODE (op);
3418 3478
3419 switch (GET_RTX_CLASS (code)) 3479 switch (GET_RTX_CLASS (code))
3420 { 3480 {
3421 case RTX_CONST_OBJ: 3481 case RTX_CONST_OBJ:
3422 if (code == CONST_INT) 3482 if (code == CONST_INT)
3423 return -6; 3483 return -7;
3424 if (code == CONST_WIDE_INT) 3484 if (code == CONST_WIDE_INT)
3425 return -6; 3485 return -6;
3486 if (code == CONST_POLY_INT)
3487 return -5;
3426 if (code == CONST_DOUBLE) 3488 if (code == CONST_DOUBLE)
3427 return -5; 3489 return -5;
3428 if (code == CONST_FIXED) 3490 if (code == CONST_FIXED)
3429 return -5; 3491 return -5;
3430 return -4; 3492 return -4;
3431 3493
3432 case RTX_EXTRA: 3494 case RTX_EXTRA:
3433 /* SUBREGs of objects should come second. */ 3495 /* SUBREGs of objects should come second. */
3434 if (code == SUBREG && OBJECT_P (SUBREG_REG (op))) 3496 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
3529 3591
3530 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE, 3592 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3531 and SUBREG_BYTE, return the bit offset where the subreg begins 3593 and SUBREG_BYTE, return the bit offset where the subreg begins
3532 (counting from the least significant bit of the operand). */ 3594 (counting from the least significant bit of the operand). */
3533 3595
3534 unsigned int 3596 poly_uint64
3535 subreg_lsb_1 (machine_mode outer_mode, 3597 subreg_lsb_1 (machine_mode outer_mode,
3536 machine_mode inner_mode, 3598 machine_mode inner_mode,
3537 unsigned int subreg_byte) 3599 poly_uint64 subreg_byte)
3538 { 3600 {
3539 unsigned int bitpos; 3601 poly_uint64 subreg_end, trailing_bytes, byte_pos;
3540 unsigned int byte;
3541 unsigned int word;
3542 3602
3543 /* A paradoxical subreg begins at bit position 0. */ 3603 /* A paradoxical subreg begins at bit position 0. */
3544 if (paradoxical_subreg_p (outer_mode, inner_mode)) 3604 if (paradoxical_subreg_p (outer_mode, inner_mode))
3545 return 0; 3605 return 0;
3546 3606
3547 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN) 3607 subreg_end = subreg_byte + GET_MODE_SIZE (outer_mode);
3548 /* If the subreg crosses a word boundary ensure that 3608 trailing_bytes = GET_MODE_SIZE (inner_mode) - subreg_end;
3549 it also begins and ends on a word boundary. */ 3609 if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
3550 gcc_assert (!((subreg_byte % UNITS_PER_WORD 3610 byte_pos = trailing_bytes;
3551 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD 3611 else if (!WORDS_BIG_ENDIAN && !BYTES_BIG_ENDIAN)
3552 && (subreg_byte % UNITS_PER_WORD 3612 byte_pos = subreg_byte;
3553 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
3554
3555 if (WORDS_BIG_ENDIAN)
3556 word = (GET_MODE_SIZE (inner_mode)
3557 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3558 else 3613 else
3559 word = subreg_byte / UNITS_PER_WORD; 3614 {
3560 bitpos = word * BITS_PER_WORD; 3615 /* When bytes and words have opposite endianness, we must be able
3561 3616 to split offsets into words and bytes at compile time. */
3562 if (BYTES_BIG_ENDIAN) 3617 poly_uint64 leading_word_part
3563 byte = (GET_MODE_SIZE (inner_mode) 3618 = force_align_down (subreg_byte, UNITS_PER_WORD);
3564 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD; 3619 poly_uint64 trailing_word_part
3565 else 3620 = force_align_down (trailing_bytes, UNITS_PER_WORD);
3566 byte = subreg_byte % UNITS_PER_WORD; 3621 /* If the subreg crosses a word boundary ensure that
3567 bitpos += byte * BITS_PER_UNIT; 3622 it also begins and ends on a word boundary. */
3568 3623 gcc_assert (known_le (subreg_end - leading_word_part,
3569 return bitpos; 3624 (unsigned int) UNITS_PER_WORD)
3625 || (known_eq (leading_word_part, subreg_byte)
3626 && known_eq (trailing_word_part, trailing_bytes)));
3627 if (WORDS_BIG_ENDIAN)
3628 byte_pos = trailing_word_part + (subreg_byte - leading_word_part);
3629 else
3630 byte_pos = leading_word_part + (trailing_bytes - trailing_word_part);
3631 }
3632
3633 return byte_pos * BITS_PER_UNIT;
3570 } 3634 }
3571 3635
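subreg_lsb_1 above is reformulated so that, when byte and word endianness agree, the position needs no word decomposition at all (trailing_bytes for big-endian, subreg_byte for little-endian); only the mixed-endian case must split the offset into whole-word and byte parts, which is why those parts have to be computable at compile time. A constant-size model follows, assuming hypothetical 4-byte words; the toy_* names are invented for illustration.

#include <stdio.h>

/* Toy word size; GCC's UNITS_PER_WORD is per target.  */
#define TOY_WORD 4

static unsigned
toy_subreg_lsb (unsigned inner_bytes, unsigned outer_bytes,
                unsigned subreg_byte, int words_big, int bytes_big)
{
  unsigned subreg_end = subreg_byte + outer_bytes;
  unsigned trailing = inner_bytes - subreg_end;
  unsigned byte_pos;

  if (words_big && bytes_big)
    byte_pos = trailing;                 /* count from the high end */
  else if (!words_big && !bytes_big)
    byte_pos = subreg_byte;              /* count from the low end */
  else
    {
      /* Mixed endianness: split into whole words plus a byte part.  */
      unsigned lead_word = subreg_byte / TOY_WORD * TOY_WORD;
      unsigned trail_word = trailing / TOY_WORD * TOY_WORD;
      byte_pos = words_big
                 ? trail_word + (subreg_byte - lead_word)
                 : lead_word + (trailing - trail_word);
    }
  return byte_pos * 8;                   /* bit position of the lsb */
}

int main (void)
{
  /* (subreg:SI (reg:DI x) 4): inner 8 bytes, outer 4, byte offset 4.  */
  printf ("little endian: %u\n", toy_subreg_lsb (8, 4, 4, 0, 0)); /* 32 */
  printf ("big endian: %u\n", toy_subreg_lsb (8, 4, 4, 1, 1));    /* 0 */
  return 0;
}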
3572 /* Given a subreg X, return the bit offset where the subreg begins 3636 /* Given a subreg X, return the bit offset where the subreg begins
3573 (counting from the least significant bit of the reg). */ 3637 (counting from the least significant bit of the reg). */
3574 3638
3575 unsigned int 3639 poly_uint64
3576 subreg_lsb (const_rtx x) 3640 subreg_lsb (const_rtx x)
3577 { 3641 {
3578 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)), 3642 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3579 SUBREG_BYTE (x)); 3643 SUBREG_BYTE (x));
3580 } 3644 }
3583 OUTER_BYTES bytes, whose inner value has INNER_BYTES bytes, and where 3647 OUTER_BYTES bytes, whose inner value has INNER_BYTES bytes, and where
3584 there are LSB_SHIFT *bits* between the lsb of the outer value and the 3648 there are LSB_SHIFT *bits* between the lsb of the outer value and the
3585 lsb of the inner value. This is the inverse of the calculation 3649 lsb of the inner value. This is the inverse of the calculation
3586 performed by subreg_lsb_1 (which converts byte offsets to bit shifts). */ 3650 performed by subreg_lsb_1 (which converts byte offsets to bit shifts). */
3587 3651
3588 unsigned int 3652 poly_uint64
3589 subreg_size_offset_from_lsb (unsigned int outer_bytes, 3653 subreg_size_offset_from_lsb (poly_uint64 outer_bytes, poly_uint64 inner_bytes,
3590 unsigned int inner_bytes, 3654 poly_uint64 lsb_shift)
3591 unsigned int lsb_shift)
3592 { 3655 {
3593 /* A paradoxical subreg begins at bit position 0. */ 3656 /* A paradoxical subreg begins at bit position 0. */
3594 if (outer_bytes > inner_bytes) 3657 gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
3595 { 3658 if (maybe_gt (outer_bytes, inner_bytes))
3596 gcc_checking_assert (lsb_shift == 0); 3659 {
3660 gcc_checking_assert (known_eq (lsb_shift, 0U));
3597 return 0; 3661 return 0;
3598 } 3662 }
3599 3663
3600 gcc_assert (lsb_shift % BITS_PER_UNIT == 0); 3664 poly_uint64 lower_bytes = exact_div (lsb_shift, BITS_PER_UNIT);
3601 unsigned int lower_bytes = lsb_shift / BITS_PER_UNIT; 3665 poly_uint64 upper_bytes = inner_bytes - (lower_bytes + outer_bytes);
3602 unsigned int upper_bytes = inner_bytes - (lower_bytes + outer_bytes);
3603 if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN) 3666 if (WORDS_BIG_ENDIAN && BYTES_BIG_ENDIAN)
3604 return upper_bytes; 3667 return upper_bytes;
3605 else if (!WORDS_BIG_ENDIAN && !BYTES_BIG_ENDIAN) 3668 else if (!WORDS_BIG_ENDIAN && !BYTES_BIG_ENDIAN)
3606 return lower_bytes; 3669 return lower_bytes;
3607 else 3670 else
3608 { 3671 {
3609 unsigned int lower_word_part = lower_bytes & -UNITS_PER_WORD; 3672 /* When bytes and words have opposite endianness, we must be able
3610 unsigned int upper_word_part = upper_bytes & -UNITS_PER_WORD; 3673 to split offsets into words and bytes at compile time. */
3674 poly_uint64 lower_word_part = force_align_down (lower_bytes,
3675 UNITS_PER_WORD);
3676 poly_uint64 upper_word_part = force_align_down (upper_bytes,
3677 UNITS_PER_WORD);
3611 if (WORDS_BIG_ENDIAN) 3678 if (WORDS_BIG_ENDIAN)
3612 return upper_word_part + (lower_bytes - lower_word_part); 3679 return upper_word_part + (lower_bytes - lower_word_part);
3613 else 3680 else
3614 return lower_word_part + (upper_bytes - upper_word_part); 3681 return lower_word_part + (upper_bytes - upper_word_part);
3615 } 3682 }
3634 this particular subreg can be replaced by a simple (reg ...) should 3701 this particular subreg can be replaced by a simple (reg ...) should
3635 use simplify_subreg_regno. */ 3702 use simplify_subreg_regno. */
3636 3703
3637 void 3704 void
3638 subreg_get_info (unsigned int xregno, machine_mode xmode, 3705 subreg_get_info (unsigned int xregno, machine_mode xmode,
3639 unsigned int offset, machine_mode ymode, 3706 poly_uint64 offset, machine_mode ymode,
3640 struct subreg_info *info) 3707 struct subreg_info *info)
3641 { 3708 {
3642 unsigned int nregs_xmode, nregs_ymode; 3709 unsigned int nregs_xmode, nregs_ymode;
3643 3710
3644 gcc_assert (xregno < FIRST_PSEUDO_REGISTER); 3711 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3645 3712
3646 unsigned int xsize = GET_MODE_SIZE (xmode); 3713 poly_uint64 xsize = GET_MODE_SIZE (xmode);
3647 unsigned int ysize = GET_MODE_SIZE (ymode); 3714 poly_uint64 ysize = GET_MODE_SIZE (ymode);
3715
3648 bool rknown = false; 3716 bool rknown = false;
3649 3717
3650 /* If the register representation of a non-scalar mode has holes in it, 3718 /* If the register representation of a non-scalar mode has holes in it,
3651 we expect the scalar units to be concatenated together, with the holes 3719 we expect the scalar units to be concatenated together, with the holes
3652 distributed evenly among the scalar units. Each scalar unit must occupy 3720 distributed evenly among the scalar units. Each scalar unit must occupy
3653 at least one register. */ 3721 at least one register. */
3654 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)) 3722 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
3655 { 3723 {
3724 /* As a consequence, we must be dealing with a constant number of
3725 scalars, and thus a constant offset and number of units. */
3726 HOST_WIDE_INT coffset = offset.to_constant ();
3727 HOST_WIDE_INT cysize = ysize.to_constant ();
3656 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode); 3728 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3657 unsigned int nunits = GET_MODE_NUNITS (xmode); 3729 unsigned int nunits = GET_MODE_NUNITS (xmode).to_constant ();
3658 scalar_mode xmode_unit = GET_MODE_INNER (xmode); 3730 scalar_mode xmode_unit = GET_MODE_INNER (xmode);
3659 gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit)); 3731 gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3660 gcc_assert (nregs_xmode 3732 gcc_assert (nregs_xmode
3661 == (nunits 3733 == (nunits
3662 * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit))); 3734 * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3669 necessary.) An example of a value with holes is XCmode on 32-bit 3741 necessary.) An example of a value with holes is XCmode on 32-bit
3670 x86 with -m128bit-long-double; it's represented in 6 32-bit registers, 3742 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3671 3 for each part, but in memory it's two 128-bit parts. 3743 3 for each part, but in memory it's two 128-bit parts.
3672 Padding is assumed to be at the end (not necessarily the 'high part') 3744 Padding is assumed to be at the end (not necessarily the 'high part')
3673 of each unit. */ 3745 of each unit. */
3674 if ((offset / GET_MODE_SIZE (xmode_unit) + 1 < nunits) 3746 if ((coffset / GET_MODE_SIZE (xmode_unit) + 1 < nunits)
3675 && (offset / GET_MODE_SIZE (xmode_unit) 3747 && (coffset / GET_MODE_SIZE (xmode_unit)
3676 != ((offset + ysize - 1) / GET_MODE_SIZE (xmode_unit)))) 3748 != ((coffset + cysize - 1) / GET_MODE_SIZE (xmode_unit))))
3677 { 3749 {
3678 info->representable_p = false; 3750 info->representable_p = false;
3679 rknown = true; 3751 rknown = true;
3680 } 3752 }
3681 } 3753 }
3682 else 3754 else
3683 nregs_xmode = hard_regno_nregs (xregno, xmode); 3755 nregs_xmode = hard_regno_nregs (xregno, xmode);
3684 3756
3685 nregs_ymode = hard_regno_nregs (xregno, ymode); 3757 nregs_ymode = hard_regno_nregs (xregno, ymode);
3686 3758
3759 /* Subreg sizes must be ordered, so that we can tell whether they are
3760 partial, paradoxical or complete. */
3761 gcc_checking_assert (ordered_p (xsize, ysize));
3762
3687 /* Paradoxical subregs are otherwise valid. */ 3763 /* Paradoxical subregs are otherwise valid. */
3688 if (!rknown && offset == 0 && ysize > xsize) 3764 if (!rknown && known_eq (offset, 0U) && maybe_gt (ysize, xsize))
3689 { 3765 {
3690 info->representable_p = true; 3766 info->representable_p = true;
3691 /* If this is a big endian paradoxical subreg, which uses more 3767 /* If this is a big endian paradoxical subreg, which uses more
3692 actual hard registers than the original register, we must 3768 actual hard registers than the original register, we must
3693 return a negative offset so that we find the proper highpart 3769 return a negative offset so that we find the proper highpart
3705 return; 3781 return;
3706 } 3782 }
3707 3783
3708 /* If registers store different numbers of bits in the different 3784 /* If registers store different numbers of bits in the different
3709 modes, we cannot generally form this subreg. */ 3785 modes, we cannot generally form this subreg. */
3786 poly_uint64 regsize_xmode, regsize_ymode;
3710 if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode) 3787 if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
3711 && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode) 3788 && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
3712 && (xsize % nregs_xmode) == 0 3789 && multiple_p (xsize, nregs_xmode, &regsize_xmode)
3713 && (ysize % nregs_ymode) == 0) 3790 && multiple_p (ysize, nregs_ymode, &regsize_ymode))
3714 { 3791 {
3715 int regsize_xmode = xsize / nregs_xmode;
3716 int regsize_ymode = ysize / nregs_ymode;
3717 if (!rknown 3792 if (!rknown
3718 && ((nregs_ymode > 1 && regsize_xmode > regsize_ymode) 3793 && ((nregs_ymode > 1 && maybe_gt (regsize_xmode, regsize_ymode))
3719 || (nregs_xmode > 1 && regsize_ymode > regsize_xmode))) 3794 || (nregs_xmode > 1 && maybe_gt (regsize_ymode, regsize_xmode))))
3720 { 3795 {
3721 info->representable_p = false; 3796 info->representable_p = false;
3722 info->nregs = CEIL (ysize, regsize_xmode); 3797 if (!can_div_away_from_zero_p (ysize, regsize_xmode, &info->nregs)
3723 info->offset = offset / regsize_xmode; 3798 || !can_div_trunc_p (offset, regsize_xmode, &info->offset))
3799 /* Checked by validate_subreg. We must know at compile time
3800 which inner registers are being accessed. */
3801 gcc_unreachable ();
3724 return; 3802 return;
3725 } 3803 }
3726 /* It's not valid to extract a subreg of mode YMODE at OFFSET that 3804 /* It's not valid to extract a subreg of mode YMODE at OFFSET that
3727 would go outside of XMODE. */ 3805 would go outside of XMODE. */
3728 if (!rknown && ysize + offset > xsize) 3806 if (!rknown && maybe_gt (ysize + offset, xsize))
3729 { 3807 {
3730 info->representable_p = false; 3808 info->representable_p = false;
3731 info->nregs = nregs_ymode; 3809 info->nregs = nregs_ymode;
3732 info->offset = offset / regsize_xmode; 3810 if (!can_div_trunc_p (offset, regsize_xmode, &info->offset))
3811 /* Checked by validate_subreg. We must know at compile time
3812 which inner registers are being accessed. */
3813 gcc_unreachable ();
3733 return; 3814 return;
3734 } 3815 }
3735 /* Quick exit for the simple and common case of extracting whole 3816 /* Quick exit for the simple and common case of extracting whole
3736 subregisters from a multiregister value. */ 3817 subregisters from a multiregister value. */
3737 /* ??? It would be better to integrate this into the code below, 3818 /* ??? It would be better to integrate this into the code below,
3738 if we can generalize the concept enough and figure out how 3819 if we can generalize the concept enough and figure out how
3739 odd-sized modes can coexist with the other weird cases we support. */ 3820 odd-sized modes can coexist with the other weird cases we support. */
3821 HOST_WIDE_INT count;
3740 if (!rknown 3822 if (!rknown
3741 && WORDS_BIG_ENDIAN == REG_WORDS_BIG_ENDIAN 3823 && WORDS_BIG_ENDIAN == REG_WORDS_BIG_ENDIAN
3742 && regsize_xmode == regsize_ymode 3824 && known_eq (regsize_xmode, regsize_ymode)
3743 && (offset % regsize_ymode) == 0) 3825 && constant_multiple_p (offset, regsize_ymode, &count))
3744 { 3826 {
3745 info->representable_p = true; 3827 info->representable_p = true;
3746 info->nregs = nregs_ymode; 3828 info->nregs = nregs_ymode;
3747 info->offset = offset / regsize_ymode; 3829 info->offset = count;
3748 gcc_assert (info->offset + info->nregs <= (int) nregs_xmode); 3830 gcc_assert (info->offset + info->nregs <= (int) nregs_xmode);
3749 return; 3831 return;
3750 } 3832 }
3751 } 3833 }
3752 3834
3753 /* Lowpart subregs are otherwise valid. */ 3835 /* Lowpart subregs are otherwise valid. */
3754 if (!rknown && offset == subreg_lowpart_offset (ymode, xmode)) 3836 if (!rknown && known_eq (offset, subreg_lowpart_offset (ymode, xmode)))
3755 { 3837 {
3756 info->representable_p = true; 3838 info->representable_p = true;
3757 rknown = true; 3839 rknown = true;
3758 3840
3759 if (offset == 0 || nregs_xmode == nregs_ymode) 3841 if (known_eq (offset, 0U) || nregs_xmode == nregs_ymode)
3760 { 3842 {
3761 info->offset = 0; 3843 info->offset = 0;
3762 info->nregs = nregs_ymode; 3844 info->nregs = nregs_ymode;
3763 return; 3845 return;
3764 } 3846 }
3774 3856
3775 /* Calculate the number of bytes in each block. This must always 3857 /* Calculate the number of bytes in each block. This must always
3776 be exact, otherwise we don't know how to verify the constraint. 3858 be exact, otherwise we don't know how to verify the constraint.
3777 These conditions may be relaxed but subreg_regno_offset would 3859 These conditions may be relaxed but subreg_regno_offset would
3778 need to be redesigned. */ 3860 need to be redesigned. */
3779 gcc_assert ((xsize % num_blocks) == 0); 3861 poly_uint64 bytes_per_block = exact_div (xsize, num_blocks);
3780 unsigned int bytes_per_block = xsize / num_blocks;
3781 3862
3782 /* Get the number of the first block that contains the subreg and the byte 3863 /* Get the number of the first block that contains the subreg and the byte
3783 offset of the subreg from the start of that block. */ 3864 offset of the subreg from the start of that block. */
3784 unsigned int block_number = offset / bytes_per_block; 3865 unsigned int block_number;
3785 unsigned int subblock_offset = offset % bytes_per_block; 3866 poly_uint64 subblock_offset;
3867 if (!can_div_trunc_p (offset, bytes_per_block, &block_number,
3868 &subblock_offset))
3869 /* Checked by validate_subreg. We must know at compile time which
3870 inner registers are being accessed. */
3871 gcc_unreachable ();
3786 3872
3787 if (!rknown) 3873 if (!rknown)
3788 { 3874 {
3789 /* Only the lowpart of each block is representable. */ 3875 /* Only the lowpart of each block is representable. */
3790 info->representable_p 3876 info->representable_p
3791 = (subblock_offset 3877 = known_eq (subblock_offset,
3792 == subreg_size_lowpart_offset (ysize, bytes_per_block)); 3878 subreg_size_lowpart_offset (ysize, bytes_per_block));
3793 rknown = true; 3879 rknown = true;
3794 } 3880 }
3795 3881
3796 /* We assume that the ordering of registers within a multi-register 3882 /* We assume that the ordering of registers within a multi-register
3797 value has a consistent endianness: if bytes and register words 3883 value has a consistent endianness: if bytes and register words
3814 offset - The byte offset. 3900 offset - The byte offset.
3815 ymode - The mode of a top level SUBREG (or what may become one). 3901 ymode - The mode of a top level SUBREG (or what may become one).
3816 RETURN - The regno offset which would be used. */ 3902 RETURN - The regno offset which would be used. */
3817 unsigned int 3903 unsigned int
3818 subreg_regno_offset (unsigned int xregno, machine_mode xmode, 3904 subreg_regno_offset (unsigned int xregno, machine_mode xmode,
3819 unsigned int offset, machine_mode ymode) 3905 poly_uint64 offset, machine_mode ymode)
3820 { 3906 {
3821 struct subreg_info info; 3907 struct subreg_info info;
3822 subreg_get_info (xregno, xmode, offset, ymode, &info); 3908 subreg_get_info (xregno, xmode, offset, ymode, &info);
3823 return info.offset; 3909 return info.offset;
3824 } 3910 }
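Most of subreg_get_info's conversion above replaces exact divisions and modulo tests with can_div_trunc_p / constant_multiple_p / exact_div, which also verify at compile time that the division is well defined for every runtime value; the new gcc_unreachable calls mark combinations that validate_subreg has already rejected. The common "whole registers" fast path reduces to the following constant-size computation (hypothetical helper, for illustration):

#include <stdbool.h>
#include <stdio.h>

/* Quick-exit case: OFFSET selects whole hard registers out of a value
   spanning NREGS_X equal-sized registers.  */
static bool
toy_subreg_whole_regs (unsigned xsize, unsigned ysize, unsigned offset,
                       unsigned nregs_x, unsigned nregs_y,
                       unsigned *reg_offset)
{
  unsigned regsize_x = xsize / nregs_x;
  unsigned regsize_y = ysize / nregs_y;
  if (regsize_x == regsize_y && offset % regsize_y == 0)
    {
      *reg_offset = offset / regsize_y;   /* first hard register used */
      return true;
    }
  return false;
}

int main (void)
{
  unsigned off;
  /* (subreg:SI (reg:DI x) 4) with 4-byte hard registers.  */
  if (toy_subreg_whole_regs (8, 4, 4, 2, 1, &off))
    printf ("starts at hard register %u of the DI pair\n", off);  /* 1 */
  return 0;
}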
3830 offset - The byte offset. 3916 offset - The byte offset.
3831 ymode - The mode of a top level SUBREG (or what may become one). 3917 ymode - The mode of a top level SUBREG (or what may become one).
3832 RETURN - Whether the offset is representable. */ 3918 RETURN - Whether the offset is representable. */
3833 bool 3919 bool
3834 subreg_offset_representable_p (unsigned int xregno, machine_mode xmode, 3920 subreg_offset_representable_p (unsigned int xregno, machine_mode xmode,
3835 unsigned int offset, machine_mode ymode) 3921 poly_uint64 offset, machine_mode ymode)
3836 { 3922 {
3837 struct subreg_info info; 3923 struct subreg_info info;
3838 subreg_get_info (xregno, xmode, offset, ymode, &info); 3924 subreg_get_info (xregno, xmode, offset, ymode, &info);
3839 return info.representable_p; 3925 return info.representable_p;
3840 } 3926 }
3847 3933
3848 XREGNO is a hard register number. */ 3934 XREGNO is a hard register number. */
3849 3935
3850 int 3936 int
3851 simplify_subreg_regno (unsigned int xregno, machine_mode xmode, 3937 simplify_subreg_regno (unsigned int xregno, machine_mode xmode,
3852 unsigned int offset, machine_mode ymode) 3938 poly_uint64 offset, machine_mode ymode)
3853 { 3939 {
3854 struct subreg_info info; 3940 struct subreg_info info;
3855 unsigned int yregno; 3941 unsigned int yregno;
3856 3942
3857 /* Give the backend a chance to disallow the mode change. */ 3943 /* Give the backend a chance to disallow the mode change. */
4122 if (GET_MODE (x) != VOIDmode) 4208 if (GET_MODE (x) != VOIDmode)
4123 mode = GET_MODE (x); 4209 mode = GET_MODE (x);
4124 4210
4125 /* A size N times larger than UNITS_PER_WORD likely needs N times as 4211 /* A size N times larger than UNITS_PER_WORD likely needs N times as
4126 many insns, taking N times as long. */ 4212 many insns, taking N times as long. */
4127 factor = GET_MODE_SIZE (mode) / UNITS_PER_WORD; 4213 factor = estimated_poly_value (GET_MODE_SIZE (mode)) / UNITS_PER_WORD;
4128 if (factor == 0) 4214 if (factor == 0)
4129 factor = 1; 4215 factor = 1;
4130 4216
4131 /* Compute the default costs of certain things. 4217 /* Compute the default costs of certain things.
4132 Note that targetm.rtx_costs can override the defaults. */ 4218 Note that targetm.rtx_costs can override the defaults. */
4153 break; 4239 break;
4154 case SET: 4240 case SET:
4155 /* A SET doesn't have a mode, so let's look at the SET_DEST to get 4241 /* A SET doesn't have a mode, so let's look at the SET_DEST to get
4156 the mode for the factor. */ 4242 the mode for the factor. */
4157 mode = GET_MODE (SET_DEST (x)); 4243 mode = GET_MODE (SET_DEST (x));
4158 factor = GET_MODE_SIZE (mode) / UNITS_PER_WORD; 4244 factor = estimated_poly_value (GET_MODE_SIZE (mode)) / UNITS_PER_WORD;
4159 if (factor == 0) 4245 if (factor == 0)
4160 factor = 1; 4246 factor = 1;
4161 /* FALLTHRU */ 4247 /* FALLTHRU */
4162 default: 4248 default:
4163 total = factor * COSTS_N_INSNS (1); 4249 total = factor * COSTS_N_INSNS (1);
4357 machine_mode known_mode, 4443 machine_mode known_mode,
4358 unsigned HOST_WIDE_INT known_ret) 4444 unsigned HOST_WIDE_INT known_ret)
4359 { 4445 {
4360 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode); 4446 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
4361 unsigned HOST_WIDE_INT inner_nz; 4447 unsigned HOST_WIDE_INT inner_nz;
4362 enum rtx_code code; 4448 enum rtx_code code = GET_CODE (x);
4363 machine_mode inner_mode; 4449 machine_mode inner_mode;
4450 unsigned int inner_width;
4364 scalar_int_mode xmode; 4451 scalar_int_mode xmode;
4365 4452
4366 unsigned int mode_width = GET_MODE_PRECISION (mode); 4453 unsigned int mode_width = GET_MODE_PRECISION (mode);
4367 4454
4368 if (CONST_INT_P (x)) 4455 if (CONST_INT_P (x))
4392 /* Our only callers in this case look for single bit values. So 4479 /* Our only callers in this case look for single bit values. So
4393 just return the mode mask. Those tests will then be false. */ 4480 just return the mode mask. Those tests will then be false. */
4394 return nonzero; 4481 return nonzero;
4395 4482
4396 /* If MODE is wider than X, but both are a single word for both the host 4483 /* If MODE is wider than X, but both are a single word for both the host
4397 and target machines, we can compute this from which bits of the 4484 and target machines, we can compute this from which bits of the object
4398 object might be nonzero in its own mode, taking into account the fact 4485 might be nonzero in its own mode, taking into account the fact that, on
4399 that on many CISC machines, accessing an object in a wider mode 4486 CISC machines, accessing an object in a wider mode generally causes the
4400 causes the high-order bits to become undefined. So they are 4487 high-order bits to become undefined, so they are not known to be zero.
4401 not known to be zero. */ 4488 We extend this reasoning to RISC machines for rotate operations since the
4402 4489 semantics of the operations in the larger mode is not well defined. */
4403 if (!WORD_REGISTER_OPERATIONS 4490 if (mode_width > xmode_width
4404 && mode_width > xmode_width
4405 && xmode_width <= BITS_PER_WORD 4491 && xmode_width <= BITS_PER_WORD
4406 && xmode_width <= HOST_BITS_PER_WIDE_INT) 4492 && xmode_width <= HOST_BITS_PER_WIDE_INT
4493 && (!WORD_REGISTER_OPERATIONS || code == ROTATE || code == ROTATERT))
4407 { 4494 {
4408 nonzero &= cached_nonzero_bits (x, xmode, 4495 nonzero &= cached_nonzero_bits (x, xmode,
4409 known_x, known_mode, known_ret); 4496 known_x, known_mode, known_ret);
4410 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (xmode); 4497 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (xmode);
4411 return nonzero; 4498 return nonzero;
4412 } 4499 }
4413 4500
4414 /* Please keep nonzero_bits_binary_arith_p above in sync with 4501 /* Please keep nonzero_bits_binary_arith_p above in sync with
4415 the code in the switch below. */ 4502 the code in the switch below. */
4416 code = GET_CODE (x);
4417 switch (code) 4503 switch (code)
4418 { 4504 {
4419 case REG: 4505 case REG:
4420 #if defined(POINTERS_EXTEND_UNSIGNED) 4506 #if defined(POINTERS_EXTEND_UNSIGNED)
4421 /* If pointers extend unsigned and this is a pointer in Pmode, say that 4507 /* If pointers extend unsigned and this is a pointer in Pmode, say that
4446 #ifdef PUSH_ROUNDING 4532 #ifdef PUSH_ROUNDING
4447 /* If PUSH_ROUNDING is defined, it is possible for the 4533 /* If PUSH_ROUNDING is defined, it is possible for the
4448 stack to be momentarily aligned only to that amount, 4534 stack to be momentarily aligned only to that amount,
4449 so we pick the least alignment. */ 4535 so we pick the least alignment. */
4450 if (x == stack_pointer_rtx && PUSH_ARGS) 4536 if (x == stack_pointer_rtx && PUSH_ARGS)
4451 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1), 4537 {
4452 alignment); 4538 poly_uint64 rounded_1 = PUSH_ROUNDING (poly_int64 (1));
4539 alignment = MIN (known_alignment (rounded_1), alignment);
4540 }
4453 #endif 4541 #endif
4454 4542
4455 nonzero &= ~(alignment - 1); 4543 nonzero &= ~(alignment - 1);
4456 } 4544 }
4457 4545
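The PUSH_ROUNDING change above evaluates the rounding on a poly_int64 and takes known_alignment of the result, i.e. the largest power of two that divides it for every runtime value. The payoff is the final line of the hunk: a pointer known to be ALIGNMENT-byte aligned has its low log2(ALIGNMENT) bits known zero. A two-line standalone illustration:

#include <stdio.h>

int main (void)
{
  unsigned long alignment = 16;     /* e.g. a 16-byte-aligned stack */
  unsigned long nonzero = ~0ul;     /* all bits possibly nonzero... */
  nonzero &= ~(alignment - 1);      /* ...except the low four */
  printf ("%#lx\n", nonzero);       /* 0xfffffffffffffff0 on LP64 */
  return 0;
}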
4661 4749
4662 /* If the inner mode is a single word for both the host and target 4750 /* If the inner mode is a single word for both the host and target
4663 machines, we can compute this from which bits of the inner 4751 machines, we can compute this from which bits of the inner
4664 object might be nonzero. */ 4752 object might be nonzero. */
4665 inner_mode = GET_MODE (SUBREG_REG (x)); 4753 inner_mode = GET_MODE (SUBREG_REG (x));
4666 if (GET_MODE_PRECISION (inner_mode) <= BITS_PER_WORD 4754 if (GET_MODE_PRECISION (inner_mode).is_constant (&inner_width)
4667 && GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT) 4755 && inner_width <= BITS_PER_WORD
4756 && inner_width <= HOST_BITS_PER_WIDE_INT)
4668 { 4757 {
4669 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode, 4758 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
4670 known_x, known_mode, known_ret); 4759 known_x, known_mode, known_ret);
4671 4760
4672 /* On many CISC machines, accessing an object in a wider mode 4761 /* On many CISC machines, accessing an object in a wider mode
4678 about the way loads are extended. */ 4767 about the way loads are extended. */
4679 || ((extend_op = load_extend_op (inner_mode)) == SIGN_EXTEND 4768 || ((extend_op = load_extend_op (inner_mode)) == SIGN_EXTEND
4680 ? val_signbit_known_set_p (inner_mode, nonzero) 4769 ? val_signbit_known_set_p (inner_mode, nonzero)
4681 : extend_op != ZERO_EXTEND) 4770 : extend_op != ZERO_EXTEND)
4682 || (!MEM_P (SUBREG_REG (x)) && !REG_P (SUBREG_REG (x)))) 4771 || (!MEM_P (SUBREG_REG (x)) && !REG_P (SUBREG_REG (x))))
4683 && xmode_width > GET_MODE_PRECISION (inner_mode)) 4772 && xmode_width > inner_width)
4684 nonzero |= (GET_MODE_MASK (xmode) & ~GET_MODE_MASK (inner_mode)); 4773 nonzero
4774 |= (GET_MODE_MASK (GET_MODE (x)) & ~GET_MODE_MASK (inner_mode));
4685 } 4775 }
4686 break; 4776 break;
4687 4777
4778 case ASHIFT:
4688 case ASHIFTRT: 4779 case ASHIFTRT:
4689 case LSHIFTRT: 4780 case LSHIFTRT:
4690 case ASHIFT:
4691 case ROTATE: 4781 case ROTATE:
4782 case ROTATERT:
4692 /* The nonzero bits are in two classes: any bits within MODE 4783 /* The nonzero bits are in two classes: any bits within MODE
4693 that aren't in xmode are always significant. The rest of the 4784 that aren't in xmode are always significant. The rest of the
4694 nonzero bits are those that are significant in the operand of 4785 nonzero bits are those that are significant in the operand of
4695 the shift when shifted the appropriate number of bits. This 4786 the shift when shifted the appropriate number of bits. This
4696 shows that high-order bits are cleared by the right shift and 4787 shows that high-order bits are cleared by the right shift and
4709 unsigned HOST_WIDE_INT outer = 0; 4800 unsigned HOST_WIDE_INT outer = 0;
4710 4801
4711 if (mode_width > xmode_width) 4802 if (mode_width > xmode_width)
4712 outer = (op_nonzero & nonzero & ~mode_mask); 4803 outer = (op_nonzero & nonzero & ~mode_mask);
4713 4804
4714 if (code == LSHIFTRT) 4805 switch (code)
4715 inner >>= count;
4716 else if (code == ASHIFTRT)
4717 { 4806 {
4807 case ASHIFT:
4808 inner <<= count;
4809 break;
4810
4811 case LSHIFTRT:
4812 inner >>= count;
4813 break;
4814
4815 case ASHIFTRT:
4718 inner >>= count; 4816 inner >>= count;
4719 4817
4720 /* If the sign bit may have been nonzero before the shift, we 4818 /* If the sign bit may have been nonzero before the shift, we
4721 need to mark all the places it could have been copied to 4819 need to mark all the places it could have been copied to
4722 by the shift as possibly nonzero. */ 4820 by the shift as possibly nonzero. */
4723 if (inner & (HOST_WIDE_INT_1U << (xmode_width - 1 - count))) 4821 if (inner & (HOST_WIDE_INT_1U << (xmode_width - 1 - count)))
4724 inner |= (((HOST_WIDE_INT_1U << count) - 1) 4822 inner |= (((HOST_WIDE_INT_1U << count) - 1)
4725 << (xmode_width - count)); 4823 << (xmode_width - count));
4824 break;
4825
4826 case ROTATE:
4827 inner = (inner << (count % xmode_width)
4828 | (inner >> (xmode_width - (count % xmode_width))))
4829 & mode_mask;
4830 break;
4831
4832 case ROTATERT:
4833 inner = (inner >> (count % xmode_width)
4834 | (inner << (xmode_width - (count % xmode_width))))
4835 & mode_mask;
4836 break;
4837
4838 default:
4839 gcc_unreachable ();
4726 } 4840 }
4727 else if (code == ASHIFT)
4728 inner <<= count;
4729 else
4730 inner = ((inner << (count % xmode_width)
4731 | (inner >> (xmode_width - (count % xmode_width))))
4732 & mode_mask);
4733 4841
4734 nonzero &= (outer | inner); 4842 nonzero &= (outer | inner);
4735 } 4843 }
4736 break; 4844 break;
4737 4845
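The switch above adds ROTATE and ROTATERT alongside the shifts: rotating a value permutes its bits, so the set of possibly-nonzero bits is rotated by the same amount within the operand's mode mask. A standalone model of that step (toy function, invented for illustration):

#include <stdint.h>
#include <stdio.h>

static uint32_t
toy_rotate_nonzero (uint32_t inner, unsigned count, unsigned width,
                    int right)
{
  uint32_t mask = width == 32 ? 0xffffffffu : (1u << width) - 1;
  count %= width;
  if (count == 0)
    return inner & mask;
  if (right)
    return ((inner >> count) | (inner << (width - count))) & mask;
  return ((inner << count) | (inner >> (width - count))) & mask;
}

int main (void)
{
  /* A QImode value whose low four bits may be nonzero, rotated left 2. */
  printf ("%#x\n", toy_rotate_nonzero (0x0f, 2, 8, 0));  /* 0x3c */
  return 0;
}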
4915 5023
4916 if (bitwidth > xmode_width) 5024 if (bitwidth > xmode_width)
4917 { 5025 {
4918 /* If this machine does not do all register operations on the entire 5026 /* If this machine does not do all register operations on the entire
4919 register and MODE is wider than the mode of X, we can say nothing 5027 register and MODE is wider than the mode of X, we can say nothing
4920 at all about the high-order bits. */ 5028 at all about the high-order bits. We extend this reasoning to every
4921 if (!WORD_REGISTER_OPERATIONS) 5029 machine for rotate operations since the semantics of the operations
5030 in the larger mode is not well defined. */
5031 if (!WORD_REGISTER_OPERATIONS || code == ROTATE || code == ROTATERT)
4922 return 1; 5032 return 1;
4923 5033
4924 /* Likewise on machines that do, if the mode of the object is smaller 5034 /* Likewise on machines that do, if the mode of the object is smaller
4925 than a word and loads of that size don't sign extend, we can say 5035 than a word and loads of that size don't sign extend, we can say
4926 nothing about the high order bits. */ 5036 nothing about the high order bits. */
5005 to the stack. */ 5115 to the stack. */
5006 5116
5007 if (WORD_REGISTER_OPERATIONS 5117 if (WORD_REGISTER_OPERATIONS
5008 && load_extend_op (inner_mode) == SIGN_EXTEND 5118 && load_extend_op (inner_mode) == SIGN_EXTEND
5009 && paradoxical_subreg_p (x) 5119 && paradoxical_subreg_p (x)
5010 && (MEM_P (SUBREG_REG (x)) || REG_P (SUBREG_REG (x)))) 5120 && MEM_P (SUBREG_REG (x)))
5011 return cached_num_sign_bit_copies (SUBREG_REG (x), mode, 5121 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
5012 known_x, known_mode, known_ret); 5122 known_x, known_mode, known_ret);
5013 } 5123 }
5014 break; 5124 break;
5015 5125
5339 for (; seq; seq = NEXT_INSN (seq)) 5449 for (; seq; seq = NEXT_INSN (seq))
5340 { 5450 {
5341 set = single_set (seq); 5451 set = single_set (seq);
5342 if (set) 5452 if (set)
5343 cost += set_rtx_cost (set, speed); 5453 cost += set_rtx_cost (set, speed);
5344 else 5454 else if (NONDEBUG_INSN_P (seq))
5345 cost++; 5455 {
5456 int this_cost = insn_cost (CONST_CAST_RTX_INSN (seq), speed);
5457 if (this_cost > 0)
5458 cost += this_cost;
5459 else
5460 cost++;
5461 }
5346 } 5462 }
5347 5463
5348 return cost; 5464 return cost;
5349 } 5465 }
5350 5466
5621 5737
5622 /* Never return CC0; return zero instead. */ 5738 /* Never return CC0; return zero instead. */
5623 if (CC0_P (op0)) 5739 if (CC0_P (op0))
5624 return 0; 5740 return 0;
5625 5741
5626 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1); 5742 /* We promised to return a comparison. */
5743 rtx ret = gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
5744 if (COMPARISON_P (ret))
5745 return ret;
5746 return 0;
5627 } 5747 }
5628 5748
5629 /* Given a jump insn JUMP, return the condition that will cause it to branch 5749 /* Given a jump insn JUMP, return the condition that will cause it to branch
5630 to its JUMP_LABEL. If the condition cannot be understood, or is an 5750 to its JUMP_LABEL. If the condition cannot be understood, or is an
5631 inequality floating-point comparison which needs to be reversed, 0 will 5751 inequality floating-point comparison which needs to be reversed, 0 will
5994 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_BITFIELD_OPS) 6114 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_BITFIELD_OPS)
5995 { 6115 {
5996 machine_mode mode = GET_MODE (XEXP (x, 0)); 6116 machine_mode mode = GET_MODE (XEXP (x, 0));
5997 HOST_WIDE_INT len = INTVAL (XEXP (x, 1)); 6117 HOST_WIDE_INT len = INTVAL (XEXP (x, 1));
5998 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2)); 6118 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
5999 6119 poly_int64 remaining_bits = GET_MODE_PRECISION (mode) - len;
6000 return (pos == (BITS_BIG_ENDIAN ? GET_MODE_PRECISION (mode) - len : 0)); 6120
6121 return known_eq (pos, BITS_BIG_ENDIAN ? remaining_bits : 0);
6001 } 6122 }
6002 return false; 6123 return false;
6003 } 6124 }
6004 6125
6005 /* Strip outer address "mutations" from LOC and return a pointer to the 6126 /* Strip outer address "mutations" from LOC and return a pointer to the
6441 FOR_EACH_SUBRTX (iter, array, x, ALL) 6562 FOR_EACH_SUBRTX (iter, array, x, ALL)
6442 if (GET_CODE (*iter) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (*iter) != 0) 6563 if (GET_CODE (*iter) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (*iter) != 0)
6443 return true; 6564 return true;
6444 return false; 6565 return false;
6445 } 6566 }
6567
6568 /* Return true if reg REGNO with mode REG_MODE would be clobbered by the
6569 clobber_high operand in CLOBBER_HIGH_OP. */
6570
6571 bool
6572 reg_is_clobbered_by_clobber_high (unsigned int regno, machine_mode reg_mode,
6573 const_rtx clobber_high_op)
6574 {
6575 unsigned int clobber_regno = REGNO (clobber_high_op);
6576 machine_mode clobber_mode = GET_MODE (clobber_high_op);
6577 unsigned char regno_nregs = hard_regno_nregs (regno, reg_mode);
6578
6579 /* Clobber high should always span exactly one register. */
6580 gcc_assert (REG_NREGS (clobber_high_op) == 1);
6581
6582 /* Clobber high needs to match with one of the registers in X. */
6583 if (clobber_regno < regno || clobber_regno >= regno + regno_nregs)
6584 return false;
6585
6586 gcc_assert (reg_mode != BLKmode && clobber_mode != BLKmode);
6587
6588 if (reg_mode == VOIDmode)
6589 return clobber_mode != VOIDmode;
6590
6591 /* Clobber high will clobber if its size might be greater than the size of
6592 register regno. */
6593 return maybe_gt (exact_div (GET_MODE_SIZE (reg_mode), regno_nregs),
6594 GET_MODE_SIZE (clobber_mode));
6595 }
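reg_is_clobbered_by_clobber_high supports the new CLOBBER_HIGH rtx, introduced for calls that preserve only the low part of wide registers (the motivating case being AArch64 SVE, where certain calls keep the low, fixed-width part of a vector register live but may clobber the rest): the value in REGNO survives only if its per-register size fits inside the mode the clobber promises to preserve. A constant-size sketch, with a hypothetical helper name and example widths:

#include <stdbool.h>

/* True if a value occupying REG_BYTES_PER_HARD_REG bytes of each hard
   register might be damaged by a CLOBBER_HIGH that preserves only the
   low CLOBBER_MODE_BYTES bytes.  */
static bool
toy_clobber_high_clobbers (unsigned reg_bytes_per_hard_reg,
                           unsigned clobber_mode_bytes)
{
  return reg_bytes_per_hard_reg > clobber_mode_bytes;
}
/* A 32-byte vector value per register vs. a preserved 16-byte low part
   -> true (clobbered); a 16-byte value -> false (survives).  */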