comparison gcc/emit-rtl.c @ 131:84e7813d76e9 (gcc-8.2)
| author | mir3636 |
| --- | --- |
| date | Thu, 25 Oct 2018 07:37:49 +0900 |
| parents | 04ced10e8804 |
| children | 1830386684a0 |
| 111:04ced10e8804 (parent) | 131:84e7813d76e9 (this revision) |
| --- | --- |
1 /* Emit RTL for the GCC expander. | 1 /* Emit RTL for the GCC expander. |
2 Copyright (C) 1987-2017 Free Software Foundation, Inc. | 2 Copyright (C) 1987-2018 Free Software Foundation, Inc. |
3 | 3 |
4 This file is part of GCC. | 4 This file is part of GCC. |
5 | 5 |
6 GCC is free software; you can redistribute it and/or modify it under | 6 GCC is free software; you can redistribute it and/or modify it under |
7 the terms of the GNU General Public License as published by the Free | 7 the terms of the GNU General Public License as published by the Free |
58 #include "builtins.h" | 58 #include "builtins.h" |
59 #include "rtl-iter.h" | 59 #include "rtl-iter.h" |
60 #include "stor-layout.h" | 60 #include "stor-layout.h" |
61 #include "opts.h" | 61 #include "opts.h" |
62 #include "predict.h" | 62 #include "predict.h" |
63 #include "rtx-vector-builder.h" | |
63 | 64 |
64 struct target_rtl default_target_rtl; | 65 struct target_rtl default_target_rtl; |
65 #if SWITCHABLE_TARGET | 66 #if SWITCHABLE_TARGET |
66 struct target_rtl *this_target_rtl = &default_target_rtl; | 67 struct target_rtl *this_target_rtl = &default_target_rtl; |
67 #endif | 68 #endif |
145 static hashval_t hash (rtx x); | 146 static hashval_t hash (rtx x); |
146 static bool equal (rtx x, rtx y); | 147 static bool equal (rtx x, rtx y); |
147 }; | 148 }; |
148 | 149 |
149 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab; | 150 static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab; |
151 | |
152 struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def> | |
153 { | |
154 typedef std::pair<machine_mode, poly_wide_int_ref> compare_type; | |
155 | |
156 static hashval_t hash (rtx x); | |
157 static bool equal (rtx x, const compare_type &y); | |
158 }; | |
159 | |
160 static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab; | |
150 | 161 |
151 /* A hash table storing register attribute structures. */ | 162 /* A hash table storing register attribute structures. */ |
152 struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs> | 163 struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs> |
153 { | 164 { |
154 static hashval_t hash (reg_attrs *x); | 165 static hashval_t hash (reg_attrs *x); |
184 #if TARGET_SUPPORTS_WIDE_INT | 195 #if TARGET_SUPPORTS_WIDE_INT |
185 static rtx lookup_const_wide_int (rtx); | 196 static rtx lookup_const_wide_int (rtx); |
186 #endif | 197 #endif |
187 static rtx lookup_const_double (rtx); | 198 static rtx lookup_const_double (rtx); |
188 static rtx lookup_const_fixed (rtx); | 199 static rtx lookup_const_fixed (rtx); |
189 static reg_attrs *get_reg_attrs (tree, int); | |
190 static rtx gen_const_vector (machine_mode, int); | 200 static rtx gen_const_vector (machine_mode, int); |
191 static void copy_rtx_if_shared_1 (rtx *orig); | 201 static void copy_rtx_if_shared_1 (rtx *orig); |
192 | 202 |
193 /* Probability of the conditional branch currently proceeded by try_split. */ | 203 /* Probability of the conditional branch currently proceeded by try_split. */ |
194 profile_probability split_branch_probability; | 204 profile_probability split_branch_probability; |
245 return false; | 255 return false; |
246 | 256 |
247 return true; | 257 return true; |
248 } | 258 } |
249 #endif | 259 #endif |
260 | |
261 /* Returns a hash code for CONST_POLY_INT X. */ | |
262 | |
263 hashval_t | |
264 const_poly_int_hasher::hash (rtx x) | |
265 { | |
266 inchash::hash h; | |
267 h.add_int (GET_MODE (x)); | |
268 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) | |
269 h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]); | |
270 return h.end (); | |
271 } | |
272 | |
273 /* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y. */ | |
274 | |
275 bool | |
276 const_poly_int_hasher::equal (rtx x, const compare_type &y) | |
277 { | |
278 if (GET_MODE (x) != y.first) | |
279 return false; | |
280 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) | |
281 if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i]) | |
282 return false; | |
283 return true; | |
284 } | |
250 | 285 |
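Note: the new const_poly_int_hasher above exists so that CONST_POLY_INT rtxes are interned, i.e. one canonical object per (mode, coefficients) pair, mirroring what const_wide_int_htab already does for CONST_WIDE_INT. A minimal self-contained sketch of the same pattern, with std::unordered_map standing in for GCC's GC-aware hash_table and a fixed two-coefficient struct standing in for poly_wide_int (all names below are illustrative, not GCC API):

```cpp
#include <cassert>
#include <cstdint>
#include <memory>
#include <unordered_map>

// Two-coefficient stand-in for poly_wide_int: value = c[0] + c[1] * N.
struct PolyVal {
  int64_t c[2];
  bool operator== (const PolyVal &o) const
  { return c[0] == o.c[0] && c[1] == o.c[1]; }
};

struct Key { int mode; PolyVal val; };
struct KeyHash {
  size_t operator() (const Key &k) const {
    // Mix mode and each coefficient, like hash()'s add_int/add_wide_int loop.
    size_t h = std::hash<int>() (k.mode);
    for (int64_t coeff : k.val.c)
      h = h * 1000003u ^ std::hash<int64_t>() (coeff);
    return h;
  }
};
struct KeyEq {
  bool operator() (const Key &a, const Key &b) const
  { return a.mode == b.mode && a.val == b.val; }
};

struct ConstPoly { int mode; PolyVal val; };  // the interned "rtx"

ConstPoly *
intern_const_poly (int mode, PolyVal val)
{
  static std::unordered_map<Key, std::unique_ptr<ConstPoly>,
			    KeyHash, KeyEq> tab;
  std::unique_ptr<ConstPoly> &slot = tab[Key {mode, val}];
  if (!slot)
    slot.reset (new ConstPoly {mode, val});
  return slot.get ();
}

int
main ()
{
  ConstPoly *a = intern_const_poly (1, {{4, 1}});
  ConstPoly *b = intern_const_poly (1, {{4, 1}});
  assert (a == b);  // structural equality implies pointer equality
}
```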
251 /* Returns a hash code for X (which is really a CONST_DOUBLE). */ | 286 /* Returns a hash code for X (which is really a CONST_DOUBLE). */ |
252 hashval_t | 287 hashval_t |
253 const_double_hasher::hash (rtx x) | 288 const_double_hasher::hash (rtx x) |
254 { | 289 { |
319 return true; | 354 return true; |
320 if (!p || !q) | 355 if (!p || !q) |
321 return false; | 356 return false; |
322 return (p->alias == q->alias | 357 return (p->alias == q->alias |
323 && p->offset_known_p == q->offset_known_p | 358 && p->offset_known_p == q->offset_known_p |
324 && (!p->offset_known_p || p->offset == q->offset) | 359 && (!p->offset_known_p || known_eq (p->offset, q->offset)) |
325 && p->size_known_p == q->size_known_p | 360 && p->size_known_p == q->size_known_p |
326 && (!p->size_known_p || p->size == q->size) | 361 && (!p->size_known_p || known_eq (p->size, q->size)) |
327 && p->align == q->align | 362 && p->align == q->align |
328 && p->addrspace == q->addrspace | 363 && p->addrspace == q->addrspace |
329 && (p->expr == q->expr | 364 && (p->expr == q->expr |
330 || (p->expr != NULL_TREE && q->expr != NULL_TREE | 365 || (p->expr != NULL_TREE && q->expr != NULL_TREE |
331 && operand_equal_p (p->expr, q->expr, 0)))); | 366 && operand_equal_p (p->expr, q->expr, 0)))); |
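Note: known_eq replaces plain == on the now-poly offset and size fields. A poly value c0 + c1*N is only *known* equal to another when equality holds for every runtime N, so == would be ambiguous; the conversion forces each comparison site to say whether it needs must-hold or may-hold semantics. A fixed two-coefficient model of the predicates used throughout this diff, assuming N ranges over nonnegative integers (a sketch, not the real poly-int.h templates):

```cpp
#include <cassert>
#include <cstdint>

// Poly value c0 + c1 * N, N a nonnegative runtime parameter.
struct Poly { int64_t c0, c1; };

// Must-hold predicates: true only if true for *every* N.
bool known_eq (Poly a, Poly b) { return a.c0 == b.c0 && a.c1 == b.c1; }
bool known_le (Poly a, Poly b) { return a.c0 <= b.c0 && a.c1 <= b.c1; }
bool known_ge (Poly a, Poly b) { return known_le (b, a); }

// May-hold predicates: true if true for *some* N.
bool maybe_ne (Poly a, Poly b) { return !known_eq (a, b); }
bool maybe_lt (Poly a, Poly b) { return !known_ge (a, b); }
bool maybe_gt (Poly a, Poly b) { return !known_le (a, b); }

// Two poly values can also be incomparable: neither <= nor >=.
bool ordered_p (Poly a, Poly b)
{ return known_le (a, b) || known_ge (a, b); }

int
main ()
{
  Poly x {4, 1}, y {8, 1}, z {6, 0};
  assert (known_le (x, y));              // 4+N <= 8+N for all N
  assert (!ordered_p (x, z));            // 4+N vs 6: depends on N
  assert (maybe_lt (x, z) && maybe_gt (x, z));
}
```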
356 hashval_t | 391 hashval_t |
357 reg_attr_hasher::hash (reg_attrs *x) | 392 reg_attr_hasher::hash (reg_attrs *x) |
358 { | 393 { |
359 const reg_attrs *const p = x; | 394 const reg_attrs *const p = x; |
360 | 395 |
361 return ((p->offset * 1000) ^ (intptr_t) p->decl); | 396 inchash::hash h; |
397 h.add_ptr (p->decl); | |
398 h.add_poly_hwi (p->offset); | |
399 return h.end (); | |
362 } | 400 } |
363 | 401 |
364 /* Returns nonzero if the value represented by X is the same as that given by | 402 /* Returns nonzero if the value represented by X is the same as that given by |
365 Y. */ | 403 Y. */ |
366 | 404 |
368 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y) | 406 reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y) |
369 { | 407 { |
370 const reg_attrs *const p = x; | 408 const reg_attrs *const p = x; |
371 const reg_attrs *const q = y; | 409 const reg_attrs *const q = y; |
372 | 410 |
373 return (p->decl == q->decl && p->offset == q->offset); | 411 return (p->decl == q->decl && known_eq (p->offset, q->offset)); |
374 } | 412 } |
375 /* Allocate a new reg_attrs structure and insert it into the hash table if | 413 /* Allocate a new reg_attrs structure and insert it into the hash table if |
376 one identical to it is not already in the table. We are doing this for | 414 one identical to it is not already in the table. We are doing this for |
377 MEM of mode MODE. */ | 415 MEM of mode MODE. */ |
378 | 416 |
379 static reg_attrs * | 417 static reg_attrs * |
380 get_reg_attrs (tree decl, int offset) | 418 get_reg_attrs (tree decl, poly_int64 offset) |
381 { | 419 { |
382 reg_attrs attrs; | 420 reg_attrs attrs; |
383 | 421 |
384 /* If everything is the default, we can just return zero. */ | 422 /* If everything is the default, we can just return zero. */ |
385 if (decl == 0 && offset == 0) | 423 if (decl == 0 && known_eq (offset, 0)) |
386 return 0; | 424 return 0; |
387 | 425 |
388 attrs.decl = decl; | 426 attrs.decl = decl; |
389 attrs.offset = offset; | 427 attrs.offset = offset; |
390 | 428 |
487 | 525 |
488 return *slot; | 526 return *slot; |
489 } | 527 } |
490 | 528 |
491 rtx | 529 rtx |
492 gen_int_mode (HOST_WIDE_INT c, machine_mode mode) | 530 gen_int_mode (poly_int64 c, machine_mode mode) |
493 { | 531 { |
494 return GEN_INT (trunc_int_for_mode (c, mode)); | 532 c = trunc_int_for_mode (c, mode); |
533 if (c.is_constant ()) | |
534 return GEN_INT (c.coeffs[0]); | |
535 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode)); | |
536 return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode); | |
495 } | 537 } |
496 | 538 |
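Note: gen_int_mode now takes a poly_int64 and only falls back to immed_wide_int_const when the truncated value has a nonzero N-coefficient; compile-time constants keep taking the old GEN_INT path. A schematic model of that dispatch (function and type names are stand-ins, not GCC API):

```cpp
#include <cassert>
#include <cstdint>
#include <string>

struct Poly {
  int64_t c0, c1;                       // value = c0 + c1 * N
  bool is_constant () const { return c1 == 0; }
};

// Stand-in for the two construction paths in the new gen_int_mode.
std::string
gen_int_mode_path (Poly c)
{
  if (c.is_constant ())
    return "GEN_INT";                   // shared CONST_INT, as before
  return "immed_wide_int_const";        // interned CONST_POLY_INT
}

int
main ()
{
  assert (gen_int_mode_path ({16, 0}) == "GEN_INT");
  assert (gen_int_mode_path ({0, 16}) == "immed_wide_int_const");
}
```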
497 /* CONST_DOUBLEs might be created from pairs of integers, or from | 539 /* CONST_DOUBLEs might be created from pairs of integers, or from |
498 REAL_VALUE_TYPEs. Also, their length is known only at run time, | 540 REAL_VALUE_TYPEs. Also, their length is known only at run time, |
499 so we cannot use gen_rtx_raw_CONST_DOUBLE. */ | 541 so we cannot use gen_rtx_raw_CONST_DOUBLE. */ |
593 /* Return an rtx constant for V, given that the constant has mode MODE. | 635 /* Return an rtx constant for V, given that the constant has mode MODE. |
594 The returned rtx will be a CONST_INT if V fits, otherwise it will be | 636 The returned rtx will be a CONST_INT if V fits, otherwise it will be |
595 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT | 637 a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT |
596 (if TARGET_SUPPORTS_WIDE_INT). */ | 638 (if TARGET_SUPPORTS_WIDE_INT). */ |
597 | 639 |
598 rtx | 640 static rtx |
599 immed_wide_int_const (const wide_int_ref &v, machine_mode mode) | 641 immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode) |
600 { | 642 { |
601 unsigned int len = v.get_len (); | 643 unsigned int len = v.get_len (); |
602 /* Not scalar_int_mode because we also allow pointer bound modes. */ | 644 /* Not scalar_int_mode because we also allow pointer bound modes. */ |
603 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode)); | 645 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode)); |
604 | 646 |
680 XWINT (value, i) = 0; | 722 XWINT (value, i) = 0; |
681 | 723 |
682 return lookup_const_double (value); | 724 return lookup_const_double (value); |
683 } | 725 } |
684 #endif | 726 #endif |
727 | |
728 /* Return an rtx representation of C in mode MODE. */ | |
729 | |
730 rtx | |
731 immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode) | |
732 { | |
733 if (c.is_constant ()) | |
734 return immed_wide_int_const_1 (c.coeffs[0], mode); | |
735 | |
736 /* Not scalar_int_mode because we also allow pointer bound modes. */ | |
737 unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode)); | |
738 | |
739 /* Allow truncation but not extension since we do not know if the | |
740 number is signed or unsigned. */ | |
741 gcc_assert (prec <= c.coeffs[0].get_precision ()); | |
742 poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED); | |
743 | |
744 /* See whether we already have an rtx for this constant. */ | |
745 inchash::hash h; | |
746 h.add_int (mode); | |
747 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) | |
748 h.add_wide_int (newc.coeffs[i]); | |
749 const_poly_int_hasher::compare_type typed_value (mode, newc); | |
750 rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value, | |
751 h.end (), INSERT); | |
752 rtx x = *slot; | |
753 if (x) | |
754 return x; | |
755 | |
756 /* Create a new rtx. There's a choice to be made here between installing | |
757 the actual mode of the rtx or leaving it as VOIDmode (for consistency | |
758 with CONST_INT). In practice the handling of the codes is different | |
759 enough that we get no benefit from using VOIDmode, and various places | |
760 assume that VOIDmode implies CONST_INT. Using the real mode seems like | |
761 the right long-term direction anyway. */ | |
762 typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi; | |
763 size_t extra_size = twi::extra_size (prec); | |
764 x = rtx_alloc_v (CONST_POLY_INT, | |
765 sizeof (struct const_poly_int_def) + extra_size); | |
766 PUT_MODE (x, mode); | |
767 CONST_POLY_INT_COEFFS (x).set_precision (prec); | |
768 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i) | |
769 CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i]; | |
770 | |
771 *slot = x; | |
772 return x; | |
773 } | |
685 | 774 |
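Note: the gcc_assert (prec <= c.coeffs[0].get_precision ()) above encodes that truncating a wide value is always unambiguous while extending one is not, because without a signedness the new high bits are unknown. A concrete instance (two's-complement narrowing, as on GCC targets):

```cpp
#include <cassert>
#include <cstdint>

int
main ()
{
  // Truncation: the low bits are the same whatever the sign was.
  uint16_t wide = 0x8001;
  assert ((uint8_t) wide == 0x01);

  // Extension: the same 8-bit pattern widens to two different
  // 16-bit values depending on the signedness we assume.
  uint8_t narrow = 0x81;
  assert ((uint16_t) (int16_t) (int8_t) narrow == 0xFF81);  // signed
  assert ((uint16_t) narrow == 0x0081);                     // unsigned
}
```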
686 rtx | 775 rtx |
687 gen_rtx_REG (machine_mode mode, unsigned int regno) | 776 gen_rtx_REG (machine_mode mode, unsigned int regno) |
688 { | 777 { |
689 /* In case the MD file explicitly references the frame pointer, have | 778 /* In case the MD file explicitly references the frame pointer, have |
801 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if | 890 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if |
802 this construct would be valid, and false otherwise. */ | 891 this construct would be valid, and false otherwise. */ |
803 | 892 |
804 bool | 893 bool |
805 validate_subreg (machine_mode omode, machine_mode imode, | 894 validate_subreg (machine_mode omode, machine_mode imode, |
806 const_rtx reg, unsigned int offset) | 895 const_rtx reg, poly_uint64 offset) |
807 { | 896 { |
808 unsigned int isize = GET_MODE_SIZE (imode); | 897 poly_uint64 isize = GET_MODE_SIZE (imode); |
809 unsigned int osize = GET_MODE_SIZE (omode); | 898 poly_uint64 osize = GET_MODE_SIZE (omode); |
899 | |
900 /* The sizes must be ordered, so that we know whether the subreg | |
901 is partial, paradoxical or complete. */ | |
902 if (!ordered_p (isize, osize)) | |
903 return false; | |
810 | 904 |
811 /* All subregs must be aligned. */ | 905 /* All subregs must be aligned. */ |
812 if (offset % osize != 0) | 906 if (!multiple_p (offset, osize)) |
813 return false; | 907 return false; |
814 | 908 |
815 /* The subreg offset cannot be outside the inner object. */ | 909 /* The subreg offset cannot be outside the inner object. */ |
816 if (offset >= isize) | 910 if (maybe_ge (offset, isize)) |
817 return false; | 911 return false; |
912 | |
913 poly_uint64 regsize = REGMODE_NATURAL_SIZE (imode); | |
818 | 914 |
819 /* ??? This should not be here. Temporarily continue to allow word_mode | 915 /* ??? This should not be here. Temporarily continue to allow word_mode |
820 subregs of anything. The most common offender is (subreg:SI (reg:DF)). | 916 subregs of anything. The most common offender is (subreg:SI (reg:DF)). |
821 Generally, backends are doing something sketchy but it'll take time to | 917 Generally, backends are doing something sketchy but it'll take time to |
822 fix them all. */ | 918 fix them all. */ |
823 if (omode == word_mode) | 919 if (omode == word_mode) |
824 ; | 920 ; |
825 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field | 921 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field |
826 is the culprit here, and not the backends. */ | 922 is the culprit here, and not the backends. */ |
827 else if (osize >= UNITS_PER_WORD && isize >= osize) | 923 else if (known_ge (osize, regsize) && known_ge (isize, osize)) |
828 ; | 924 ; |
829 /* Allow component subregs of complex and vector. Though given the below | 925 /* Allow component subregs of complex and vector. Though given the below |
830 extraction rules, it's not always clear what that means. */ | 926 extraction rules, it's not always clear what that means. */ |
831 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode)) | 927 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode)) |
832 && GET_MODE_INNER (imode) == omode) | 928 && GET_MODE_INNER (imode) == omode) |
841 /* Subregs involving floating point modes are not allowed to | 937 /* Subregs involving floating point modes are not allowed to |
842 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but | 938 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but |
843 (subreg:SI (reg:DF) 0) isn't. */ | 939 (subreg:SI (reg:DF) 0) isn't. */ |
844 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode)) | 940 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode)) |
845 { | 941 { |
846 if (! (isize == osize | 942 if (! (known_eq (isize, osize) |
847 /* LRA can use subreg to store a floating point value in | 943 /* LRA can use subreg to store a floating point value in |
848 an integer mode. Although the floating point and the | 944 an integer mode. Although the floating point and the |
849 integer modes need the same number of hard registers, | 945 integer modes need the same number of hard registers, |
850 the size of floating point mode can be less than the | 946 the size of floating point mode can be less than the |
851 integer mode. LRA also uses subregs for a register | 947 integer mode. LRA also uses subregs for a register |
853 || lra_in_progress)) | 949 || lra_in_progress)) |
854 return false; | 950 return false; |
855 } | 951 } |
856 | 952 |
857 /* Paradoxical subregs must have offset zero. */ | 953 /* Paradoxical subregs must have offset zero. */ |
858 if (osize > isize) | 954 if (maybe_gt (osize, isize)) |
859 return offset == 0; | 955 return known_eq (offset, 0U); |
860 | 956 |
861 /* This is a normal subreg. Verify that the offset is representable. */ | 957 /* This is a normal subreg. Verify that the offset is representable. */ |
862 | 958 |
863 /* For hard registers, we already have most of these rules collected in | 959 /* For hard registers, we already have most of these rules collected in |
864 subreg_offset_representable_p. */ | 960 subreg_offset_representable_p. */ |
873 return false; | 969 return false; |
874 | 970 |
875 return subreg_offset_representable_p (regno, imode, offset, omode); | 971 return subreg_offset_representable_p (regno, imode, offset, omode); |
876 } | 972 } |
877 | 973 |
974 /* The outer size must be ordered wrt the register size, otherwise | |
975 we wouldn't know at compile time how many registers the outer | |
976 mode occupies. */ | |
977 if (!ordered_p (osize, regsize)) | |
978 return false; | |
979 | |
878 /* For pseudo registers, we want most of the same checks. Namely: | 980 /* For pseudo registers, we want most of the same checks. Namely: |
879 If the register no larger than a word, the subreg must be lowpart. | 981 |
880 If the register is larger than a word, the subreg must be the lowpart | 982 Assume that the pseudo register will be allocated to hard registers |
881 of a subword. A subreg does *not* perform arbitrary bit extraction. | 983 that can hold REGSIZE bytes each. If OSIZE is not a multiple of REGSIZE, |
882 Given that we've already checked mode/offset alignment, we only have | 984 the remainder must correspond to the lowpart of the containing hard |
883 to check subword subregs here. */ | 985 register. If BYTES_BIG_ENDIAN, the lowpart is at the highest offset, |
884 if (osize < UNITS_PER_WORD | 986 otherwise it is at the lowest offset. |
987 | |
988 Given that we've already checked the mode and offset alignment, | |
989 we only have to check subblock subregs here. */ | |
990 if (maybe_lt (osize, regsize) | |
885 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode)))) | 991 && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode)))) |
886 { | 992 { |
887 machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode; | 993 /* It is invalid for the target to pick a register size for a mode |
888 unsigned int low_off = subreg_lowpart_offset (omode, wmode); | 994 that isn't ordered wrt to the size of that mode. */ |
889 if (offset % UNITS_PER_WORD != low_off) | 995 poly_uint64 block_size = ordered_min (isize, regsize); |
996 unsigned int start_reg; | |
997 poly_uint64 offset_within_reg; | |
998 if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg) | |
999 || (BYTES_BIG_ENDIAN | |
1000 ? maybe_ne (offset_within_reg, block_size - osize) | |
1001 : maybe_ne (offset_within_reg, 0U))) | |
890 return false; | 1002 return false; |
891 } | 1003 } |
892 return true; | 1004 return true; |
893 } | 1005 } |
894 | 1006 |
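Note: in validate_subreg the alignment test offset % osize != 0 becomes !multiple_p (offset, osize) because divisibility of a poly value must hold for every N before the subreg can be accepted. A fixed-coefficient model with a constant divisor (GCC's multiple_p also accepts poly divisors; this sketch does not):

```cpp
#include <cassert>
#include <cstdint>

struct Poly { int64_t c0, c1; };        // value = c0 + c1 * N

// Known-multiple test against a constant divisor B: every
// coefficient must be divisible, since N is unknown.
bool
multiple_p (Poly a, int64_t b)
{
  return a.c0 % b == 0 && a.c1 % b == 0;
}

int
main ()
{
  assert (multiple_p ({8, 16}, 8));     // 8 + 16N is 8-aligned for all N
  assert (!multiple_p ({8, 4}, 8));     // 8 + 4N: alignment depends on N
}
```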
895 rtx | 1007 rtx |
896 gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset) | 1008 gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset) |
897 { | 1009 { |
898 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset)); | 1010 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset)); |
899 return gen_rtx_raw_SUBREG (mode, reg, offset); | 1011 return gen_rtx_raw_SUBREG (mode, reg, offset); |
900 } | 1012 } |
901 | 1013 |
992 in-memory value and the start of an INNER_MODE in-memory value, | 1104 in-memory value and the start of an INNER_MODE in-memory value, |
993 given that the former is a lowpart of the latter. It may be a | 1105 given that the former is a lowpart of the latter. It may be a |
994 paradoxical lowpart, in which case the offset will be negative | 1106 paradoxical lowpart, in which case the offset will be negative |
995 on big-endian targets. */ | 1107 on big-endian targets. */ |
996 | 1108 |
997 int | 1109 poly_int64 |
998 byte_lowpart_offset (machine_mode outer_mode, | 1110 byte_lowpart_offset (machine_mode outer_mode, |
999 machine_mode inner_mode) | 1111 machine_mode inner_mode) |
1000 { | 1112 { |
1001 if (paradoxical_subreg_p (outer_mode, inner_mode)) | 1113 if (paradoxical_subreg_p (outer_mode, inner_mode)) |
1002 return -subreg_lowpart_offset (inner_mode, outer_mode); | 1114 return -subreg_lowpart_offset (inner_mode, outer_mode); |
1006 | 1118 |
1007 /* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET) | 1119 /* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET) |
1008 from address X. For paradoxical big-endian subregs this is a | 1120 from address X. For paradoxical big-endian subregs this is a |
1009 negative value, otherwise it's the same as OFFSET. */ | 1121 negative value, otherwise it's the same as OFFSET. */ |
1010 | 1122 |
1011 int | 1123 poly_int64 |
1012 subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode, | 1124 subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode, |
1013 unsigned int offset) | 1125 poly_uint64 offset) |
1014 { | 1126 { |
1015 if (paradoxical_subreg_p (outer_mode, inner_mode)) | 1127 if (paradoxical_subreg_p (outer_mode, inner_mode)) |
1016 { | 1128 { |
1017 gcc_assert (offset == 0); | 1129 gcc_assert (known_eq (offset, 0U)); |
1018 return -subreg_lowpart_offset (inner_mode, outer_mode); | 1130 return -subreg_lowpart_offset (inner_mode, outer_mode); |
1019 } | 1131 } |
1020 return offset; | 1132 return offset; |
1021 } | 1133 } |
1022 | 1134 |
1023 /* As above, but return the offset that existing subreg X would have | 1135 /* As above, but return the offset that existing subreg X would have |
1024 if SUBREG_REG (X) were stored in memory. The only significant thing | 1136 if SUBREG_REG (X) were stored in memory. The only significant thing |
1025 about the current SUBREG_REG is its mode. */ | 1137 about the current SUBREG_REG is its mode. */ |
1026 | 1138 |
1027 int | 1139 poly_int64 |
1028 subreg_memory_offset (const_rtx x) | 1140 subreg_memory_offset (const_rtx x) |
1029 { | 1141 { |
1030 return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)), | 1142 return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)), |
1031 SUBREG_BYTE (x)); | 1143 SUBREG_BYTE (x)); |
1032 } | 1144 } |
1122 | 1234 |
1123 /* Update NEW with the same attributes as REG, but with OFFSET added | 1235 /* Update NEW with the same attributes as REG, but with OFFSET added |
1124 to the REG_OFFSET. */ | 1236 to the REG_OFFSET. */ |
1125 | 1237 |
1126 static void | 1238 static void |
1127 update_reg_offset (rtx new_rtx, rtx reg, int offset) | 1239 update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset) |
1128 { | 1240 { |
1129 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg), | 1241 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg), |
1130 REG_OFFSET (reg) + offset); | 1242 REG_OFFSET (reg) + offset); |
1131 } | 1243 } |
1132 | 1244 |
1133 /* Generate a register with same attributes as REG, but with OFFSET | 1245 /* Generate a register with same attributes as REG, but with OFFSET |
1134 added to the REG_OFFSET. */ | 1246 added to the REG_OFFSET. */ |
1135 | 1247 |
1136 rtx | 1248 rtx |
1137 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno, | 1249 gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno, |
1138 int offset) | 1250 poly_int64 offset) |
1139 { | 1251 { |
1140 rtx new_rtx = gen_rtx_REG (mode, regno); | 1252 rtx new_rtx = gen_rtx_REG (mode, regno); |
1141 | 1253 |
1142 update_reg_offset (new_rtx, reg, offset); | 1254 update_reg_offset (new_rtx, reg, offset); |
1143 return new_rtx; | 1255 return new_rtx; |
1169 have different modes, REG is a (possibly paradoxical) lowpart of X. */ | 1281 have different modes, REG is a (possibly paradoxical) lowpart of X. */ |
1170 | 1282 |
1171 void | 1283 void |
1172 set_reg_attrs_from_value (rtx reg, rtx x) | 1284 set_reg_attrs_from_value (rtx reg, rtx x) |
1173 { | 1285 { |
1174 int offset; | 1286 poly_int64 offset; |
1175 bool can_be_reg_pointer = true; | 1287 bool can_be_reg_pointer = true; |
1176 | 1288 |
1177 /* Don't call mark_reg_pointer for incompatible pointer sign | 1289 /* Don't call mark_reg_pointer for incompatible pointer sign |
1178 extension. */ | 1290 extension. */ |
1179 while (GET_CODE (x) == SIGN_EXTEND | 1291 while (GET_CODE (x) == SIGN_EXTEND |
1417 If this is not a case we can handle, return 0. */ | 1529 If this is not a case we can handle, return 0. */ |
1418 | 1530 |
1419 rtx | 1531 rtx |
1420 gen_lowpart_common (machine_mode mode, rtx x) | 1532 gen_lowpart_common (machine_mode mode, rtx x) |
1421 { | 1533 { |
1422 int msize = GET_MODE_SIZE (mode); | 1534 poly_uint64 msize = GET_MODE_SIZE (mode); |
1423 int xsize; | |
1424 machine_mode innermode; | 1535 machine_mode innermode; |
1425 | 1536 |
1426 /* Unfortunately, this routine doesn't take a parameter for the mode of X, | 1537 /* Unfortunately, this routine doesn't take a parameter for the mode of X, |
1427 so we have to make one up. Yuk. */ | 1538 so we have to make one up. Yuk. */ |
1428 innermode = GET_MODE (x); | 1539 innermode = GET_MODE (x); |
1429 if (CONST_INT_P (x) | 1540 if (CONST_INT_P (x) |
1430 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT) | 1541 && known_le (msize * BITS_PER_UNIT, |
1542 (unsigned HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT)) | |
1431 innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require (); | 1543 innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require (); |
1432 else if (innermode == VOIDmode) | 1544 else if (innermode == VOIDmode) |
1433 innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require (); | 1545 innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require (); |
1434 | 1546 |
1435 xsize = GET_MODE_SIZE (innermode); | |
1436 | |
1437 gcc_assert (innermode != VOIDmode && innermode != BLKmode); | 1547 gcc_assert (innermode != VOIDmode && innermode != BLKmode); |
1438 | 1548 |
1439 if (innermode == mode) | 1549 if (innermode == mode) |
1440 return x; | 1550 return x; |
1441 | 1551 |
1442 /* MODE must occupy no more words than the mode of X. */ | 1552 /* The size of the outer and inner modes must be ordered. */ |
1443 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD | 1553 poly_uint64 xsize = GET_MODE_SIZE (innermode); |
1444 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)) | 1554 if (!ordered_p (msize, xsize)) |
1445 return 0; | 1555 return 0; |
1446 | 1556 |
1447 /* Don't allow generating paradoxical FLOAT_MODE subregs. */ | 1557 if (SCALAR_FLOAT_MODE_P (mode)) |
1448 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize) | 1558 { |
1449 return 0; | 1559 /* Don't allow paradoxical FLOAT_MODE subregs. */ |
1560 if (maybe_gt (msize, xsize)) | |
1561 return 0; | |
1562 } | |
1563 else | |
1564 { | |
1565 /* MODE must occupy no more of the underlying registers than X. */ | |
1566 poly_uint64 regsize = REGMODE_NATURAL_SIZE (innermode); | |
1567 unsigned int mregs, xregs; | |
1568 if (!can_div_away_from_zero_p (msize, regsize, &mregs) | |
1569 || !can_div_away_from_zero_p (xsize, regsize, &xregs) | |
1570 || mregs > xregs) | |
1571 return 0; | |
1572 } | |
1450 | 1573 |
1451 scalar_int_mode int_mode, int_innermode, from_mode; | 1574 scalar_int_mode int_mode, int_innermode, from_mode; |
1452 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND) | 1575 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND) |
1453 && is_a <scalar_int_mode> (mode, &int_mode) | 1576 && is_a <scalar_int_mode> (mode, &int_mode) |
1454 && is_a <scalar_int_mode> (innermode, &int_innermode) | 1577 && is_a <scalar_int_mode> (innermode, &int_innermode) |
1469 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode)) | 1592 else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode)) |
1470 return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0)); | 1593 return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0)); |
1471 } | 1594 } |
1472 else if (GET_CODE (x) == SUBREG || REG_P (x) | 1595 else if (GET_CODE (x) == SUBREG || REG_P (x) |
1473 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR | 1596 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR |
1474 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)) | 1597 || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x) |
1598 || CONST_POLY_INT_P (x)) | |
1475 return lowpart_subreg (mode, x, innermode); | 1599 return lowpart_subreg (mode, x, innermode); |
1476 | 1600 |
1477 /* Otherwise, we can't do this. */ | 1601 /* Otherwise, we can't do this. */ |
1478 return 0; | 1602 return 0; |
1479 } | 1603 } |
1480 | 1604 |
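Note: the old word-count comparison in gen_lowpart_common becomes a register-count comparison against REGMODE_NATURAL_SIZE, and with poly sizes the two ceiling divisions must additionally have compile-time-constant quotients (can_div_away_from_zero_p fails otherwise). The constant-size core of the check, with an assumed register size:

```cpp
#include <cassert>

// Ceiling division: how many registers of REGSIZE bytes a mode needs.
unsigned
ceil_div (unsigned bytes, unsigned regsize)
{
  return (bytes + regsize - 1) / regsize;
}

int
main ()
{
  const unsigned regsize = 8;           // assumed bytes per hard register
  // msize = 16 needs 2 regs, xsize = 24 has 3: lowpart is allowed.
  assert (ceil_div (16, regsize) <= ceil_div (24, regsize));
  // msize = 20 also needs 3 regs, so a 24-byte X still suffices.
  assert (ceil_div (20, regsize) == 3);
}
```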
1481 rtx | 1605 rtx |
1482 gen_highpart (machine_mode mode, rtx x) | 1606 gen_highpart (machine_mode mode, rtx x) |
1483 { | 1607 { |
1484 unsigned int msize = GET_MODE_SIZE (mode); | 1608 poly_uint64 msize = GET_MODE_SIZE (mode); |
1485 rtx result; | 1609 rtx result; |
1486 | 1610 |
1487 /* This case loses if X is a subreg. To catch bugs early, | 1611 /* This case loses if X is a subreg. To catch bugs early, |
1488 complain if an invalid MODE is used even in other cases. */ | 1612 complain if an invalid MODE is used even in other cases. */ |
1489 gcc_assert (msize <= UNITS_PER_WORD | 1613 gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD) |
1490 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x))); | 1614 || known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x)))); |
1491 | 1615 |
1492 result = simplify_gen_subreg (mode, x, GET_MODE (x), | 1616 result = simplify_gen_subreg (mode, x, GET_MODE (x), |
1493 subreg_highpart_offset (mode, GET_MODE (x))); | 1617 subreg_highpart_offset (mode, GET_MODE (x))); |
1494 gcc_assert (result); | 1618 gcc_assert (result); |
1495 | 1619 |
1520 } | 1644 } |
1521 | 1645 |
1522 /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has | 1646 /* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has |
1523 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */ | 1647 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */ |
1524 | 1648 |
1525 unsigned int | 1649 poly_uint64 |
1526 subreg_size_lowpart_offset (unsigned int outer_bytes, unsigned int inner_bytes) | 1650 subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes) |
1527 { | 1651 { |
1528 if (outer_bytes > inner_bytes) | 1652 gcc_checking_assert (ordered_p (outer_bytes, inner_bytes)); |
1653 if (maybe_gt (outer_bytes, inner_bytes)) | |
1529 /* Paradoxical subregs always have a SUBREG_BYTE of 0. */ | 1654 /* Paradoxical subregs always have a SUBREG_BYTE of 0. */ |
1530 return 0; | 1655 return 0; |
1531 | 1656 |
1532 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN) | 1657 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN) |
1533 return inner_bytes - outer_bytes; | 1658 return inner_bytes - outer_bytes; |
1538 } | 1663 } |
1539 | 1664 |
1540 /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has | 1665 /* Return the SUBREG_BYTE for a highpart subreg whose outer mode has |
1541 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */ | 1666 OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes. */ |
1542 | 1667 |
1543 unsigned int | 1668 poly_uint64 |
1544 subreg_size_highpart_offset (unsigned int outer_bytes, | 1669 subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes) |
1545 unsigned int inner_bytes) | 1670 { |
1546 { | 1671 gcc_assert (known_ge (inner_bytes, outer_bytes)); |
1547 gcc_assert (inner_bytes >= outer_bytes); | |
1548 | 1672 |
1549 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN) | 1673 if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN) |
1550 return 0; | 1674 return 0; |
1551 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN) | 1675 else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN) |
1552 return inner_bytes - outer_bytes; | 1676 return inner_bytes - outer_bytes; |
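Note: the two subreg_size_*_offset functions above keep their endianness logic unchanged; only the byte counts become poly_uint64, plus the new ordered_p check and the known_ge assert. A fixed-size worked example, collapsing BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN into one flag for brevity:

```cpp
#include <cassert>

// SUBREG_BYTE of the low/high part of an INNER-byte value viewed
// in an OUTER-byte mode, for a uniformly big- or little-endian target.
unsigned
lowpart_offset (bool big_endian, unsigned outer, unsigned inner)
{
  if (outer > inner)
    return 0;                           // paradoxical subregs use offset 0
  return big_endian ? inner - outer : 0;
}

unsigned
highpart_offset (bool big_endian, unsigned outer, unsigned inner)
{
  assert (inner >= outer);
  return big_endian ? 0 : inner - outer;
}

int
main ()
{
  // SImode (4 bytes) part of a DImode (8 bytes) value:
  assert (lowpart_offset (false, 4, 8) == 0);   // little-endian
  assert (lowpart_offset (true, 4, 8) == 4);    // big-endian
  assert (highpart_offset (false, 4, 8) == 4);
  assert (highpart_offset (true, 4, 8) == 0);
}
```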
1566 if (GET_CODE (x) != SUBREG) | 1690 if (GET_CODE (x) != SUBREG) |
1567 return 1; | 1691 return 1; |
1568 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode) | 1692 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode) |
1569 return 0; | 1693 return 0; |
1570 | 1694 |
1571 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x))) | 1695 return known_eq (subreg_lowpart_offset (GET_MODE (x), |
1572 == SUBREG_BYTE (x)); | 1696 GET_MODE (SUBREG_REG (x))), |
1697 SUBREG_BYTE (x)); | |
1573 } | 1698 } |
1574 | 1699 |
1575 /* Return subword OFFSET of operand OP. | 1700 /* Return subword OFFSET of operand OP. |
1576 The word number, OFFSET, is interpreted as the word number starting | 1701 The word number, OFFSET, is interpreted as the word number starting |
1577 at the low-order address. OFFSET 0 is the low-order word if not | 1702 at the low-order address. OFFSET 0 is the low-order word if not |
1596 Now use of this function can be deprecated by simplify_subreg in most | 1721 Now use of this function can be deprecated by simplify_subreg in most |
1597 cases. | 1722 cases. |
1598 */ | 1723 */ |
1599 | 1724 |
1600 rtx | 1725 rtx |
1601 operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode) | 1726 operand_subword (rtx op, poly_uint64 offset, int validate_address, |
1727 machine_mode mode) | |
1602 { | 1728 { |
1603 if (mode == VOIDmode) | 1729 if (mode == VOIDmode) |
1604 mode = GET_MODE (op); | 1730 mode = GET_MODE (op); |
1605 | 1731 |
1606 gcc_assert (mode != VOIDmode); | 1732 gcc_assert (mode != VOIDmode); |
1607 | 1733 |
1608 /* If OP is narrower than a word, fail. */ | 1734 /* If OP is narrower than a word, fail. */ |
1609 if (mode != BLKmode | 1735 if (mode != BLKmode |
1610 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD)) | 1736 && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD)) |
1611 return 0; | 1737 return 0; |
1612 | 1738 |
1613 /* If we want a word outside OP, return zero. */ | 1739 /* If we want a word outside OP, return zero. */ |
1614 if (mode != BLKmode | 1740 if (mode != BLKmode |
1615 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode)) | 1741 && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode))) |
1616 return const0_rtx; | 1742 return const0_rtx; |
1617 | 1743 |
1618 /* Form a new MEM at the requested address. */ | 1744 /* Form a new MEM at the requested address. */ |
1619 if (MEM_P (op)) | 1745 if (MEM_P (op)) |
1620 { | 1746 { |
1644 this case. | 1770 this case. |
1645 | 1771 |
1646 MODE is the mode of OP, in case it is CONST_INT. */ | 1772 MODE is the mode of OP, in case it is CONST_INT. */ |
1647 | 1773 |
1648 rtx | 1774 rtx |
1649 operand_subword_force (rtx op, unsigned int offset, machine_mode mode) | 1775 operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode) |
1650 { | 1776 { |
1651 rtx result = operand_subword (op, offset, 1, mode); | 1777 rtx result = operand_subword (op, offset, 1, mode); |
1652 | 1778 |
1653 if (result) | 1779 if (result) |
1654 return result; | 1780 return result; |
1667 gcc_assert (result); | 1793 gcc_assert (result); |
1668 | 1794 |
1669 return result; | 1795 return result; |
1670 } | 1796 } |
1671 | 1797 |
1798 mem_attrs::mem_attrs () | |
1799 : expr (NULL_TREE), | |
1800 offset (0), | |
1801 size (0), | |
1802 alias (0), | |
1803 align (0), | |
1804 addrspace (ADDR_SPACE_GENERIC), | |
1805 offset_known_p (false), | |
1806 size_known_p (false) | |
1807 {} | |
1808 | |
1672 /* Returns 1 if both MEM_EXPR can be considered equal | 1809 /* Returns 1 if both MEM_EXPR can be considered equal |
1673 and 0 otherwise. */ | 1810 and 0 otherwise. */ |
1674 | 1811 |
1675 int | 1812 int |
1676 mem_expr_equal_p (const_tree expr1, const_tree expr2) | 1813 mem_expr_equal_p (const_tree expr1, const_tree expr2) |
1693 | 1830 |
1694 int | 1831 int |
1695 get_mem_align_offset (rtx mem, unsigned int align) | 1832 get_mem_align_offset (rtx mem, unsigned int align) |
1696 { | 1833 { |
1697 tree expr; | 1834 tree expr; |
1698 unsigned HOST_WIDE_INT offset; | 1835 poly_uint64 offset; |
1699 | 1836 |
1700 /* This function can't use | 1837 /* This function can't use |
1701 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem) | 1838 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem) |
1702 || (MAX (MEM_ALIGN (mem), | 1839 || (MAX (MEM_ALIGN (mem), |
1703 MAX (align, get_object_alignment (MEM_EXPR (mem)))) | 1840 MAX (align, get_object_alignment (MEM_EXPR (mem)))) |
1735 tree inner = TREE_OPERAND (expr, 0); | 1872 tree inner = TREE_OPERAND (expr, 0); |
1736 tree field = TREE_OPERAND (expr, 1); | 1873 tree field = TREE_OPERAND (expr, 1); |
1737 tree byte_offset = component_ref_field_offset (expr); | 1874 tree byte_offset = component_ref_field_offset (expr); |
1738 tree bit_offset = DECL_FIELD_BIT_OFFSET (field); | 1875 tree bit_offset = DECL_FIELD_BIT_OFFSET (field); |
1739 | 1876 |
1877 poly_uint64 suboffset; | |
1740 if (!byte_offset | 1878 if (!byte_offset |
1741 || !tree_fits_uhwi_p (byte_offset) | 1879 || !poly_int_tree_p (byte_offset, &suboffset) |
1742 || !tree_fits_uhwi_p (bit_offset)) | 1880 || !tree_fits_uhwi_p (bit_offset)) |
1743 return -1; | 1881 return -1; |
1744 | 1882 |
1745 offset += tree_to_uhwi (byte_offset); | 1883 offset += suboffset; |
1746 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT; | 1884 offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT; |
1747 | 1885 |
1748 if (inner == NULL_TREE) | 1886 if (inner == NULL_TREE) |
1749 { | 1887 { |
1750 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field)) | 1888 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field)) |
1764 } | 1902 } |
1765 } | 1903 } |
1766 else | 1904 else |
1767 return -1; | 1905 return -1; |
1768 | 1906 |
1769 return offset & ((align / BITS_PER_UNIT) - 1); | 1907 HOST_WIDE_INT misalign; |
1908 if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign)) | |
1909 return -1; | |
1910 return misalign; | |
1770 } | 1911 } |
1771 | 1912 |
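Note: the old offset & (align / BITS_PER_UNIT - 1) is an unconditional bit trick; with a poly offset the residue is only computable when the N term cannot change it, which is why get_mem_align_offset can now return -1 through known_misalignment. A model with one indeterminate:

```cpp
#include <cassert>
#include <cstdint>

struct Poly { int64_t c0, c1; };        // offset = c0 + c1 * N

// The misalignment of OFFSET wrt ALIGN (a power of two, in bytes)
// is compile-time known only if the N term is itself ALIGN-aligned.
bool
known_misalignment (Poly offset, int64_t align, int64_t *misalign)
{
  if (offset.c1 % align != 0)
    return false;                       // residue depends on N
  *misalign = ((offset.c0 % align) + align) % align;
  return true;
}

int
main ()
{
  int64_t m;
  assert (known_misalignment ({6, 16}, 8, &m) && m == 6);
  assert (!known_misalignment ({6, 4}, 8, &m));   // unknown at compile time
}
```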
1772 /* Given REF (a MEM) and T, either the type of X or the expression | 1913 /* Given REF (a MEM) and T, either the type of X or the expression |
1773 corresponding to REF, set the memory attributes. OBJECTP is nonzero | 1914 corresponding to REF, set the memory attributes. OBJECTP is nonzero |
1774 if we are making a new object of this type. BITPOS is nonzero if | 1915 if we are making a new object of this type. BITPOS is nonzero if |
1775 there is an offset outstanding on T that will be applied later. */ | 1916 there is an offset outstanding on T that will be applied later. */ |
1776 | 1917 |
1777 void | 1918 void |
1778 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp, | 1919 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp, |
1779 HOST_WIDE_INT bitpos) | 1920 poly_int64 bitpos) |
1780 { | 1921 { |
1781 HOST_WIDE_INT apply_bitpos = 0; | 1922 poly_int64 apply_bitpos = 0; |
1782 tree type; | 1923 tree type; |
1783 struct mem_attrs attrs, *defattrs, *refattrs; | 1924 struct mem_attrs attrs, *defattrs, *refattrs; |
1784 addr_space_t as; | 1925 addr_space_t as; |
1785 | 1926 |
1786 /* It can happen that type_for_mode was given a mode for which there | 1927 /* It can happen that type_for_mode was given a mode for which there |
1796 /* If we have already set DECL_RTL = ref, get_alias_set will get the | 1937 /* If we have already set DECL_RTL = ref, get_alias_set will get the |
1797 wrong answer, as it assumes that DECL_RTL already has the right alias | 1938 wrong answer, as it assumes that DECL_RTL already has the right alias |
1798 info. Callers should not set DECL_RTL until after the call to | 1939 info. Callers should not set DECL_RTL until after the call to |
1799 set_mem_attributes. */ | 1940 set_mem_attributes. */ |
1800 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t)); | 1941 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t)); |
1801 | |
1802 memset (&attrs, 0, sizeof (attrs)); | |
1803 | 1942 |
1804 /* Get the alias set from the expression or type (perhaps using a | 1943 /* Get the alias set from the expression or type (perhaps using a |
1805 front-end routine) and use it. */ | 1944 front-end routine) and use it. */ |
1806 attrs.alias = get_alias_set (t); | 1945 attrs.alias = get_alias_set (t); |
1807 | 1946 |
1968 covers all valid accesses. */ | 2107 covers all valid accesses. */ |
1969 && ! array_at_struct_end_p (t))) | 2108 && ! array_at_struct_end_p (t))) |
1970 { | 2109 { |
1971 attrs.expr = t2; | 2110 attrs.expr = t2; |
1972 attrs.offset_known_p = false; | 2111 attrs.offset_known_p = false; |
1973 if (tree_fits_uhwi_p (off_tree)) | 2112 if (poly_int_tree_p (off_tree, &attrs.offset)) |
1974 { | 2113 { |
1975 attrs.offset_known_p = true; | 2114 attrs.offset_known_p = true; |
1976 attrs.offset = tree_to_uhwi (off_tree); | |
1977 apply_bitpos = bitpos; | 2115 apply_bitpos = bitpos; |
1978 } | 2116 } |
1979 } | 2117 } |
1980 /* Else do not record a MEM_EXPR. */ | 2118 /* Else do not record a MEM_EXPR. */ |
1981 } | 2119 } |
1992 | 2130 |
1993 /* Compute the alignment. */ | 2131 /* Compute the alignment. */ |
1994 unsigned int obj_align; | 2132 unsigned int obj_align; |
1995 unsigned HOST_WIDE_INT obj_bitpos; | 2133 unsigned HOST_WIDE_INT obj_bitpos; |
1996 get_object_alignment_1 (t, &obj_align, &obj_bitpos); | 2134 get_object_alignment_1 (t, &obj_align, &obj_bitpos); |
1997 obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1); | 2135 unsigned int diff_align = known_alignment (obj_bitpos - bitpos); |
1998 if (obj_bitpos != 0) | 2136 if (diff_align != 0) |
1999 obj_align = least_bit_hwi (obj_bitpos); | 2137 obj_align = MIN (obj_align, diff_align); |
2000 attrs.align = MAX (attrs.align, obj_align); | 2138 attrs.align = MAX (attrs.align, obj_align); |
2001 } | 2139 } |
2002 | 2140 |
2003 if (tree_fits_uhwi_p (new_size)) | 2141 poly_uint64 const_size; |
2142 if (poly_int_tree_p (new_size, &const_size)) | |
2004 { | 2143 { |
2005 attrs.size_known_p = true; | 2144 attrs.size_known_p = true; |
2006 attrs.size = tree_to_uhwi (new_size); | 2145 attrs.size = const_size; |
2007 } | 2146 } |
2008 | 2147 |
2009 /* If we modified OFFSET based on T, then subtract the outstanding | 2148 /* If we modified OFFSET based on T, then subtract the outstanding |
2010 bit position offset. Similarly, increase the size of the accessed | 2149 bit position offset. Similarly, increase the size of the accessed |
2011 object to contain the negative offset. */ | 2150 object to contain the negative offset. */ |
2012 if (apply_bitpos) | 2151 if (maybe_ne (apply_bitpos, 0)) |
2013 { | 2152 { |
2014 gcc_assert (attrs.offset_known_p); | 2153 gcc_assert (attrs.offset_known_p); |
2015 attrs.offset -= apply_bitpos / BITS_PER_UNIT; | 2154 poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos); |
2155 attrs.offset -= bytepos; | |
2016 if (attrs.size_known_p) | 2156 if (attrs.size_known_p) |
2017 attrs.size += apply_bitpos / BITS_PER_UNIT; | 2157 attrs.size += bytepos; |
2018 } | 2158 } |
2019 | 2159 |
2020 /* Now set the attributes we computed above. */ | 2160 /* Now set the attributes we computed above. */ |
2021 attrs.addrspace = as; | 2161 attrs.addrspace = as; |
2022 set_mem_attrs (ref, &attrs); | 2162 set_mem_attrs (ref, &attrs); |
2031 /* Set the alias set of MEM to SET. */ | 2171 /* Set the alias set of MEM to SET. */ |
2032 | 2172 |
2033 void | 2173 void |
2034 set_mem_alias_set (rtx mem, alias_set_type set) | 2174 set_mem_alias_set (rtx mem, alias_set_type set) |
2035 { | 2175 { |
2036 struct mem_attrs attrs; | |
2037 | |
2038 /* If the new and old alias sets don't conflict, something is wrong. */ | 2176 /* If the new and old alias sets don't conflict, something is wrong. */ |
2039 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem))); | 2177 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem))); |
2040 attrs = *get_mem_attrs (mem); | 2178 mem_attrs attrs (*get_mem_attrs (mem)); |
2041 attrs.alias = set; | 2179 attrs.alias = set; |
2042 set_mem_attrs (mem, &attrs); | 2180 set_mem_attrs (mem, &attrs); |
2043 } | 2181 } |
2044 | 2182 |
2045 /* Set the address space of MEM to ADDRSPACE (target-defined). */ | 2183 /* Set the address space of MEM to ADDRSPACE (target-defined). */ |
2046 | 2184 |
2047 void | 2185 void |
2048 set_mem_addr_space (rtx mem, addr_space_t addrspace) | 2186 set_mem_addr_space (rtx mem, addr_space_t addrspace) |
2049 { | 2187 { |
2050 struct mem_attrs attrs; | 2188 mem_attrs attrs (*get_mem_attrs (mem)); |
2051 | |
2052 attrs = *get_mem_attrs (mem); | |
2053 attrs.addrspace = addrspace; | 2189 attrs.addrspace = addrspace; |
2054 set_mem_attrs (mem, &attrs); | 2190 set_mem_attrs (mem, &attrs); |
2055 } | 2191 } |
2056 | 2192 |
2057 /* Set the alignment of MEM to ALIGN bits. */ | 2193 /* Set the alignment of MEM to ALIGN bits. */ |
2058 | 2194 |
2059 void | 2195 void |
2060 set_mem_align (rtx mem, unsigned int align) | 2196 set_mem_align (rtx mem, unsigned int align) |
2061 { | 2197 { |
2062 struct mem_attrs attrs; | 2198 mem_attrs attrs (*get_mem_attrs (mem)); |
2063 | |
2064 attrs = *get_mem_attrs (mem); | |
2065 attrs.align = align; | 2199 attrs.align = align; |
2066 set_mem_attrs (mem, &attrs); | 2200 set_mem_attrs (mem, &attrs); |
2067 } | 2201 } |
2068 | 2202 |
2069 /* Set the expr for MEM to EXPR. */ | 2203 /* Set the expr for MEM to EXPR. */ |
2070 | 2204 |
2071 void | 2205 void |
2072 set_mem_expr (rtx mem, tree expr) | 2206 set_mem_expr (rtx mem, tree expr) |
2073 { | 2207 { |
2074 struct mem_attrs attrs; | 2208 mem_attrs attrs (*get_mem_attrs (mem)); |
2075 | |
2076 attrs = *get_mem_attrs (mem); | |
2077 attrs.expr = expr; | 2209 attrs.expr = expr; |
2078 set_mem_attrs (mem, &attrs); | 2210 set_mem_attrs (mem, &attrs); |
2079 } | 2211 } |
2080 | 2212 |
2081 /* Set the offset of MEM to OFFSET. */ | 2213 /* Set the offset of MEM to OFFSET. */ |
2082 | 2214 |
2083 void | 2215 void |
2084 set_mem_offset (rtx mem, HOST_WIDE_INT offset) | 2216 set_mem_offset (rtx mem, poly_int64 offset) |
2085 { | 2217 { |
2086 struct mem_attrs attrs; | 2218 mem_attrs attrs (*get_mem_attrs (mem)); |
2087 | |
2088 attrs = *get_mem_attrs (mem); | |
2089 attrs.offset_known_p = true; | 2219 attrs.offset_known_p = true; |
2090 attrs.offset = offset; | 2220 attrs.offset = offset; |
2091 set_mem_attrs (mem, &attrs); | 2221 set_mem_attrs (mem, &attrs); |
2092 } | 2222 } |
2093 | 2223 |
2094 /* Clear the offset of MEM. */ | 2224 /* Clear the offset of MEM. */ |
2095 | 2225 |
2096 void | 2226 void |
2097 clear_mem_offset (rtx mem) | 2227 clear_mem_offset (rtx mem) |
2098 { | 2228 { |
2099 struct mem_attrs attrs; | 2229 mem_attrs attrs (*get_mem_attrs (mem)); |
2100 | |
2101 attrs = *get_mem_attrs (mem); | |
2102 attrs.offset_known_p = false; | 2230 attrs.offset_known_p = false; |
2103 set_mem_attrs (mem, &attrs); | 2231 set_mem_attrs (mem, &attrs); |
2104 } | 2232 } |
2105 | 2233 |
2106 /* Set the size of MEM to SIZE. */ | 2234 /* Set the size of MEM to SIZE. */ |
2107 | 2235 |
2108 void | 2236 void |
2109 set_mem_size (rtx mem, HOST_WIDE_INT size) | 2237 set_mem_size (rtx mem, poly_int64 size) |
2110 { | 2238 { |
2111 struct mem_attrs attrs; | 2239 mem_attrs attrs (*get_mem_attrs (mem)); |
2112 | |
2113 attrs = *get_mem_attrs (mem); | |
2114 attrs.size_known_p = true; | 2240 attrs.size_known_p = true; |
2115 attrs.size = size; | 2241 attrs.size = size; |
2116 set_mem_attrs (mem, &attrs); | 2242 set_mem_attrs (mem, &attrs); |
2117 } | 2243 } |
2118 | 2244 |
2119 /* Clear the size of MEM. */ | 2245 /* Clear the size of MEM. */ |
2120 | 2246 |
2121 void | 2247 void |
2122 clear_mem_size (rtx mem) | 2248 clear_mem_size (rtx mem) |
2123 { | 2249 { |
2124 struct mem_attrs attrs; | 2250 mem_attrs attrs (*get_mem_attrs (mem)); |
2125 | |
2126 attrs = *get_mem_attrs (mem); | |
2127 attrs.size_known_p = false; | 2251 attrs.size_known_p = false; |
2128 set_mem_attrs (mem, &attrs); | 2252 set_mem_attrs (mem, &attrs); |
2129 } | 2253 } |
2130 | 2254 |
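Note: every set_mem_* / clear_mem_* function above switches from a default-constructed local plus assignment to direct copy-construction, which goes together with the new mem_attrs default constructor replacing the memset in set_mem_attributes_minus_bitpos: once offset and size are poly_int64, the struct wants constructors rather than raw zero-fill. The shape of the pattern, with placeholder types and accessors (not GCC API):

```cpp
#include <cstdint>

// Placeholder attribute block; the real mem_attrs now default-
// constructs its poly_int64 members instead of being memset to 0.
struct Attrs
{
  int64_t offset = 0;
  int64_t size = 0;
  bool offset_known_p = false;
  bool size_known_p = false;
};

struct Mem { Attrs attrs; };                       // stand-in for a MEM rtx

const Attrs *get_attrs (const Mem *m) { return &m->attrs; }
void set_attrs (Mem *m, const Attrs &a) { m->attrs = a; }

// Read-modify-write one field, copying the rest unchanged.
void
set_size (Mem *mem, int64_t size)
{
  Attrs attrs (*get_attrs (mem));                  // copy-construct
  attrs.size_known_p = true;
  attrs.size = size;
  set_attrs (mem, attrs);
}

int
main ()
{
  Mem m;
  set_size (&m, 16);
  return m.attrs.size == 16 ? 0 : 1;
}
```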
2131 /* Return a memory reference like MEMREF, but with its mode changed to MODE | 2255 /* Return a memory reference like MEMREF, but with its mode changed to MODE |
2184 rtx | 2308 rtx |
2185 change_address (rtx memref, machine_mode mode, rtx addr) | 2309 change_address (rtx memref, machine_mode mode, rtx addr) |
2186 { | 2310 { |
2187 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false); | 2311 rtx new_rtx = change_address_1 (memref, mode, addr, 1, false); |
2188 machine_mode mmode = GET_MODE (new_rtx); | 2312 machine_mode mmode = GET_MODE (new_rtx); |
2189 struct mem_attrs attrs, *defattrs; | 2313 struct mem_attrs *defattrs; |
2190 | 2314 |
2191 attrs = *get_mem_attrs (memref); | 2315 mem_attrs attrs (*get_mem_attrs (memref)); |
2192 defattrs = mode_mem_attrs[(int) mmode]; | 2316 defattrs = mode_mem_attrs[(int) mmode]; |
2193 attrs.expr = NULL_TREE; | 2317 attrs.expr = NULL_TREE; |
2194 attrs.offset_known_p = false; | 2318 attrs.offset_known_p = false; |
2195 attrs.size_known_p = defattrs->size_known_p; | 2319 attrs.size_known_p = defattrs->size_known_p; |
2196 attrs.size = defattrs->size; | 2320 attrs.size = defattrs->size; |
2221 the underlying object, even partially, then the object is dropped. | 2345 the underlying object, even partially, then the object is dropped. |
2222 SIZE, if nonzero, is the size of an access in cases where MODE | 2346 SIZE, if nonzero, is the size of an access in cases where MODE |
2223 has no inherent size. */ | 2347 has no inherent size. */ |
2224 | 2348 |
2225 rtx | 2349 rtx |
2226 adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset, | 2350 adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset, |
2227 int validate, int adjust_address, int adjust_object, | 2351 int validate, int adjust_address, int adjust_object, |
2228 HOST_WIDE_INT size) | 2352 poly_int64 size) |
2229 { | 2353 { |
2230 rtx addr = XEXP (memref, 0); | 2354 rtx addr = XEXP (memref, 0); |
2231 rtx new_rtx; | 2355 rtx new_rtx; |
2232 scalar_int_mode address_mode; | 2356 scalar_int_mode address_mode; |
2233 int pbits; | 2357 struct mem_attrs attrs (*get_mem_attrs (memref)), *defattrs; |
2234 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs; | |
2235 unsigned HOST_WIDE_INT max_align; | 2358 unsigned HOST_WIDE_INT max_align; |
2236 #ifdef POINTERS_EXTEND_UNSIGNED | 2359 #ifdef POINTERS_EXTEND_UNSIGNED |
2237 scalar_int_mode pointer_mode | 2360 scalar_int_mode pointer_mode |
2238 = targetm.addr_space.pointer_mode (attrs.addrspace); | 2361 = targetm.addr_space.pointer_mode (attrs.addrspace); |
2239 #endif | 2362 #endif |
2246 defattrs = mode_mem_attrs[(int) mode]; | 2369 defattrs = mode_mem_attrs[(int) mode]; |
2247 if (defattrs->size_known_p) | 2370 if (defattrs->size_known_p) |
2248 size = defattrs->size; | 2371 size = defattrs->size; |
2249 | 2372 |
2250 /* If there are no changes, just return the original memory reference. */ | 2373 /* If there are no changes, just return the original memory reference. */ |
2251 if (mode == GET_MODE (memref) && !offset | 2374 if (mode == GET_MODE (memref) |
2252 && (size == 0 || (attrs.size_known_p && attrs.size == size)) | 2375 && known_eq (offset, 0) |
2376 && (known_eq (size, 0) | |
2377 || (attrs.size_known_p && known_eq (attrs.size, size))) | |
2253 && (!validate || memory_address_addr_space_p (mode, addr, | 2378 && (!validate || memory_address_addr_space_p (mode, addr, |
2254 attrs.addrspace))) | 2379 attrs.addrspace))) |
2255 return memref; | 2380 return memref; |
2256 | 2381 |
2257 /* ??? Prefer to create garbage instead of creating shared rtl. | 2382 /* ??? Prefer to create garbage instead of creating shared rtl. |
2260 addr = copy_rtx (addr); | 2385 addr = copy_rtx (addr); |
2261 | 2386 |
2262 /* Convert a possibly large offset to a signed value within the | 2387 /* Convert a possibly large offset to a signed value within the |
2263 range of the target address space. */ | 2388 range of the target address space. */ |
2264 address_mode = get_address_mode (memref); | 2389 address_mode = get_address_mode (memref); |
2265 pbits = GET_MODE_BITSIZE (address_mode); | 2390 offset = trunc_int_for_mode (offset, address_mode); |
2266 if (HOST_BITS_PER_WIDE_INT > pbits) | |
2267 { | |
2268 int shift = HOST_BITS_PER_WIDE_INT - pbits; | |
2269 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift)) | |
2270 >> shift); | |
2271 } | |
2272 | 2391 |
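Note: the deleted shift pair above sign-extended OFFSET from the PBITS-wide address space by shifting up and arithmetically back down; the replacement call to trunc_int_for_mode performs the same reduction and also handles poly offsets. A fixed-width model of what both compute (arithmetic right shift, as on GCC targets):

```cpp
#include <cassert>
#include <cstdint>

// Reduce OFFSET modulo 2^PBITS and sign-extend the result, the way
// the old shift pair did.
int64_t
trunc_for_bits (int64_t offset, int pbits)
{
  if (pbits >= 64)
    return offset;
  int shift = 64 - pbits;
  return (int64_t) ((uint64_t) offset << shift) >> shift;
}

int
main ()
{
  // In a 32-bit address space, 0x1'0000'0004 reduces to 4 ...
  assert (trunc_for_bits (0x100000004LL, 32) == 4);
  // ... and 0xFFFFFFFF reads back as the signed offset -1.
  assert (trunc_for_bits (0xFFFFFFFFLL, 32) == -1);
}
```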
2273 if (adjust_address) | 2392 if (adjust_address) |
2274 { | 2393 { |
2275 /* If MEMREF is a LO_SUM and the offset is within the alignment of the | 2394 /* If MEMREF is a LO_SUM and the offset is within the alignment of the |
2276 object, we can merge it into the LO_SUM. */ | 2395 object, we can merge it into the LO_SUM. */ |
2277 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM | 2396 if (GET_MODE (memref) != BLKmode |
2278 && offset >= 0 | 2397 && GET_CODE (addr) == LO_SUM |
2279 && (unsigned HOST_WIDE_INT) offset | 2398 && known_in_range_p (offset, |
2280 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT) | 2399 0, (GET_MODE_ALIGNMENT (GET_MODE (memref)) |
2400 / BITS_PER_UNIT))) | |
2281 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0), | 2401 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0), |
2282 plus_constant (address_mode, | 2402 plus_constant (address_mode, |
2283 XEXP (addr, 1), offset)); | 2403 XEXP (addr, 1), offset)); |
2284 #ifdef POINTERS_EXTEND_UNSIGNED | 2404 #ifdef POINTERS_EXTEND_UNSIGNED |
2285 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid | 2405 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid |
2286 in that mode, we merge it into the ZERO_EXTEND. We take advantage of | 2406 in that mode, we merge it into the ZERO_EXTEND. We take advantage of |
2287 the fact that pointers are not allowed to overflow. */ | 2407 the fact that pointers are not allowed to overflow. */ |
2288 else if (POINTERS_EXTEND_UNSIGNED > 0 | 2408 else if (POINTERS_EXTEND_UNSIGNED > 0 |
2289 && GET_CODE (addr) == ZERO_EXTEND | 2409 && GET_CODE (addr) == ZERO_EXTEND |
2290 && GET_MODE (XEXP (addr, 0)) == pointer_mode | 2410 && GET_MODE (XEXP (addr, 0)) == pointer_mode |
2291 && trunc_int_for_mode (offset, pointer_mode) == offset) | 2411 && known_eq (trunc_int_for_mode (offset, pointer_mode), offset)) |
2292 addr = gen_rtx_ZERO_EXTEND (address_mode, | 2412 addr = gen_rtx_ZERO_EXTEND (address_mode, |
2293 plus_constant (pointer_mode, | 2413 plus_constant (pointer_mode, |
2294 XEXP (addr, 0), offset)); | 2414 XEXP (addr, 0), offset)); |
2295 #endif | 2415 #endif |
2296 else | 2416 else |
2299 | 2419 |
2300 new_rtx = change_address_1 (memref, mode, addr, validate, false); | 2420 new_rtx = change_address_1 (memref, mode, addr, validate, false); |
2301 | 2421 |
2302 /* If the address is a REG, change_address_1 rightfully returns memref, | 2422 /* If the address is a REG, change_address_1 rightfully returns memref, |
2303 but this would destroy memref's MEM_ATTRS. */ | 2423 but this would destroy memref's MEM_ATTRS. */ |
2304 if (new_rtx == memref && offset != 0) | 2424 if (new_rtx == memref && maybe_ne (offset, 0)) |
2305 new_rtx = copy_rtx (new_rtx); | 2425 new_rtx = copy_rtx (new_rtx); |
2306 | 2426 |
2307 /* Conservatively drop the object if we don't know where we start from. */ | 2427 /* Conservatively drop the object if we don't know where we start from. */ |
2308 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p)) | 2428 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p)) |
2309 { | 2429 { |
2316 if (attrs.offset_known_p) | 2436 if (attrs.offset_known_p) |
2317 { | 2437 { |
2318 attrs.offset += offset; | 2438 attrs.offset += offset; |
2319 | 2439 |
2320 /* Drop the object if the new left end is not within its bounds. */ | 2440 /* Drop the object if the new left end is not within its bounds. */ |
2321 if (adjust_object && attrs.offset < 0) | 2441 if (adjust_object && maybe_lt (attrs.offset, 0)) |
2322 { | 2442 { |
2323 attrs.expr = NULL_TREE; | 2443 attrs.expr = NULL_TREE; |
2324 attrs.alias = 0; | 2444 attrs.alias = 0; |
2325 } | 2445 } |
2326 } | 2446 } |
2327 | 2447 |
2328 /* Compute the new alignment by taking the MIN of the alignment and the | 2448 /* Compute the new alignment by taking the MIN of the alignment and the |
2329 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET | 2449 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET |
2330 if zero. */ | 2450 if zero. */ |
2331 if (offset != 0) | 2451 if (maybe_ne (offset, 0)) |
2332 { | 2452 { |
2333 max_align = least_bit_hwi (offset) * BITS_PER_UNIT; | 2453 max_align = known_alignment (offset) * BITS_PER_UNIT; |
2334 attrs.align = MIN (attrs.align, max_align); | 2454 attrs.align = MIN (attrs.align, max_align); |
2335 } | 2455 } |
2336 | 2456 |
2337 if (size) | 2457 if (maybe_ne (size, 0)) |
2338 { | 2458 { |
2339 /* Drop the object if the new right end is not within its bounds. */ | 2459 /* Drop the object if the new right end is not within its bounds. */ |
2340 if (adjust_object && (offset + size) > attrs.size) | 2460 if (adjust_object && maybe_gt (offset + size, attrs.size)) |
2341 { | 2461 { |
2342 attrs.expr = NULL_TREE; | 2462 attrs.expr = NULL_TREE; |
2343 attrs.alias = 0; | 2463 attrs.alias = 0; |
2344 } | 2464 } |
2345 attrs.size_known_p = true; | 2465 attrs.size_known_p = true; |
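
The hunk above belongs to the poly_int64 conversion: byte offsets that were plain HOST_WIDE_INTs may now scale with a runtime vector length, so exact comparisons become may/must queries (maybe_ne, known_ge, known_in_range_p) and least_bit_hwi becomes known_alignment. A minimal standalone C++ model of the may/must split, assuming degree-1 polynomials a + b*N in one nonnegative runtime parameter N (illustrative only, not GCC's poly-int.h):

  // Illustrative model, not GCC code.  A poly1 value is a + b*N for a
  // runtime parameter N >= 0 (e.g. an SVE vector-length multiple).
  // "known_*" must hold for every N; "maybe_*" holds for some N.
  #include <cstdint>
  #include <cstdio>

  struct poly1 { int64_t a, b; };                 // value = a + b*N

  static bool known_eq (poly1 x, poly1 y)         // equal for all N
  { return x.a == y.a && x.b == y.b; }

  static bool maybe_ne (poly1 x, poly1 y)         // unequal for some N
  { return !known_eq (x, y); }

  static bool known_ge (poly1 x, poly1 y)         // x >= y for all N >= 0
  { return x.a >= y.a && x.b >= y.b; }

  int main ()
  {
    poly1 off = { 4, 8 };                         // 4 + 8*N bytes
    poly1 zero = { 0, 0 };
    printf ("maybe_ne %d known_ge %d\n",
            maybe_ne (off, zero), known_ge (off, zero));  // prints 1 1
  }

GCC's real predicates generalize this to wider coefficient types and several indeterminates, but the must/may distinction used throughout the new code is the same.
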
2363 MEMREF offset by OFFSET bytes. If VALIDATE is | 2483 MEMREF offset by OFFSET bytes. If VALIDATE is |
2364 nonzero, the memory address is forced to be valid. */ | 2484 nonzero, the memory address is forced to be valid. */ |
2365 | 2485 |
2366 rtx | 2486 rtx |
2367 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr, | 2487 adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr, |
2368 HOST_WIDE_INT offset, int validate) | 2488 poly_int64 offset, int validate) |
2369 { | 2489 { |
2370 memref = change_address_1 (memref, VOIDmode, addr, validate, false); | 2490 memref = change_address_1 (memref, VOIDmode, addr, validate, false); |
2371 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0); | 2491 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0); |
2372 } | 2492 } |
2373 | 2493 |
2378 rtx | 2498 rtx |
2379 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2) | 2499 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2) |
2380 { | 2500 { |
2381 rtx new_rtx, addr = XEXP (memref, 0); | 2501 rtx new_rtx, addr = XEXP (memref, 0); |
2382 machine_mode address_mode; | 2502 machine_mode address_mode; |
2383 struct mem_attrs attrs, *defattrs; | 2503 struct mem_attrs *defattrs; |
2384 | 2504 |
2385 attrs = *get_mem_attrs (memref); | 2505 mem_attrs attrs (*get_mem_attrs (memref)); |
2386 address_mode = get_address_mode (memref); | 2506 address_mode = get_address_mode (memref); |
2387 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset); | 2507 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset); |
2388 | 2508 |
2389 /* At this point we don't know _why_ the address is invalid. It | 2509 /* At this point we don't know _why_ the address is invalid. It |
2390 could have secondary memory references, multiplies or anything. | 2510 could have secondary memory references, multiplies or anything. |
2448 MODE and offset by OFFSET. This would be used by targets that e.g. | 2568 MODE and offset by OFFSET. This would be used by targets that e.g. |
2449 cannot issue QImode memory operations and have to use SImode memory | 2569 cannot issue QImode memory operations and have to use SImode memory |
2450 operations plus masking logic. */ | 2570 operations plus masking logic. */ |
2451 | 2571 |
2452 rtx | 2572 rtx |
2453 widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset) | 2573 widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset) |
2454 { | 2574 { |
2455 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0); | 2575 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0); |
2456 struct mem_attrs attrs; | 2576 poly_uint64 size = GET_MODE_SIZE (mode); |
2457 unsigned int size = GET_MODE_SIZE (mode); | |
2458 | 2577 |
2459 /* If there are no changes, just return the original memory reference. */ | 2578 /* If there are no changes, just return the original memory reference. */ |
2460 if (new_rtx == memref) | 2579 if (new_rtx == memref) |
2461 return new_rtx; | 2580 return new_rtx; |
2462 | 2581 |
2463 attrs = *get_mem_attrs (new_rtx); | 2582 mem_attrs attrs (*get_mem_attrs (new_rtx)); |
2464 | 2583 |
2465 /* If we don't know what offset we were at within the expression, then | 2584 /* If we don't know what offset we were at within the expression, then |
2466 we can't know if we've overstepped the bounds. */ | 2585 we can't know if we've overstepped the bounds. */ |
2467 if (! attrs.offset_known_p) | 2586 if (! attrs.offset_known_p) |
2468 attrs.expr = NULL_TREE; | 2587 attrs.expr = NULL_TREE; |
2480 break; | 2599 break; |
2481 } | 2600 } |
2482 | 2601 |
2483 /* Is the field at least as large as the access? If so, ok, | 2602 /* Is the field at least as large as the access? If so, ok, |
2484 otherwise strip back to the containing structure. */ | 2603 otherwise strip back to the containing structure. */ |
2485 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST | 2604 if (poly_int_tree_p (DECL_SIZE_UNIT (field)) |
2486 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0 | 2605 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size) |
2487 && attrs.offset >= 0) | 2606 && known_ge (attrs.offset, 0)) |
2488 break; | 2607 break; |
2489 | 2608 |
2490 if (! tree_fits_uhwi_p (offset)) | 2609 poly_uint64 suboffset; |
2610 if (!poly_int_tree_p (offset, &suboffset)) | |
2491 { | 2611 { |
2492 attrs.expr = NULL_TREE; | 2612 attrs.expr = NULL_TREE; |
2493 break; | 2613 break; |
2494 } | 2614 } |
2495 | 2615 |
2496 attrs.expr = TREE_OPERAND (attrs.expr, 0); | 2616 attrs.expr = TREE_OPERAND (attrs.expr, 0); |
2497 attrs.offset += tree_to_uhwi (offset); | 2617 attrs.offset += suboffset; |
2498 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)) | 2618 attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)) |
2499 / BITS_PER_UNIT); | 2619 / BITS_PER_UNIT); |
2500 } | 2620 } |
2501 /* Similarly for the decl. */ | 2621 /* Similarly for the decl. */ |
2502 else if (DECL_P (attrs.expr) | 2622 else if (DECL_P (attrs.expr) |
2503 && DECL_SIZE_UNIT (attrs.expr) | 2623 && DECL_SIZE_UNIT (attrs.expr) |
2504 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST | 2624 && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr)) |
2505 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0 | 2625 && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)), |
2506 && (! attrs.offset_known_p || attrs.offset >= 0)) | 2626 size) |
2627 && known_ge (attrs.offset, 0)) | |
2507 break; | 2628 break; |
2508 else | 2629 else |
2509 { | 2630 { |
2510 /* The widened memory access overflows the expression, which means | 2631 /* The widened memory access overflows the expression, which means |
2511 that it could alias another expression. Zap it. */ | 2632 that it could alias another expression. Zap it. */ |
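
widen_memory_access follows the same pattern on the tree side: DECL_SIZE_UNIT is now read through poly_int_tree_p and wi::to_poly_offset instead of demanding a literal INTEGER_CST. A hedged GCC-context sketch of the new idiom (field and size refer to the locals above; not standalone code):

  /* poly_int_tree_p accepts INTEGER_CSTs and POLY_INT_CSTs and extracts
     the value in one step, replacing the TREE_CODE () == INTEGER_CST
     check paired with tree_to_uhwi.  */
  poly_uint64 fieldsize;
  if (poly_int_tree_p (DECL_SIZE_UNIT (field), &fieldsize)
      && known_ge (fieldsize, size))
    /* The widened access provably stays within the field.  */;
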
2532 tree | 2653 tree |
2533 get_spill_slot_decl (bool force_build_p) | 2654 get_spill_slot_decl (bool force_build_p) |
2534 { | 2655 { |
2535 tree d = spill_slot_decl; | 2656 tree d = spill_slot_decl; |
2536 rtx rd; | 2657 rtx rd; |
2537 struct mem_attrs attrs; | |
2538 | 2658 |
2539 if (d || !force_build_p) | 2659 if (d || !force_build_p) |
2540 return d; | 2660 return d; |
2541 | 2661 |
2542 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl), | 2662 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl), |
2546 TREE_USED (d) = 1; | 2666 TREE_USED (d) = 1; |
2547 spill_slot_decl = d; | 2667 spill_slot_decl = d; |
2548 | 2668 |
2549 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx); | 2669 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx); |
2550 MEM_NOTRAP_P (rd) = 1; | 2670 MEM_NOTRAP_P (rd) = 1; |
2551 attrs = *mode_mem_attrs[(int) BLKmode]; | 2671 mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]); |
2552 attrs.alias = new_alias_set (); | 2672 attrs.alias = new_alias_set (); |
2553 attrs.expr = d; | 2673 attrs.expr = d; |
2554 set_mem_attrs (rd, &attrs); | 2674 set_mem_attrs (rd, &attrs); |
2555 SET_DECL_RTL (d, rd); | 2675 SET_DECL_RTL (d, rd); |
2556 | 2676 |
2564 work properly in the case of shared spill slots. */ | 2684 work properly in the case of shared spill slots. */ |
2565 | 2685 |
2566 void | 2686 void |
2567 set_mem_attrs_for_spill (rtx mem) | 2687 set_mem_attrs_for_spill (rtx mem) |
2568 { | 2688 { |
2569 struct mem_attrs attrs; | |
2570 rtx addr; | 2689 rtx addr; |
2571 | 2690 |
2572 attrs = *get_mem_attrs (mem); | 2691 mem_attrs attrs (*get_mem_attrs (mem)); |
2573 attrs.expr = get_spill_slot_decl (true); | 2692 attrs.expr = get_spill_slot_decl (true); |
2574 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr)); | 2693 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr)); |
2575 attrs.addrspace = ADDR_SPACE_GENERIC; | 2694 attrs.addrspace = ADDR_SPACE_GENERIC; |
2576 | 2695 |
2577 /* We expect the incoming memory to be of the form: | 2696 /* We expect the incoming memory to be of the form: |
2578 (mem:MODE (plus (reg sfp) (const_int offset))) | 2697 (mem:MODE (plus (reg sfp) (const_int offset))) |
2579 with perhaps the plus missing for offset = 0. */ | 2698 with perhaps the plus missing for offset = 0. */ |
2580 addr = XEXP (mem, 0); | 2699 addr = XEXP (mem, 0); |
2581 attrs.offset_known_p = true; | 2700 attrs.offset_known_p = true; |
2582 attrs.offset = 0; | 2701 strip_offset (addr, &attrs.offset); |
2583 if (GET_CODE (addr) == PLUS | |
2584 && CONST_INT_P (XEXP (addr, 1))) | |
2585 attrs.offset = INTVAL (XEXP (addr, 1)); | |
2586 | 2702 |
2587 set_mem_attrs (mem, &attrs); | 2703 set_mem_attrs (mem, &attrs); |
2588 MEM_NOTRAP_P (mem) = 1; | 2704 MEM_NOTRAP_P (mem) = 1; |
2589 } | 2705 } |
2590 | 2706 |
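
The open-coded (plus (reg sfp) (const_int N)) match is gone because strip_offset from rtlanal.c now does the split. A hedged sketch of what that call computes, assuming ADDR has the shape the comment above describes:

  /* strip_offset returns the base of ADDR and stores the (now
     poly_int64) byte displacement; for a bare (reg sfp) the stored
     offset is 0, matching the old INTVAL special case.  */
  poly_int64 offset;
  rtx base = strip_offset (addr, &offset);   /* base == (reg sfp) here */
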
2747 case SIMPLE_RETURN: | 2863 case SIMPLE_RETURN: |
2748 case SCRATCH: | 2864 case SCRATCH: |
2749 /* SCRATCHes must be shared because they represent distinct values. */ | 2865 /* SCRATCHes must be shared because they represent distinct values. */ |
2750 return; | 2866 return; |
2751 case CLOBBER: | 2867 case CLOBBER: |
2868 case CLOBBER_HIGH: | |
2752 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg | 2869 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg |
2753 clobbers or clobbers of hard registers that originated as pseudos. | 2870 clobbers or clobbers of hard registers that originated as pseudos. |
2754 This is needed to allow safe register renaming. */ | 2871 This is needed to allow safe register renaming. */ |
2755 if (REG_P (XEXP (x, 0)) | 2872 if (REG_P (XEXP (x, 0)) |
2756 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0))) | 2873 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0))) |
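
CLOBBER_HIGH is new in this import: it records that only the part of a register above the mode of its operand is clobbered, the motivating case being Arm SVE calls that clobber the high bits of a vector register while preserving the low 128. A hedged sketch of such an expression (the TImode/V0_REGNUM choice is an assumption for illustration):

  /* (clobber_high (reg:TI v0)): everything above the low 128 bits of
     v0 is clobbered; the low TImode part survives the call.  */
  rtx reg = gen_rtx_REG (TImode, V0_REGNUM);
  rtx clob = gen_rtx_CLOBBER_HIGH (VOIDmode, reg);

Like plain CLOBBERs of hard registers, these may be shared, which is why the case label is added alongside CLOBBER here and in the copies below.
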
3000 case SIMPLE_RETURN: | 3117 case SIMPLE_RETURN: |
3001 case SCRATCH: | 3118 case SCRATCH: |
3002 /* SCRATCHes must be shared because they represent distinct values. */ | 3119 /* SCRATCHes must be shared because they represent distinct values. */ |
3003 return; | 3120 return; |
3004 case CLOBBER: | 3121 case CLOBBER: |
3122 case CLOBBER_HIGH: | |
3005 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg | 3123 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg |
3006 clobbers or clobbers of hard registers that originated as pseudos. | 3124 clobbers or clobbers of hard registers that originated as pseudos. |
3007 This is needed to allow safe register renaming. */ | 3125 This is needed to allow safe register renaming. */ |
3008 if (REG_P (XEXP (x, 0)) | 3126 if (REG_P (XEXP (x, 0)) |
3009 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0))) | 3127 && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0))) |
3353 } | 3471 } |
3354 | 3472 |
3355 return insn; | 3473 return insn; |
3356 } | 3474 } |
3357 | 3475 |
3358 /* Return the next insn after INSN that is not a NOTE, but stop the | 3476 /* Return the next insn after INSN that is not a DEBUG_INSN. This |
3359 search before we enter another basic block. This routine does not | 3477 routine does not look inside SEQUENCEs. */ |
3360 look inside SEQUENCEs. */ | |
3361 | 3478 |
3362 rtx_insn * | 3479 rtx_insn * |
3363 next_nonnote_insn_bb (rtx_insn *insn) | 3480 next_nondebug_insn (rtx_insn *insn) |
3364 { | 3481 { |
3365 while (insn) | 3482 while (insn) |
3366 { | 3483 { |
3367 insn = NEXT_INSN (insn); | 3484 insn = NEXT_INSN (insn); |
3485 if (insn == 0 || !DEBUG_INSN_P (insn)) | |
3486 break; | |
3487 } | |
3488 | |
3489 return insn; | |
3490 } | |
3491 | |
3492 /* Return the previous insn before INSN that is not a NOTE. This routine does | |
3493 not look inside SEQUENCEs. */ | |
3494 | |
3495 rtx_insn * | |
3496 prev_nonnote_insn (rtx_insn *insn) | |
3497 { | |
3498 while (insn) | |
3499 { | |
3500 insn = PREV_INSN (insn); | |
3368 if (insn == 0 || !NOTE_P (insn)) | 3501 if (insn == 0 || !NOTE_P (insn)) |
3502 break; | |
3503 } | |
3504 | |
3505 return insn; | |
3506 } | |
3507 | |
3508 /* Return the previous insn before INSN that is not a DEBUG_INSN. | |
3509 This routine does not look inside SEQUENCEs. */ | |
3510 | |
3511 rtx_insn * | |
3512 prev_nondebug_insn (rtx_insn *insn) | |
3513 { | |
3514 while (insn) | |
3515 { | |
3516 insn = PREV_INSN (insn); | |
3517 if (insn == 0 || !DEBUG_INSN_P (insn)) | |
3518 break; | |
3519 } | |
3520 | |
3521 return insn; | |
3522 } | |
3523 | |
3524 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN. | |
3525 This routine does not look inside SEQUENCEs. */ | |
3526 | |
3527 rtx_insn * | |
3528 next_nonnote_nondebug_insn (rtx_insn *insn) | |
3529 { | |
3530 while (insn) | |
3531 { | |
3532 insn = NEXT_INSN (insn); | |
3533 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn))) | |
3534 break; | |
3535 } | |
3536 | |
3537 return insn; | |
3538 } | |
3539 | |
3540 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN, | |
3541 but stop the search before we enter another basic block. This | |
3542 routine does not look inside SEQUENCEs. */ | |
3543 | |
3544 rtx_insn * | |
3545 next_nonnote_nondebug_insn_bb (rtx_insn *insn) | |
3546 { | |
3547 while (insn) | |
3548 { | |
3549 insn = NEXT_INSN (insn); | |
3550 if (insn == 0) | |
3551 break; | |
3552 if (DEBUG_INSN_P (insn)) | |
3553 continue; | |
3554 if (!NOTE_P (insn)) | |
3369 break; | 3555 break; |
3370 if (NOTE_INSN_BASIC_BLOCK_P (insn)) | 3556 if (NOTE_INSN_BASIC_BLOCK_P (insn)) |
3371 return NULL; | 3557 return NULL; |
3372 } | 3558 } |
3373 | 3559 |
3374 return insn; | 3560 return insn; |
3375 } | 3561 } |
3376 | 3562 |
3377 /* Return the previous insn before INSN that is not a NOTE. This routine does | 3563 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN. |
3378 not look inside SEQUENCEs. */ | 3564 This routine does not look inside SEQUENCEs. */ |
3379 | 3565 |
3380 rtx_insn * | 3566 rtx_insn * |
3381 prev_nonnote_insn (rtx_insn *insn) | 3567 prev_nonnote_nondebug_insn (rtx_insn *insn) |
3382 { | 3568 { |
3383 while (insn) | 3569 while (insn) |
3384 { | 3570 { |
3385 insn = PREV_INSN (insn); | 3571 insn = PREV_INSN (insn); |
3386 if (insn == 0 || !NOTE_P (insn)) | 3572 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn))) |
3387 break; | 3573 break; |
3388 } | 3574 } |
3389 | 3575 |
3390 return insn; | 3576 return insn; |
3391 } | 3577 } |
3392 | 3578 |
3393 /* Return the previous insn before INSN that is not a NOTE, but stop | 3579 /* Return the previous insn before INSN that is not a NOTE nor |
3394 the search before we enter another basic block. This routine does | 3580 DEBUG_INSN, but stop the search before we enter another basic |
3395 not look inside SEQUENCEs. */ | 3581 block. This routine does not look inside SEQUENCEs. */ |
3396 | 3582 |
3397 rtx_insn * | 3583 rtx_insn * |
3398 prev_nonnote_insn_bb (rtx_insn *insn) | 3584 prev_nonnote_nondebug_insn_bb (rtx_insn *insn) |
3399 { | 3585 { |
3400 | |
3401 while (insn) | 3586 while (insn) |
3402 { | 3587 { |
3403 insn = PREV_INSN (insn); | 3588 insn = PREV_INSN (insn); |
3404 if (insn == 0 || !NOTE_P (insn)) | 3589 if (insn == 0) |
3590 break; | |
3591 if (DEBUG_INSN_P (insn)) | |
3592 continue; | |
3593 if (!NOTE_P (insn)) | |
3405 break; | 3594 break; |
3406 if (NOTE_INSN_BASIC_BLOCK_P (insn)) | 3595 if (NOTE_INSN_BASIC_BLOCK_P (insn)) |
3407 return NULL; | 3596 return NULL; |
3408 } | 3597 } |
3409 | 3598 |
3410 return insn; | 3599 return insn; |
3411 } | 3600 } |
3412 | 3601 |
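
A hedged usage sketch of the rewritten _bb walker: it yields the next real instruction within the same basic block, or NULL once the search would cross into another block (process is a hypothetical caller-side handler):

  /* Find the instruction that actually executes after INSN in its
     block, skipping notes and debug insns alike.  */
  rtx_insn *next = next_nonnote_nondebug_insn_bb (insn);
  if (next)
    process (insn, next);   /* hypothetical */
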
3413 /* Return the next insn after INSN that is not a DEBUG_INSN. This | 3602 /* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN; |
3414 routine does not look inside SEQUENCEs. */ | 3603 or 0, if there is none. This routine does not look inside |
3604 SEQUENCEs. */ | |
3415 | 3605 |
3416 rtx_insn * | 3606 rtx_insn * |
3417 next_nondebug_insn (rtx_insn *insn) | 3607 next_real_insn (rtx_insn *insn) |
3418 { | 3608 { |
3419 while (insn) | 3609 while (insn) |
3420 { | 3610 { |
3421 insn = NEXT_INSN (insn); | 3611 insn = NEXT_INSN (insn); |
3422 if (insn == 0 || !DEBUG_INSN_P (insn)) | 3612 if (insn == 0 || INSN_P (insn)) |
3423 break; | 3613 break; |
3424 } | 3614 } |
3425 | 3615 |
3426 return insn; | 3616 return insn; |
3427 } | 3617 } |
3428 | 3618 |
3429 /* Return the previous insn before INSN that is not a DEBUG_INSN. | 3619 /* Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN; |
3430 This routine does not look inside SEQUENCEs. */ | 3620 or 0, if there is none. This routine does not look inside |
3621 SEQUENCEs. */ | |
3431 | 3622 |
3432 rtx_insn * | 3623 rtx_insn * |
3433 prev_nondebug_insn (rtx_insn *insn) | 3624 prev_real_insn (rtx_insn *insn) |
3434 { | 3625 { |
3435 while (insn) | 3626 while (insn) |
3436 { | 3627 { |
3437 insn = PREV_INSN (insn); | 3628 insn = PREV_INSN (insn); |
3438 if (insn == 0 || !DEBUG_INSN_P (insn)) | 3629 if (insn == 0 || INSN_P (insn)) |
3439 break; | |
3440 } | |
3441 | |
3442 return insn; | |
3443 } | |
3444 | |
3445 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN. | |
3446 This routine does not look inside SEQUENCEs. */ | |
3447 | |
3448 rtx_insn * | |
3449 next_nonnote_nondebug_insn (rtx_insn *insn) | |
3450 { | |
3451 while (insn) | |
3452 { | |
3453 insn = NEXT_INSN (insn); | |
3454 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn))) | |
3455 break; | |
3456 } | |
3457 | |
3458 return insn; | |
3459 } | |
3460 | |
3461 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN. | |
3462 This routine does not look inside SEQUENCEs. */ | |
3463 | |
3464 rtx_insn * | |
3465 prev_nonnote_nondebug_insn (rtx_insn *insn) | |
3466 { | |
3467 while (insn) | |
3468 { | |
3469 insn = PREV_INSN (insn); | |
3470 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn))) | |
3471 break; | 3630 break; |
3472 } | 3631 } |
3473 | 3632 |
3474 return insn; | 3633 return insn; |
3475 } | 3634 } |
3477 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN; | 3636 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN; |
3478 or 0, if there is none. This routine does not look inside | 3637 or 0, if there is none. This routine does not look inside |
3479 SEQUENCEs. */ | 3638 SEQUENCEs. */ |
3480 | 3639 |
3481 rtx_insn * | 3640 rtx_insn * |
3482 next_real_insn (rtx uncast_insn) | 3641 next_real_nondebug_insn (rtx uncast_insn) |
3483 { | 3642 { |
3484 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn); | 3643 rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn); |
3485 | 3644 |
3486 while (insn) | 3645 while (insn) |
3487 { | 3646 { |
3488 insn = NEXT_INSN (insn); | 3647 insn = NEXT_INSN (insn); |
3489 if (insn == 0 || INSN_P (insn)) | 3648 if (insn == 0 || NONDEBUG_INSN_P (insn)) |
3490 break; | 3649 break; |
3491 } | 3650 } |
3492 | 3651 |
3493 return insn; | 3652 return insn; |
3494 } | 3653 } |
3496 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN; | 3655 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN; |
3497 or 0, if there is none. This routine does not look inside | 3656 or 0, if there is none. This routine does not look inside |
3498 SEQUENCEs. */ | 3657 SEQUENCEs. */ |
3499 | 3658 |
3500 rtx_insn * | 3659 rtx_insn * |
3501 prev_real_insn (rtx_insn *insn) | 3660 prev_real_nondebug_insn (rtx_insn *insn) |
3502 { | 3661 { |
3503 while (insn) | 3662 while (insn) |
3504 { | 3663 { |
3505 insn = PREV_INSN (insn); | 3664 insn = PREV_INSN (insn); |
3506 if (insn == 0 || INSN_P (insn)) | 3665 if (insn == 0 || NONDEBUG_INSN_P (insn)) |
3507 break; | 3666 break; |
3508 } | 3667 } |
3509 | 3668 |
3510 return insn; | 3669 return insn; |
3511 } | 3670 } |
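
Note the split the renaming makes explicit: next_real_insn/prev_real_insn test INSN_P, which accepts DEBUG_INSNs, while the new next_real_nondebug_insn/prev_real_nondebug_insn test NONDEBUG_INSN_P and skip them. A short GCC-context sketch of the difference:

  rtx_insn *a = next_real_insn (insn);           /* may be a DEBUG_INSN */
  rtx_insn *b = next_real_nondebug_insn (insn);  /* never a DEBUG_INSN */
  /* NONDEBUG_INSN_P (x) is INSN_P (x) && !DEBUG_INSN_P (x), so passes
     whose output must not depend on -g should prefer B.  */
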
3741 if (CALL_P (trial)) | 3900 if (CALL_P (trial)) |
3742 { | 3901 { |
3743 for (insn = insn_last; insn ; insn = PREV_INSN (insn)) | 3902 for (insn = insn_last; insn ; insn = PREV_INSN (insn)) |
3744 if (CALL_P (insn)) | 3903 if (CALL_P (insn)) |
3745 { | 3904 { |
3746 rtx_insn *next; | |
3747 rtx *p; | |
3748 | |
3749 gcc_assert (call_insn == NULL_RTX); | 3905 gcc_assert (call_insn == NULL_RTX); |
3750 call_insn = insn; | 3906 call_insn = insn; |
3751 | 3907 |
3752 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the | 3908 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the |
3753 target may have explicitly specified. */ | 3909 target may have explicitly specified. */ |
3754 p = &CALL_INSN_FUNCTION_USAGE (insn); | 3910 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn); |
3755 while (*p) | 3911 while (*p) |
3756 p = &XEXP (*p, 1); | 3912 p = &XEXP (*p, 1); |
3757 *p = CALL_INSN_FUNCTION_USAGE (trial); | 3913 *p = CALL_INSN_FUNCTION_USAGE (trial); |
3758 | 3914 |
3759 /* If the old call was a sibling call, the new one must | 3915 /* If the old call was a sibling call, the new one must |
3760 be too. */ | 3916 be too. */ |
3761 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial); | 3917 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial); |
3762 | |
3763 /* If the new call is the last instruction in the sequence, | |
3764 it will effectively replace the old call in-situ. Otherwise | |
3765 we must move any following NOTE_INSN_CALL_ARG_LOCATION note | |
3766 so that it comes immediately after the new call. */ | |
3767 if (NEXT_INSN (insn)) | |
3768 for (next = NEXT_INSN (trial); | |
3769 next && NOTE_P (next); | |
3770 next = NEXT_INSN (next)) | |
3771 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION) | |
3772 { | |
3773 remove_insn (next); | |
3774 add_insn_after (next, insn, NULL); | |
3775 break; | |
3776 } | |
3777 } | 3918 } |
3778 } | 3919 } |
3779 | 3920 |
3780 /* Copy notes, particularly those related to the CFG. */ | 3921 /* Copy notes, particularly those related to the CFG. */ |
3781 for (note = REG_NOTES (trial); note; note = XEXP (note, 1)) | 3922 for (note = REG_NOTES (trial); note; note = XEXP (note, 1)) |
3788 | 3929 |
3789 case REG_NORETURN: | 3930 case REG_NORETURN: |
3790 case REG_SETJMP: | 3931 case REG_SETJMP: |
3791 case REG_TM: | 3932 case REG_TM: |
3792 case REG_CALL_NOCF_CHECK: | 3933 case REG_CALL_NOCF_CHECK: |
3934 case REG_CALL_ARG_LOCATION: | |
3793 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn)) | 3935 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn)) |
3794 { | 3936 { |
3795 if (CALL_P (insn)) | 3937 if (CALL_P (insn)) |
3796 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0)); | 3938 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0)); |
3797 } | 3939 } |
3817 add_reg_note (insn, REG_INC, reg); | 3959 add_reg_note (insn, REG_INC, reg); |
3818 } | 3960 } |
3819 break; | 3961 break; |
3820 | 3962 |
3821 case REG_ARGS_SIZE: | 3963 case REG_ARGS_SIZE: |
3822 fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0))); | 3964 fixup_args_size_notes (NULL, insn_last, get_args_size (note)); |
3823 break; | 3965 break; |
3824 | 3966 |
3825 case REG_CALL_DECL: | 3967 case REG_CALL_DECL: |
3826 gcc_assert (call_insn != NULL_RTX); | 3968 gcc_assert (call_insn != NULL_RTX); |
3827 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0)); | 3969 add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0)); |
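
REG_ARGS_SIZE notes join the poly_int64 conversion as well: their operand may now be polynomial in the vector length, so the value is read with get_args_size instead of INTVAL, and the new REG_CALL_ARG_LOCATION note is propagated to the replacement call like the other call-related notes. A minimal sketch of the accessor change (note is a REG_ARGS_SIZE note, as above):

  /* Old: HOST_WIDE_INT sz = INTVAL (XEXP (note, 0));
     New: the operand is no longer guaranteed to be a CONST_INT.  */
  poly_int64 sz = get_args_size (note);
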
4023 void | 4165 void |
4024 add_insn (rtx_insn *insn) | 4166 add_insn (rtx_insn *insn) |
4025 { | 4167 { |
4026 rtx_insn *prev = get_last_insn (); | 4168 rtx_insn *prev = get_last_insn (); |
4027 link_insn_into_chain (insn, prev, NULL); | 4169 link_insn_into_chain (insn, prev, NULL); |
4028 if (NULL == get_insns ()) | 4170 if (get_insns () == NULL) |
4029 set_first_insn (insn); | 4171 set_first_insn (insn); |
4030 set_last_insn (insn); | 4172 set_last_insn (insn); |
4031 } | 4173 } |
4032 | 4174 |
4033 /* Add INSN into the doubly-linked list after insn AFTER. */ | 4175 /* Add INSN into the doubly-linked list after insn AFTER. */ |
4086 This and the next function should be the only functions called | 4228 This and the next function should be the only functions called |
4087 to insert an insn once delay slots have been filled since only | 4229 to insert an insn once delay slots have been filled since only |
4088 they know how to update a SEQUENCE. */ | 4230 they know how to update a SEQUENCE. */ |
4089 | 4231 |
4090 void | 4232 void |
4091 add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb) | 4233 add_insn_after (rtx_insn *insn, rtx_insn *after, basic_block bb) |
4092 { | 4234 { |
4093 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn); | |
4094 rtx_insn *after = as_a <rtx_insn *> (uncast_after); | |
4095 add_insn_after_nobb (insn, after); | 4235 add_insn_after_nobb (insn, after); |
4096 if (!BARRIER_P (after) | 4236 if (!BARRIER_P (after) |
4097 && !BARRIER_P (insn) | 4237 && !BARRIER_P (insn) |
4098 && (bb = BLOCK_FOR_INSN (after))) | 4238 && (bb = BLOCK_FOR_INSN (after))) |
4099 { | 4239 { |
4116 This and the previous function should be the only functions called | 4256 This and the previous function should be the only functions called |
4117 to insert an insn once delay slots have been filled since only | 4257 to insert an insn once delay slots have been filled since only |
4118 they know how to update a SEQUENCE. */ | 4258 they know how to update a SEQUENCE. */ |
4119 | 4259 |
4120 void | 4260 void |
4121 add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb) | 4261 add_insn_before (rtx_insn *insn, rtx_insn *before, basic_block bb) |
4122 { | 4262 { |
4123 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn); | |
4124 rtx_insn *before = as_a <rtx_insn *> (uncast_before); | |
4125 add_insn_before_nobb (insn, before); | 4263 add_insn_before_nobb (insn, before); |
4126 | 4264 |
4127 if (!bb | 4265 if (!bb |
4128 && !BARRIER_P (before) | 4266 && !BARRIER_P (before) |
4129 && !BARRIER_P (insn)) | 4267 && !BARRIER_P (insn)) |
4144 } | 4282 } |
4145 | 4283 |
4146 /* Replace insn with a deleted instruction note. */ | 4284 /* Replace insn with a deleted instruction note. */ |
4147 | 4285 |
4148 void | 4286 void |
4149 set_insn_deleted (rtx insn) | 4287 set_insn_deleted (rtx_insn *insn) |
4150 { | 4288 { |
4151 if (INSN_P (insn)) | 4289 if (INSN_P (insn)) |
4152 df_insn_delete (as_a <rtx_insn *> (insn)); | 4290 df_insn_delete (insn); |
4153 PUT_CODE (insn, NOTE); | 4291 PUT_CODE (insn, NOTE); |
4154 NOTE_KIND (insn) = NOTE_INSN_DELETED; | 4292 NOTE_KIND (insn) = NOTE_INSN_DELETED; |
4155 } | 4293 } |
4156 | 4294 |
4157 | 4295 |
4169 the caller. Nullifying them here breaks many insn chain walks. | 4307 the caller. Nullifying them here breaks many insn chain walks. |
4170 | 4308 |
4171 To really delete an insn and related DF information, use delete_insn. */ | 4309 To really delete an insn and related DF information, use delete_insn. */ |
4172 | 4310 |
4173 void | 4311 void |
4174 remove_insn (rtx uncast_insn) | 4312 remove_insn (rtx_insn *insn) |
4175 { | 4313 { |
4176 rtx_insn *insn = as_a <rtx_insn *> (uncast_insn); | |
4177 rtx_insn *next = NEXT_INSN (insn); | 4314 rtx_insn *next = NEXT_INSN (insn); |
4178 rtx_insn *prev = PREV_INSN (insn); | 4315 rtx_insn *prev = PREV_INSN (insn); |
4179 basic_block bb; | 4316 basic_block bb; |
4180 | 4317 |
4181 if (prev) | 4318 if (prev) |
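
remove_insn, set_insn_deleted, add_insn_after/add_insn_before and many emit_* helpers below all drop their `uncast` rtx parameters in favor of rtx_insn *, so the checked downcasts inside them disappear. A sketch of the two is-a.h helpers this retires at each call site:

  rtx x = ...;
  rtx_insn *i1 = as_a <rtx_insn *> (x);       /* asserts X is an insn */
  rtx_insn *i2 = safe_as_a <rtx_insn *> (x);  /* likewise, but passes
                                                 NULL through */
  /* With rtx_insn * parameters the compiler enforces the type
     statically, removing both the cast and its runtime check.  */
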
4374 is a relic of the past which no longer occurs. The reason is that | 4511 is a relic of the past which no longer occurs. The reason is that |
4375 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE | 4512 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE |
4376 generated would almost certainly die right after it was created. */ | 4513 generated would almost certainly die right after it was created. */ |
4377 | 4514 |
4378 static rtx_insn * | 4515 static rtx_insn * |
4379 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb, | 4516 emit_pattern_before_noloc (rtx x, rtx_insn *before, rtx_insn *last, |
4517 basic_block bb, | |
4380 rtx_insn *(*make_raw) (rtx)) | 4518 rtx_insn *(*make_raw) (rtx)) |
4381 { | 4519 { |
4382 rtx_insn *insn; | 4520 rtx_insn *insn; |
4383 | 4521 |
4384 gcc_assert (before); | 4522 gcc_assert (before); |
4385 | 4523 |
4386 if (x == NULL_RTX) | 4524 if (x == NULL_RTX) |
4387 return safe_as_a <rtx_insn *> (last); | 4525 return last; |
4388 | 4526 |
4389 switch (GET_CODE (x)) | 4527 switch (GET_CODE (x)) |
4390 { | 4528 { |
4391 case DEBUG_INSN: | 4529 case DEBUG_INSN: |
4392 case INSN: | 4530 case INSN: |
4415 last = (*make_raw) (x); | 4553 last = (*make_raw) (x); |
4416 add_insn_before (last, before, bb); | 4554 add_insn_before (last, before, bb); |
4417 break; | 4555 break; |
4418 } | 4556 } |
4419 | 4557 |
4420 return safe_as_a <rtx_insn *> (last); | 4558 return last; |
4421 } | 4559 } |
4422 | 4560 |
4423 /* Make X be output before the instruction BEFORE. */ | 4561 /* Make X be output before the instruction BEFORE. */ |
4424 | 4562 |
4425 rtx_insn * | 4563 rtx_insn * |
4433 | 4571 |
4434 rtx_jump_insn * | 4572 rtx_jump_insn * |
4435 emit_jump_insn_before_noloc (rtx x, rtx_insn *before) | 4573 emit_jump_insn_before_noloc (rtx x, rtx_insn *before) |
4436 { | 4574 { |
4437 return as_a <rtx_jump_insn *> ( | 4575 return as_a <rtx_jump_insn *> ( |
4438 emit_pattern_before_noloc (x, before, NULL_RTX, NULL, | 4576 emit_pattern_before_noloc (x, before, NULL, NULL, |
4439 make_jump_insn_raw)); | 4577 make_jump_insn_raw)); |
4440 } | 4578 } |
4441 | 4579 |
4442 /* Make an instruction with body X and code CALL_INSN | 4580 /* Make an instruction with body X and code CALL_INSN |
4443 and output it before the instruction BEFORE. */ | 4581 and output it before the instruction BEFORE. */ |
4444 | 4582 |
4445 rtx_insn * | 4583 rtx_insn * |
4446 emit_call_insn_before_noloc (rtx x, rtx_insn *before) | 4584 emit_call_insn_before_noloc (rtx x, rtx_insn *before) |
4447 { | 4585 { |
4448 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL, | 4586 return emit_pattern_before_noloc (x, before, NULL, NULL, |
4449 make_call_insn_raw); | 4587 make_call_insn_raw); |
4450 } | 4588 } |
4451 | 4589 |
4452 /* Make an instruction with body X and code DEBUG_INSN | 4590 /* Make an instruction with body X and code DEBUG_INSN |
4453 and output it before the instruction BEFORE. */ | 4591 and output it before the instruction BEFORE. */ |
4454 | 4592 |
4455 rtx_insn * | 4593 rtx_insn * |
4456 emit_debug_insn_before_noloc (rtx x, rtx before) | 4594 emit_debug_insn_before_noloc (rtx x, rtx_insn *before) |
4457 { | 4595 { |
4458 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL, | 4596 return emit_pattern_before_noloc (x, before, NULL, NULL, |
4459 make_debug_insn_raw); | 4597 make_debug_insn_raw); |
4460 } | 4598 } |
4461 | 4599 |
4462 /* Make an insn of code BARRIER | 4600 /* Make an insn of code BARRIER |
4463 and output it before the insn BEFORE. */ | 4601 and output it before the insn BEFORE. */ |
4464 | 4602 |
4465 rtx_barrier * | 4603 rtx_barrier * |
4466 emit_barrier_before (rtx before) | 4604 emit_barrier_before (rtx_insn *before) |
4467 { | 4605 { |
4468 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER)); | 4606 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER)); |
4469 | 4607 |
4470 INSN_UID (insn) = cur_insn_uid++; | 4608 INSN_UID (insn) = cur_insn_uid++; |
4471 | 4609 |
4474 } | 4612 } |
4475 | 4613 |
4476 /* Emit the label LABEL before the insn BEFORE. */ | 4614 /* Emit the label LABEL before the insn BEFORE. */ |
4477 | 4615 |
4478 rtx_code_label * | 4616 rtx_code_label * |
4479 emit_label_before (rtx label, rtx_insn *before) | 4617 emit_label_before (rtx_code_label *label, rtx_insn *before) |
4480 { | 4618 { |
4481 gcc_checking_assert (INSN_UID (label) == 0); | 4619 gcc_checking_assert (INSN_UID (label) == 0); |
4482 INSN_UID (label) = cur_insn_uid++; | 4620 INSN_UID (label) = cur_insn_uid++; |
4483 add_insn_before (label, before, NULL); | 4621 add_insn_before (label, before, NULL); |
4484 return as_a <rtx_code_label *> (label); | 4622 return label; |
4485 } | 4623 } |
4486 | 4624 |
4487 /* Helper for emit_insn_after, handles lists of instructions | 4625 /* Helper for emit_insn_after, handles lists of instructions |
4488 efficiently. */ | 4626 efficiently. */ |
4489 | 4627 |
4490 static rtx_insn * | 4628 static rtx_insn * |
4491 emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb) | 4629 emit_insn_after_1 (rtx_insn *first, rtx_insn *after, basic_block bb) |
4492 { | 4630 { |
4493 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after); | |
4494 rtx_insn *last; | 4631 rtx_insn *last; |
4495 rtx_insn *after_after; | 4632 rtx_insn *after_after; |
4496 if (!bb && !BARRIER_P (after)) | 4633 if (!bb && !BARRIER_P (after)) |
4497 bb = BLOCK_FOR_INSN (after); | 4634 bb = BLOCK_FOR_INSN (after); |
4498 | 4635 |
4530 | 4667 |
4531 return last; | 4668 return last; |
4532 } | 4669 } |
4533 | 4670 |
4534 static rtx_insn * | 4671 static rtx_insn * |
4535 emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb, | 4672 emit_pattern_after_noloc (rtx x, rtx_insn *after, basic_block bb, |
4536 rtx_insn *(*make_raw)(rtx)) | 4673 rtx_insn *(*make_raw)(rtx)) |
4537 { | 4674 { |
4538 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after); | |
4539 rtx_insn *last = after; | 4675 rtx_insn *last = after; |
4540 | 4676 |
4541 gcc_assert (after); | 4677 gcc_assert (after); |
4542 | 4678 |
4543 if (x == NULL_RTX) | 4679 if (x == NULL_RTX) |
4572 | 4708 |
4573 /* Make X be output after the insn AFTER and set the BB of insn. If | 4709 /* Make X be output after the insn AFTER and set the BB of insn. If |
4574 BB is NULL, an attempt is made to infer the BB from AFTER. */ | 4710 BB is NULL, an attempt is made to infer the BB from AFTER. */ |
4575 | 4711 |
4576 rtx_insn * | 4712 rtx_insn * |
4577 emit_insn_after_noloc (rtx x, rtx after, basic_block bb) | 4713 emit_insn_after_noloc (rtx x, rtx_insn *after, basic_block bb) |
4578 { | 4714 { |
4579 return emit_pattern_after_noloc (x, after, bb, make_insn_raw); | 4715 return emit_pattern_after_noloc (x, after, bb, make_insn_raw); |
4580 } | 4716 } |
4581 | 4717 |
4582 | 4718 |
4583 /* Make an insn of code JUMP_INSN with body X | 4719 /* Make an insn of code JUMP_INSN with body X |
4584 and output it after the insn AFTER. */ | 4720 and output it after the insn AFTER. */ |
4585 | 4721 |
4586 rtx_jump_insn * | 4722 rtx_jump_insn * |
4587 emit_jump_insn_after_noloc (rtx x, rtx after) | 4723 emit_jump_insn_after_noloc (rtx x, rtx_insn *after) |
4588 { | 4724 { |
4589 return as_a <rtx_jump_insn *> ( | 4725 return as_a <rtx_jump_insn *> ( |
4590 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw)); | 4726 emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw)); |
4591 } | 4727 } |
4592 | 4728 |
4593 /* Make an instruction with body X and code CALL_INSN | 4729 /* Make an instruction with body X and code CALL_INSN |
4594 and output it after the instruction AFTER. */ | 4730 and output it after the instruction AFTER. */ |
4595 | 4731 |
4596 rtx_insn * | 4732 rtx_insn * |
4597 emit_call_insn_after_noloc (rtx x, rtx after) | 4733 emit_call_insn_after_noloc (rtx x, rtx_insn *after) |
4598 { | 4734 { |
4599 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw); | 4735 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw); |
4600 } | 4736 } |
4601 | 4737 |
4602 /* Make an instruction with body X and code DEBUG_INSN | 4738 /* Make an instruction with body X and code DEBUG_INSN |
4603 and output it after the instruction AFTER. */ | 4739 and output it after the instruction AFTER. */ |
4604 | 4740 |
4605 rtx_insn * | 4741 rtx_insn * |
4606 emit_debug_insn_after_noloc (rtx x, rtx after) | 4742 emit_debug_insn_after_noloc (rtx x, rtx_insn *after) |
4607 { | 4743 { |
4608 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw); | 4744 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw); |
4609 } | 4745 } |
4610 | 4746 |
4611 /* Make an insn of code BARRIER | 4747 /* Make an insn of code BARRIER |
4612 and output it after the insn AFTER. */ | 4748 and output it after the insn AFTER. */ |
4613 | 4749 |
4614 rtx_barrier * | 4750 rtx_barrier * |
4615 emit_barrier_after (rtx after) | 4751 emit_barrier_after (rtx_insn *after) |
4616 { | 4752 { |
4617 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER)); | 4753 rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER)); |
4618 | 4754 |
4619 INSN_UID (insn) = cur_insn_uid++; | 4755 INSN_UID (insn) = cur_insn_uid++; |
4620 | 4756 |
4623 } | 4759 } |
4624 | 4760 |
4625 /* Emit the label LABEL after the insn AFTER. */ | 4761 /* Emit the label LABEL after the insn AFTER. */ |
4626 | 4762 |
4627 rtx_insn * | 4763 rtx_insn * |
4628 emit_label_after (rtx label, rtx_insn *after) | 4764 emit_label_after (rtx_insn *label, rtx_insn *after) |
4629 { | 4765 { |
4630 gcc_checking_assert (INSN_UID (label) == 0); | 4766 gcc_checking_assert (INSN_UID (label) == 0); |
4631 INSN_UID (label) = cur_insn_uid++; | 4767 INSN_UID (label) = cur_insn_uid++; |
4632 add_insn_after (label, after, NULL); | 4768 add_insn_after (label, after, NULL); |
4633 return as_a <rtx_insn *> (label); | 4769 return label; |
4634 } | 4770 } |
4635 | 4771 |
4636 /* Notes require a bit of special handling: Some notes need to have their | 4772 /* Notes require a bit of special handling: Some notes need to have their |
4637 BLOCK_FOR_INSN set, others should never have it set, and some should | 4773 BLOCK_FOR_INSN set, others should never have it set, and some should |
4638 have it set or clear depending on the context. */ | 4774 have it set or clear depending on the context. */ |
4652 | 4788 |
4653 /* Notes for var tracking and EH region markers can appear between or | 4789 /* Notes for var tracking and EH region markers can appear between or |
4654 inside basic blocks. If the caller is emitting on the basic block | 4790 inside basic blocks. If the caller is emitting on the basic block |
4655 boundary, do not set BLOCK_FOR_INSN on the new note. */ | 4791 boundary, do not set BLOCK_FOR_INSN on the new note. */ |
4656 case NOTE_INSN_VAR_LOCATION: | 4792 case NOTE_INSN_VAR_LOCATION: |
4657 case NOTE_INSN_CALL_ARG_LOCATION: | |
4658 case NOTE_INSN_EH_REGION_BEG: | 4793 case NOTE_INSN_EH_REGION_BEG: |
4659 case NOTE_INSN_EH_REGION_END: | 4794 case NOTE_INSN_EH_REGION_END: |
4660 return on_bb_boundary_p; | 4795 return on_bb_boundary_p; |
4661 | 4796 |
4662 /* Otherwise, BLOCK_FOR_INSN must be set. */ | 4797 /* Otherwise, BLOCK_FOR_INSN must be set. */ |
4699 | 4834 |
4700 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC. | 4835 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC. |
4701 MAKE_RAW indicates how to turn PATTERN into a real insn. */ | 4836 MAKE_RAW indicates how to turn PATTERN into a real insn. */ |
4702 | 4837 |
4703 static rtx_insn * | 4838 static rtx_insn * |
4704 emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc, | 4839 emit_pattern_after_setloc (rtx pattern, rtx_insn *after, location_t loc, |
4705 rtx_insn *(*make_raw) (rtx)) | 4840 rtx_insn *(*make_raw) (rtx)) |
4706 { | 4841 { |
4707 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after); | |
4708 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw); | 4842 rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw); |
4709 | 4843 |
4710 if (pattern == NULL_RTX || !loc) | 4844 if (pattern == NULL_RTX || !loc) |
4711 return last; | 4845 return last; |
4712 | 4846 |
4727 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN | 4861 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN |
4728 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after | 4862 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after |
4729 any DEBUG_INSNs. */ | 4863 any DEBUG_INSNs. */ |
4730 | 4864 |
4731 static rtx_insn * | 4865 static rtx_insn * |
4732 emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns, | 4866 emit_pattern_after (rtx pattern, rtx_insn *after, bool skip_debug_insns, |
4733 rtx_insn *(*make_raw) (rtx)) | 4867 rtx_insn *(*make_raw) (rtx)) |
4734 { | 4868 { |
4735 rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after); | |
4736 rtx_insn *prev = after; | 4869 rtx_insn *prev = after; |
4737 | 4870 |
4738 if (skip_debug_insns) | 4871 if (skip_debug_insns) |
4739 while (DEBUG_INSN_P (prev)) | 4872 while (DEBUG_INSN_P (prev)) |
4740 prev = PREV_INSN (prev); | 4873 prev = PREV_INSN (prev); |
4746 return emit_pattern_after_noloc (pattern, after, NULL, make_raw); | 4879 return emit_pattern_after_noloc (pattern, after, NULL, make_raw); |
4747 } | 4880 } |
4748 | 4881 |
4749 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */ | 4882 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */ |
4750 rtx_insn * | 4883 rtx_insn * |
4751 emit_insn_after_setloc (rtx pattern, rtx after, int loc) | 4884 emit_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc) |
4752 { | 4885 { |
4753 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw); | 4886 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw); |
4754 } | 4887 } |
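
The *_setloc emitters also retype their LOC parameter from int to location_t, GCC's encoded source-location handle; the values were already location_t data passed through an int. A hedged usage sketch (old_insn and pattern are assumptions):

  /* Emit PATTERN after OLD_INSN with OLD_INSN's source location, so
     debug info keeps pointing at the original statement.  */
  rtx_insn *new_insn
    = emit_insn_after_setloc (pattern, old_insn, INSN_LOCATION (old_insn));
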
4755 | 4888 |
4756 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ | 4889 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ |
4757 rtx_insn * | 4890 rtx_insn * |
4758 emit_insn_after (rtx pattern, rtx after) | 4891 emit_insn_after (rtx pattern, rtx_insn *after) |
4759 { | 4892 { |
4760 return emit_pattern_after (pattern, after, true, make_insn_raw); | 4893 return emit_pattern_after (pattern, after, true, make_insn_raw); |
4761 } | 4894 } |
4762 | 4895 |
4763 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */ | 4896 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */ |
4764 rtx_jump_insn * | 4897 rtx_jump_insn * |
4765 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc) | 4898 emit_jump_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc) |
4766 { | 4899 { |
4767 return as_a <rtx_jump_insn *> ( | 4900 return as_a <rtx_jump_insn *> ( |
4768 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw)); | 4901 emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw)); |
4769 } | 4902 } |
4770 | 4903 |
4771 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ | 4904 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ |
4772 rtx_jump_insn * | 4905 rtx_jump_insn * |
4773 emit_jump_insn_after (rtx pattern, rtx after) | 4906 emit_jump_insn_after (rtx pattern, rtx_insn *after) |
4774 { | 4907 { |
4775 return as_a <rtx_jump_insn *> ( | 4908 return as_a <rtx_jump_insn *> ( |
4776 emit_pattern_after (pattern, after, true, make_jump_insn_raw)); | 4909 emit_pattern_after (pattern, after, true, make_jump_insn_raw)); |
4777 } | 4910 } |
4778 | 4911 |
4779 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */ | 4912 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */ |
4780 rtx_insn * | 4913 rtx_insn * |
4781 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc) | 4914 emit_call_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc) |
4782 { | 4915 { |
4783 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw); | 4916 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw); |
4784 } | 4917 } |
4785 | 4918 |
4786 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ | 4919 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ |
4787 rtx_insn * | 4920 rtx_insn * |
4788 emit_call_insn_after (rtx pattern, rtx after) | 4921 emit_call_insn_after (rtx pattern, rtx_insn *after) |
4789 { | 4922 { |
4790 return emit_pattern_after (pattern, after, true, make_call_insn_raw); | 4923 return emit_pattern_after (pattern, after, true, make_call_insn_raw); |
4791 } | 4924 } |
4792 | 4925 |
4793 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */ | 4926 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */ |
4794 rtx_insn * | 4927 rtx_insn * |
4795 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc) | 4928 emit_debug_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc) |
4796 { | 4929 { |
4797 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw); | 4930 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw); |
4798 } | 4931 } |
4799 | 4932 |
4800 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ | 4933 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */ |
4801 rtx_insn * | 4934 rtx_insn * |
4802 emit_debug_insn_after (rtx pattern, rtx after) | 4935 emit_debug_insn_after (rtx pattern, rtx_insn *after) |
4803 { | 4936 { |
4804 return emit_pattern_after (pattern, after, false, make_debug_insn_raw); | 4937 return emit_pattern_after (pattern, after, false, make_debug_insn_raw); |
4805 } | 4938 } |
4806 | 4939 |
4807 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC. | 4940 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC. |
4808 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP | 4941 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP |
4809 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN, | 4942 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN, |
4810 CALL_INSN, etc. */ | 4943 CALL_INSN, etc. */ |
4811 | 4944 |
4812 static rtx_insn * | 4945 static rtx_insn * |
4813 emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp, | 4946 emit_pattern_before_setloc (rtx pattern, rtx_insn *before, location_t loc, |
4814 rtx_insn *(*make_raw) (rtx)) | 4947 bool insnp, rtx_insn *(*make_raw) (rtx)) |
4815 { | 4948 { |
4816 rtx_insn *before = as_a <rtx_insn *> (uncast_before); | |
4817 rtx_insn *first = PREV_INSN (before); | 4949 rtx_insn *first = PREV_INSN (before); |
4818 rtx_insn *last = emit_pattern_before_noloc (pattern, before, | 4950 rtx_insn *last = emit_pattern_before_noloc (pattern, before, |
4819 insnp ? before : NULL_RTX, | 4951 insnp ? before : NULL, |
4820 NULL, make_raw); | 4952 NULL, make_raw); |
4821 | 4953 |
4822 if (pattern == NULL_RTX || !loc) | 4954 if (pattern == NULL_RTX || !loc) |
4823 return last; | 4955 return last; |
4824 | 4956 |
4843 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert | 4975 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert |
4844 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an | 4976 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an |
4845 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */ | 4977 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */ |
4846 | 4978 |
4847 static rtx_insn * | 4979 static rtx_insn * |
4848 emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns, | 4980 emit_pattern_before (rtx pattern, rtx_insn *before, bool skip_debug_insns, |
4849 bool insnp, rtx_insn *(*make_raw) (rtx)) | 4981 bool insnp, rtx_insn *(*make_raw) (rtx)) |
4850 { | 4982 { |
4851 rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before); | |
4852 rtx_insn *next = before; | 4983 rtx_insn *next = before; |
4853 | 4984 |
4854 if (skip_debug_insns) | 4985 if (skip_debug_insns) |
4855 while (DEBUG_INSN_P (next)) | 4986 while (DEBUG_INSN_P (next)) |
4856 next = PREV_INSN (next); | 4987 next = PREV_INSN (next); |
4858 if (INSN_P (next)) | 4989 if (INSN_P (next)) |
4859 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next), | 4990 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next), |
4860 insnp, make_raw); | 4991 insnp, make_raw); |
4861 else | 4992 else |
4862 return emit_pattern_before_noloc (pattern, before, | 4993 return emit_pattern_before_noloc (pattern, before, |
4863 insnp ? before : NULL_RTX, | 4994 insnp ? before : NULL, |
4864 NULL, make_raw); | 4995 NULL, make_raw); |
4865 } | 4996 } |
4866 | 4997 |
4867 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */ | 4998 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */ |
4868 rtx_insn * | 4999 rtx_insn * |
4869 emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc) | 5000 emit_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc) |
4870 { | 5001 { |
4871 return emit_pattern_before_setloc (pattern, before, loc, true, | 5002 return emit_pattern_before_setloc (pattern, before, loc, true, |
4872 make_insn_raw); | 5003 make_insn_raw); |
4873 } | 5004 } |
4874 | 5005 |
4875 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */ | 5006 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */ |
4876 rtx_insn * | 5007 rtx_insn * |
4877 emit_insn_before (rtx pattern, rtx before) | 5008 emit_insn_before (rtx pattern, rtx_insn *before) |
4878 { | 5009 { |
4879 return emit_pattern_before (pattern, before, true, true, make_insn_raw); | 5010 return emit_pattern_before (pattern, before, true, true, make_insn_raw); |
4880 } | 5011 } |
4881 | 5012 |
4882 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */ | 5013 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */ |
4883 rtx_jump_insn * | 5014 rtx_jump_insn * |
4884 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc) | 5015 emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc) |
4885 { | 5016 { |
4886 return as_a <rtx_jump_insn *> ( | 5017 return as_a <rtx_jump_insn *> ( |
4887 emit_pattern_before_setloc (pattern, before, loc, false, | 5018 emit_pattern_before_setloc (pattern, before, loc, false, |
4888 make_jump_insn_raw)); | 5019 make_jump_insn_raw)); |
4889 } | 5020 } |
4890 | 5021 |
4891 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */ | 5022 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */ |
4892 rtx_jump_insn * | 5023 rtx_jump_insn * |
4893 emit_jump_insn_before (rtx pattern, rtx before) | 5024 emit_jump_insn_before (rtx pattern, rtx_insn *before) |
4894 { | 5025 { |
4895 return as_a <rtx_jump_insn *> ( | 5026 return as_a <rtx_jump_insn *> ( |
4896 emit_pattern_before (pattern, before, true, false, | 5027 emit_pattern_before (pattern, before, true, false, |
4897 make_jump_insn_raw)); | 5028 make_jump_insn_raw)); |
4898 } | 5029 } |
4899 | 5030 |
4900 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */ | 5031 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */ |
4901 rtx_insn * | 5032 rtx_insn * |
4902 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc) | 5033 emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc) |
4903 { | 5034 { |
4904 return emit_pattern_before_setloc (pattern, before, loc, false, | 5035 return emit_pattern_before_setloc (pattern, before, loc, false, |
4905 make_call_insn_raw); | 5036 make_call_insn_raw); |
4906 } | 5037 } |
4907 | 5038 |
4914 make_call_insn_raw); | 5045 make_call_insn_raw); |
4915 } | 5046 } |
4916 | 5047 |
4917 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */ | 5048 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */ |
4918 rtx_insn * | 5049 rtx_insn * |
4919 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc) | 5050 emit_debug_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc) |
4920 { | 5051 { |
4921 return emit_pattern_before_setloc (pattern, before, loc, false, | 5052 return emit_pattern_before_setloc (pattern, before, loc, false, |
4922 make_debug_insn_raw); | 5053 make_debug_insn_raw); |
4923 } | 5054 } |
4924 | 5055 |
5559 case CC0: | 5690 case CC0: |
5560 case RETURN: | 5691 case RETURN: |
5561 case SIMPLE_RETURN: | 5692 case SIMPLE_RETURN: |
5562 return orig; | 5693 return orig; |
5563 case CLOBBER: | 5694 case CLOBBER: |
5695 case CLOBBER_HIGH: | |
5564 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg | 5696 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg |
5565 clobbers or clobbers of hard registers that originated as pseudos. | 5697 clobbers or clobbers of hard registers that originated as pseudos. |
5566 This is needed to allow safe register renaming. */ | 5698 This is needed to allow safe register renaming. */ |
5567 if (REG_P (XEXP (orig, 0)) | 5699 if (REG_P (XEXP (orig, 0)) |
5568 && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))) | 5700 && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))) |
5629 break; | 5761 break; |
5630 | 5762 |
5631 case 't': | 5763 case 't': |
5632 case 'w': | 5764 case 'w': |
5633 case 'i': | 5765 case 'i': |
5766 case 'p': | |
5634 case 's': | 5767 case 's': |
5635 case 'S': | 5768 case 'S': |
5636 case 'u': | 5769 case 'u': |
5637 case '0': | 5770 case '0': |
5638 /* These are left unchanged. */ | 5771 /* These are left unchanged. */ |
5740 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY; | 5873 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY; |
5741 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY; | 5874 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY; |
5742 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY; | 5875 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY; |
5743 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY; | 5876 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY; |
5744 | 5877 |
5745 /* ??? These are problematic (for example, 3 out of 4 are wrong on | |
5746 32-bit SPARC and cannot be all fixed because of the ABI). */ | |
5747 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY; | 5878 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY; |
5748 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY; | 5879 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY; |
5749 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY; | 5880 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY; |
5750 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY; | 5881 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY; |
5751 | 5882 |
5755 #ifdef INIT_EXPANDERS | 5886 #ifdef INIT_EXPANDERS |
5756 INIT_EXPANDERS; | 5887 INIT_EXPANDERS; |
5757 #endif | 5888 #endif |
5758 } | 5889 } |
5759 | 5890 |
5760 /* Generate a vector constant for mode MODE and constant value CONSTANT. */ | 5891 /* Return the value of element I of CONST_VECTOR X as a wide_int. */ |
5892 | |
5893 wide_int | |
5894 const_vector_int_elt (const_rtx x, unsigned int i) | |
5895 { | |
5896 /* First handle elements that are directly encoded. */ | |
5897 machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x)); | |
5898 if (i < (unsigned int) XVECLEN (x, 0)) | |
5899 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode); | |
5900 | |
5901 /* Identify the pattern that contains element I and work out the index of | |
5902 the last encoded element for that pattern. */ | |
5903 unsigned int encoded_nelts = const_vector_encoded_nelts (x); | |
5904 unsigned int npatterns = CONST_VECTOR_NPATTERNS (x); | |
5905 unsigned int count = i / npatterns; | |
5906 unsigned int pattern = i % npatterns; | |
5907 unsigned int final_i = encoded_nelts - npatterns + pattern; | |
5908 | |
5909 /* If there are no steps, the final encoded value is the right one. */ | |
5910 if (!CONST_VECTOR_STEPPED_P (x)) | |
5911 return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode); | |
5912 | |
5913 /* Otherwise work out the value from the last two encoded elements. */ | |
5914 rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns); | |
5915 rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i); | |
5916 wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode), | |
5917 rtx_mode_t (v1, elt_mode)); | |
5918 return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff); | |
5919 } | |
5920 | |
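
const_vector_int_elt decodes the new compressed CONST_VECTOR encoding: NPATTERNS interleaved patterns, each storing at most three leading elements, with stepped patterns extended by the difference of their last two encoded values. A standalone C++ model of exactly that decode (illustrative, not GCC code):

  #include <cstdio>

  int main ()
  {
    // Encoded form of { 0, 1, 8, 1, 16, 1, 24, 1, ... }:
    // npatterns = 2, nelts_per_pattern = 3, stepped.
    const int npatterns = 2;
    const int encoded[] = { 0, 1, 8, 1, 16, 1 };
    const int encoded_nelts = 6;

    for (int i = 0; i < 10; i++)     // decode element I on demand
      {
        int v;
        if (i < encoded_nelts)       // directly encoded
          v = encoded[i];
        else                         // extend pattern I%NPATTERNS by its step
          {
            int pattern = i % npatterns;
            int count = i / npatterns;
            int final_i = encoded_nelts - npatterns + pattern;
            int step = encoded[final_i] - encoded[final_i - npatterns];
            v = encoded[final_i] + (count - 2) * step;
          }
        printf ("%d ", v);
      }
    printf ("\n");                   // prints: 0 1 8 1 16 1 24 1 32 1
  }

The payoff is that variable-length vectors, whose element count is itself a poly_int, can still be represented: only the encoded elements need to exist.
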
5921 /* Return the value of element I of CONST_VECTOR X. */ | |
5922 | |
5923 rtx | |
5924 const_vector_elt (const_rtx x, unsigned int i) | |
5925 { | |
5926 /* First handle elements that are directly encoded. */ | |
5927 if (i < (unsigned int) XVECLEN (x, 0)) | |
5928 return CONST_VECTOR_ENCODED_ELT (x, i); | |
5929 | |
5930 /* If there are no steps, the final encoded value is the right one. */ | |
5931 if (!CONST_VECTOR_STEPPED_P (x)) | |
5932 { | |
5933 /* Identify the pattern that contains element I and work out the index of | |
5934 the last encoded element for that pattern. */ | |
5935 unsigned int encoded_nelts = const_vector_encoded_nelts (x); | |
5936 unsigned int npatterns = CONST_VECTOR_NPATTERNS (x); | |
5937 unsigned int pattern = i % npatterns; | |
5938 unsigned int final_i = encoded_nelts - npatterns + pattern; | |
5939 return CONST_VECTOR_ENCODED_ELT (x, final_i); | |
5940 } | |
5941 | |
5942 /* Otherwise work out the value from the last two encoded elements. */ | |
5943 return immed_wide_int_const (const_vector_int_elt (x, i), | |
5944 GET_MODE_INNER (GET_MODE (x))); | |
5945 } | |
5946 | |
5947 /* Return true if X is a valid element for a CONST_VECTOR of the given | |
5948 mode. */ | |
5949 | |
5950 bool | |
5951 valid_for_const_vector_p (machine_mode, rtx x) | |
5952 { | |
5953 return (CONST_SCALAR_INT_P (x) | |
5954 || CONST_DOUBLE_AS_FLOAT_P (x) | |
5955 || CONST_FIXED_P (x)); | |
5956 } | |
5957 | |
5958 /* Generate a vector constant of mode MODE in which every element has | |
5959 value ELT. */ | |
5960 | |
5961 rtx | |
5962 gen_const_vec_duplicate (machine_mode mode, rtx elt) | |
5963 { | |
5964 rtx_vector_builder builder (mode, 1, 1); | |
5965 builder.quick_push (elt); | |
5966 return builder.build (); | |
5967 } | |
5968 | |
5969 /* Return a vector rtx of mode MODE in which every element has value X. | |
5970 The result will be a constant if X is constant. */ | |
5971 | |
5972 rtx | |
5973 gen_vec_duplicate (machine_mode mode, rtx x) | |
5974 { | |
5975 if (valid_for_const_vector_p (mode, x)) | |
5976 return gen_const_vec_duplicate (mode, x); | |
5977 return gen_rtx_VEC_DUPLICATE (mode, x); | |
5978 } | |
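/* Editor's sketch of the two duplicate helpers in use; the mode and the
   pseudo register number are invented for illustration.  */

static void
vec_duplicate_sketch (void)
{
  /* Constant element: folds to a CONST_VECTOR of four ones (the shared
     CONST1_RTX (V4SImode) once the tiny-rtx tables are initialized).  */
  rtx cst_dup = gen_vec_duplicate (V4SImode, const1_rtx);

  /* Non-constant element: stays symbolic as
     (vec_duplicate:V4SI (reg:SI 100)).  */
  rtx reg_dup = gen_vec_duplicate (V4SImode, gen_rtx_REG (SImode, 100));

  (void) cst_dup;
  (void) reg_dup;
}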
5979 | |
5980 /* A subroutine of const_vec_series_p that handles the case in which: | |
5981 | |
5982 (GET_CODE (X) == CONST_VECTOR | |
5983 && CONST_VECTOR_NPATTERNS (X) == 1 | |
5984 && !CONST_VECTOR_DUPLICATE_P (X)) | |
5985 | |
5986 is known to hold. */ | |
5987 | |
5988 bool | |
5989 const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out) | |
5990 { | |
5991 /* Stepped sequences are only defined for integers, to avoid specifying | |
5992 rounding behavior. */ | |
5993 if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT) | |
5994 return false; | |
5995 | |
5996 /* A non-duplicated vector with two elements can always be seen as a | |
5997 series with a nonzero step. Longer vectors must have a stepped | |
5998 encoding. */ | |
5999 if (maybe_ne (CONST_VECTOR_NUNITS (x), 2) | |
6000 && !CONST_VECTOR_STEPPED_P (x)) | |
6001 return false; | |
6002 | |
6003 /* Calculate the step between the first and second elements. */ | |
6004 scalar_mode inner = GET_MODE_INNER (GET_MODE (x)); | |
6005 rtx base = CONST_VECTOR_ELT (x, 0); | |
6006 rtx step = simplify_binary_operation (MINUS, inner, | |
6007 CONST_VECTOR_ENCODED_ELT (x, 1), base); | |
6008 if (rtx_equal_p (step, CONST0_RTX (inner))) | |
6009 return false; | |
6010 | |
6011 /* If we have a stepped encoding, check that the step between the | |
6012 second and third elements is the same as STEP. */ | |
6013 if (CONST_VECTOR_STEPPED_P (x)) | |
6014 { | |
6015 rtx diff = simplify_binary_operation (MINUS, inner, | |
6016 CONST_VECTOR_ENCODED_ELT (x, 2), | |
6017 CONST_VECTOR_ENCODED_ELT (x, 1)); | |
6018 if (!rtx_equal_p (step, diff)) | |
6019 return false; | |
6020 } | |
6021 | |
6022 *base_out = base; | |
6023 *step_out = step; | |
6024 return true; | |
6025 } | |
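/* Editor's note, as a worked example: for the hypothetical stepped encoding
   {3, 5, 7}, the step is 5 - 3 = 2, which is nonzero, and 7 - 5 also
   equals 2, so the vector is recognized as the series 3 + i * 2.  An
   encoding such as {3, 5, 8} fails the second comparison and is
   rejected.  */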
6026 | |
6027 /* Generate a vector constant of mode MODE in which element I has | |
6028 the value BASE + I * STEP. */ | |
6029 | |
6030 rtx | |
6031 gen_const_vec_series (machine_mode mode, rtx base, rtx step) | |
6032 { | |
6033 gcc_assert (valid_for_const_vector_p (mode, base) | |
6034 && valid_for_const_vector_p (mode, step)); | |
6035 | |
6036 rtx_vector_builder builder (mode, 1, 3); | |
6037 builder.quick_push (base); | |
6038 for (int i = 1; i < 3; ++i) | |
6039 builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode), | |
6040 builder[i - 1], step)); | |
6041 return builder.build (); | |
6042 } | |
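/* Editor's sketch (V4SImode chosen for illustration): only the three
   leading elements are pushed, and the builder's stepped encoding
   describes the rest, so the same call also works for variable-length
   vector modes.  */

static rtx
index_vector_sketch (void)
{
  /* {0, 1, 2, 3}: encoded as 0, 1, 2, with the step inferred.  */
  return gen_const_vec_series (V4SImode, const0_rtx, const1_rtx);
}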
6043 | |
6044 /* Generate a vector of mode MODE in which element I has the value | |
6045 BASE + I * STEP. The result will be a constant if BASE and STEP | |
6046 are both constants. */ | |
6047 | |
6048 rtx | |
6049 gen_vec_series (machine_mode mode, rtx base, rtx step) | |
6050 { | |
6051 if (step == const0_rtx) | |
6052 return gen_vec_duplicate (mode, base); | |
6053 if (valid_for_const_vector_p (mode, base) | |
6054 && valid_for_const_vector_p (mode, step)) | |
6055 return gen_const_vec_series (mode, base, step); | |
6056 return gen_rtx_VEC_SERIES (mode, base, step); | |
6057 } | |
6058 | |
6059 /* Generate a new vector constant for mode MODE and constant value | |
6060 CONSTANT. */ | |
5761 | 6061 |
5762 static rtx | 6062 static rtx |
5763 gen_const_vector (machine_mode mode, int constant) | 6063 gen_const_vector (machine_mode mode, int constant) |
5764 { | 6064 { |
5765 rtx tem; | 6065 machine_mode inner = GET_MODE_INNER (mode); |
5766 rtvec v; | |
5767 int units, i; | |
5768 machine_mode inner; | |
5769 | |
5770 units = GET_MODE_NUNITS (mode); | |
5771 inner = GET_MODE_INNER (mode); | |
5772 | 6066 |
5773 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner)); | 6067 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner)); |
5774 | 6068 |
5775 v = rtvec_alloc (units); | 6069 rtx el = const_tiny_rtx[constant][(int) inner]; |
5776 | 6070 gcc_assert (el); |
5777 /* We need to call this function after we set the scalar const_tiny_rtx | 6071 |
5778 entries. */ | 6072 return gen_const_vec_duplicate (mode, el); |
5779 gcc_assert (const_tiny_rtx[constant][(int) inner]); | |
5780 | |
5781 for (i = 0; i < units; ++i) | |
5782 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner]; | |
5783 | |
5784 tem = gen_rtx_raw_CONST_VECTOR (mode, v); | |
5785 return tem; | |
5786 } | 6073 } |
5787 | 6074 |
5788 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector | 6075 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector |
5789 when all elements are zero, and the one vector when all elements are one. */ | 6076 when all elements are zero, and the one vector when all elements are one. */ |
5790 rtx | 6077 rtx |
5791 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v) | 6078 gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v) |
5792 { | 6079 { |
5793 machine_mode inner = GET_MODE_INNER (mode); | 6080 gcc_assert (known_eq (GET_MODE_NUNITS (mode), GET_NUM_ELEM (v))); |
5794 int nunits = GET_MODE_NUNITS (mode); | |
5795 rtx x; | |
5796 int i; | |
5797 | |
5798 /* Check to see if all of the elements have the same value. */ | |
5799 x = RTVEC_ELT (v, nunits - 1); | |
5800 for (i = nunits - 2; i >= 0; i--) | |
5801 if (RTVEC_ELT (v, i) != x) | |
5802 break; | |
5803 | 6081 |
5804 /* If the values are all the same, check to see if we can use one of the | 6082 /* If the values are all the same, check to see if we can use one of the |
5805 standard constant vectors. */ | 6083 standard constant vectors. */ |
5806 if (i == -1) | 6084 if (rtvec_all_equal_p (v)) |
5807 { | 6085 return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0)); |
5808 if (x == CONST0_RTX (inner)) | 6086 |
5809 return CONST0_RTX (mode); | 6087 unsigned int nunits = GET_NUM_ELEM (v); |
5810 else if (x == CONST1_RTX (inner)) | 6088 rtx_vector_builder builder (mode, nunits, 1); |
5811 return CONST1_RTX (mode); | 6089 for (unsigned int i = 0; i < nunits; ++i) |
5812 else if (x == CONSTM1_RTX (inner)) | 6090 builder.quick_push (RTVEC_ELT (v, i)); |
5813 return CONSTM1_RTX (mode); | 6091 return builder.build (v); |
5814 } | |
5815 | |
5816 return gen_rtx_raw_CONST_VECTOR (mode, v); | |
5817 } | 6092 } |
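/* Editor's sketch: with an rtvec input, an all-equal vector is now routed
   through gen_const_vec_duplicate, so for zero the result should come back
   as the shared CONST0_RTX object (mode invented for the example).  */

static rtx
const_vector_canonical_sketch (void)
{
  rtvec v = rtvec_alloc (4);
  for (int i = 0; i < 4; ++i)
    RTVEC_ELT (v, i) = const0_rtx;
  return gen_rtx_CONST_VECTOR (V4SImode, v);
}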
5818 | 6093 |
5819 /* Initialise global register information required by all functions. */ | 6094 /* Initialise global register information required by all functions. */ |
5820 | 6095 |
5821 void | 6096 void |
5866 { | 6141 { |
5867 mode = (machine_mode) i; | 6142 mode = (machine_mode) i; |
5868 attrs = ggc_cleared_alloc<mem_attrs> (); | 6143 attrs = ggc_cleared_alloc<mem_attrs> (); |
5869 attrs->align = BITS_PER_UNIT; | 6144 attrs->align = BITS_PER_UNIT; |
5870 attrs->addrspace = ADDR_SPACE_GENERIC; | 6145 attrs->addrspace = ADDR_SPACE_GENERIC; |
5871 if (mode != BLKmode) | 6146 if (mode != BLKmode && mode != VOIDmode) |
5872 { | 6147 { |
5873 attrs->size_known_p = true; | 6148 attrs->size_known_p = true; |
5874 attrs->size = GET_MODE_SIZE (mode); | 6149 attrs->size = GET_MODE_SIZE (mode); |
5875 if (STRICT_ALIGNMENT) | 6150 if (STRICT_ALIGNMENT) |
5876 attrs->align = GET_MODE_ALIGNMENT (mode); | 6151 attrs->align = GET_MODE_ALIGNMENT (mode); |
5877 } | 6152 } |
5878 mode_mem_attrs[i] = attrs; | 6153 mode_mem_attrs[i] = attrs; |
5879 } | 6154 } |
6155 | |
6156 split_branch_probability = profile_probability::uninitialized (); | |
5880 } | 6157 } |
5881 | 6158 |
5882 /* Initialize global machine_mode variables. */ | 6159 /* Initialize global machine_mode variables. */ |
5883 | 6160 |
5884 void | 6161 void |
5898 opt_word_mode = mode; | 6175 opt_word_mode = mode; |
5899 } | 6176 } |
5900 | 6177 |
5901 byte_mode = opt_byte_mode.require (); | 6178 byte_mode = opt_byte_mode.require (); |
5902 word_mode = opt_word_mode.require (); | 6179 word_mode = opt_word_mode.require (); |
5903 ptr_mode = int_mode_for_size (POINTER_SIZE, 0).require (); | 6180 ptr_mode = as_a <scalar_int_mode> |
6181 (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ()); | |
5904 } | 6182 } |
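/* Editor's note (illustrative): on a typical 64-bit target with
   POINTER_SIZE == 64 and Pmode == DImode, the lookup above reduces to
   mode_for_size (64, MODE_INT, 0), giving ptr_mode == DImode.  Deriving
   the mode class from Pmode matters for targets whose pointer mode is a
   partial-integer mode, e.g. 20-bit PSImode pointers.  */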
5905 | 6183 |
5906 /* Create some permanent unique rtl objects shared between all functions. */ | 6184 /* Create some permanent unique rtl objects shared between all functions. */ |
5907 | 6185 |
5908 void | 6186 void |
5919 | 6197 |
5920 #if TARGET_SUPPORTS_WIDE_INT | 6198 #if TARGET_SUPPORTS_WIDE_INT |
5921 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37); | 6199 const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37); |
5922 #endif | 6200 #endif |
5923 const_double_htab = hash_table<const_double_hasher>::create_ggc (37); | 6201 const_double_htab = hash_table<const_double_hasher>::create_ggc (37); |
6202 | |
6203 if (NUM_POLY_INT_COEFFS > 1) | |
6204 const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37); | |
5924 | 6205 |
5925 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37); | 6206 const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37); |
5926 | 6207 |
5927 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37); | 6208 reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37); |
5928 | 6209 |
5994 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx; | 6275 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx; |
5995 | 6276 |
5996 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT) | 6277 FOR_EACH_MODE_IN_CLASS (mode, MODE_INT) |
5997 const_tiny_rtx[3][(int) mode] = constm1_rtx; | 6278 const_tiny_rtx[3][(int) mode] = constm1_rtx; |
5998 | 6279 |
6280 /* For BImode, 1 and -1 are unsigned and signed interpretations | |
6281 of the same value. */ | |
6282 const_tiny_rtx[0][(int) BImode] = const0_rtx; | |
6283 const_tiny_rtx[1][(int) BImode] = const_true_rtx; | |
6284 const_tiny_rtx[3][(int) BImode] = const_true_rtx; | |
6285 | |
5999 for (mode = MIN_MODE_PARTIAL_INT; | 6286 for (mode = MIN_MODE_PARTIAL_INT; |
6000 mode <= MAX_MODE_PARTIAL_INT; | 6287 mode <= MAX_MODE_PARTIAL_INT; |
6001 mode = (machine_mode)((int)(mode) + 1)) | 6288 mode = (machine_mode)((int)(mode) + 1)) |
6002 const_tiny_rtx[3][(int) mode] = constm1_rtx; | 6289 const_tiny_rtx[3][(int) mode] = constm1_rtx; |
6003 | 6290 |
6009 | 6296 |
6010 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT) | 6297 FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT) |
6011 { | 6298 { |
6012 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)]; | 6299 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)]; |
6013 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner); | 6300 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner); |
6301 } | |
6302 | |
6303 /* As for BImode, "all 1" and "all -1" are unsigned and signed | |
6304 interpretations of the same value. */ | |
6305 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_BOOL) | |
6306 { | |
6307 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); | |
6308 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3); | |
6309 const_tiny_rtx[1][(int) mode] = const_tiny_rtx[3][(int) mode]; | |
6014 } | 6310 } |
6015 | 6311 |
6016 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT) | 6312 FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT) |
6017 { | 6313 { |
6018 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); | 6314 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0); |
6111 } | 6407 } |
6112 | 6408 |
6113 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i) | 6409 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i) |
6114 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC) | 6410 if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC) |
6115 const_tiny_rtx[0][i] = const0_rtx; | 6411 const_tiny_rtx[0][i] = const0_rtx; |
6116 | |
6117 const_tiny_rtx[0][(int) BImode] = const0_rtx; | |
6118 if (STORE_FLAG_VALUE == 1) | |
6119 const_tiny_rtx[1][(int) BImode] = const1_rtx; | |
6120 | |
6121 FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_POINTER_BOUNDS) | |
6122 { | |
6123 scalar_mode smode = smode_iter.require (); | |
6124 wide_int wi_zero = wi::zero (GET_MODE_PRECISION (smode)); | |
6125 const_tiny_rtx[0][smode] = immed_wide_int_const (wi_zero, smode); | |
6126 } | |
6127 | 6412 |
6128 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode); | 6413 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode); |
6129 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode); | 6414 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode); |
6130 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode); | 6415 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode); |
6131 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode); | 6416 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode); |
6217 else | 6502 else |
6218 return (hard_reg_clobbers[mode][regno] = | 6503 return (hard_reg_clobbers[mode][regno] = |
6219 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno))); | 6504 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno))); |
6220 } | 6505 } |
6221 | 6506 |
6507 static GTY((deletable)) rtx | |
6508 hard_reg_clobbers_high[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER]; | |
6509 | |
6510 /* Return a CLOBBER_HIGH expression for register REGNO that clobbers MODE, | |
6511 caching into HARD_REG_CLOBBERS_HIGH. */ | |
6512 rtx | |
6513 gen_hard_reg_clobber_high (machine_mode mode, unsigned int regno) | |
6514 { | |
6515 if (hard_reg_clobbers_high[mode][regno]) | |
6516 return hard_reg_clobbers_high[mode][regno]; | |
6517 else | |
6518 return (hard_reg_clobbers_high[mode][regno] | |
6519 = gen_rtx_CLOBBER_HIGH (VOIDmode, gen_rtx_REG (mode, regno))); | |
6520 } | |
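/* Editor's sketch: illustrative use of the new helper; the mode and hard
   register number are invented.  Repeated calls return the same cached
   rtx.  */

static rtx
clobber_high_sketch (void)
{
  /* (clobber_high (reg:DI 0)), cached in hard_reg_clobbers_high.  */
  return gen_hard_reg_clobber_high (DImode, 0);
}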
6521 | |
6222 location_t prologue_location; | 6522 location_t prologue_location; |
6223 location_t epilogue_location; | 6523 location_t epilogue_location; |
6224 | 6524 |
6225 /* Hold current location information and last location information, so the | 6525 /* Hold current location information and last location information, so the |
6226 data structures are built lazily only when some instructions in a given | 6526 data structures are built lazily only when some instructions in a given |
6307 default: | 6607 default: |
6308 gcc_unreachable (); | 6608 gcc_unreachable (); |
6309 } | 6609 } |
6310 } | 6610 } |
6311 | 6611 |
6612 /* Return a constant shift amount for shifting a value of mode MODE | |
6613 by VALUE bits. */ | |
6614 | |
6615 rtx | |
6616 gen_int_shift_amount (machine_mode, poly_int64 value) | |
6617 { | |
6618 /* Use a 64-bit mode, to avoid any truncation. | |
6619 | |
6620 ??? Perhaps this should be automatically derived from the .md files | |
6621 instead, or perhaps have a target hook. */ | |
6622 scalar_int_mode shift_mode = (BITS_PER_UNIT == 8 | |
6623 ? DImode | |
6624 : int_mode_for_size (64, 0).require ()); | |
6625 return gen_int_mode (value, shift_mode); | |
6626 } | |
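/* Editor's sketch: the amount is created in a wide mode regardless of the
   mode being shifted, so the amount itself is never truncated (mode and
   value invented for the example).  */

static rtx
shift_amount_sketch (void)
{
  /* Shift a QImode value by 3: yields (const_int 3), built via DImode on
     8-bit-byte targets.  */
  return gen_int_shift_amount (QImode, 3);
}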
6627 | |
6312 /* Initialize fields of rtl_data related to stack alignment. */ | 6628 /* Initialize fields of rtl_data related to stack alignment. */ |
6313 | 6629 |
6314 void | 6630 void |
6315 rtl_data::init_stack_alignment () | 6631 rtl_data::init_stack_alignment () |
6316 { | 6632 { |