Mercurial > hg > CbC > CbC_gcc
comparison gcc/config/s390/s390.c @ 55:77e2b8dfacca gcc-4.4.5
update it from 4.4.3 to 4.5.0
author | ryoma <e075725@ie.u-ryukyu.ac.jp> |
---|---|
date | Fri, 12 Feb 2010 23:39:51 +0900 |
parents | 3bfb6c00c1e0 |
children | b7f97abdc517 |
comparison
equal
deleted
inserted
replaced
52:c156f1bd5cd9 | 55:77e2b8dfacca |
---|---|
1 /* Subroutines used for code generation on IBM S/390 and zSeries | 1 /* Subroutines used for code generation on IBM S/390 and zSeries |
2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, | 2 Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, |
3 2007, 2008 Free Software Foundation, Inc. | 3 2007, 2008, 2009 Free Software Foundation, Inc. |
4 Contributed by Hartmut Penner (hpenner@de.ibm.com) and | 4 Contributed by Hartmut Penner (hpenner@de.ibm.com) and |
5 Ulrich Weigand (uweigand@de.ibm.com) and | 5 Ulrich Weigand (uweigand@de.ibm.com) and |
6 Andreas Krebbel (Andreas.Krebbel@de.ibm.com). | 6 Andreas Krebbel (Andreas.Krebbel@de.ibm.com). |
7 | 7 |
8 This file is part of GCC. | 8 This file is part of GCC. |
55 #include "params.h" | 55 #include "params.h" |
56 | 56 |
57 | 57 |
58 /* Define the specific costs for a given cpu. */ | 58 /* Define the specific costs for a given cpu. */ |
59 | 59 |
60 struct processor_costs | 60 struct processor_costs |
61 { | 61 { |
62 /* multiplication */ | 62 /* multiplication */ |
63 const int m; /* cost of an M instruction. */ | 63 const int m; /* cost of an M instruction. */ |
64 const int mghi; /* cost of an MGHI instruction. */ | 64 const int mghi; /* cost of an MGHI instruction. */ |
65 const int mh; /* cost of an MH instruction. */ | 65 const int mh; /* cost of an MH instruction. */ |
93 }; | 93 }; |
94 | 94 |
95 const struct processor_costs *s390_cost; | 95 const struct processor_costs *s390_cost; |
96 | 96 |
97 static const | 97 static const |
98 struct processor_costs z900_cost = | 98 struct processor_costs z900_cost = |
99 { | 99 { |
100 COSTS_N_INSNS (5), /* M */ | 100 COSTS_N_INSNS (5), /* M */ |
101 COSTS_N_INSNS (10), /* MGHI */ | 101 COSTS_N_INSNS (10), /* MGHI */ |
102 COSTS_N_INSNS (5), /* MH */ | 102 COSTS_N_INSNS (5), /* MH */ |
103 COSTS_N_INSNS (4), /* MHI */ | 103 COSTS_N_INSNS (4), /* MHI */ |
125 COSTS_N_INSNS (32), /* DSGFR */ | 125 COSTS_N_INSNS (32), /* DSGFR */ |
126 COSTS_N_INSNS (32), /* DSGR */ | 126 COSTS_N_INSNS (32), /* DSGR */ |
127 }; | 127 }; |
128 | 128 |
129 static const | 129 static const |
130 struct processor_costs z990_cost = | 130 struct processor_costs z990_cost = |
131 { | 131 { |
132 COSTS_N_INSNS (4), /* M */ | 132 COSTS_N_INSNS (4), /* M */ |
133 COSTS_N_INSNS (2), /* MGHI */ | 133 COSTS_N_INSNS (2), /* MGHI */ |
134 COSTS_N_INSNS (2), /* MH */ | 134 COSTS_N_INSNS (2), /* MH */ |
135 COSTS_N_INSNS (2), /* MHI */ | 135 COSTS_N_INSNS (2), /* MHI */ |
157 COSTS_N_INSNS (31), /* DSGFR */ | 157 COSTS_N_INSNS (31), /* DSGFR */ |
158 COSTS_N_INSNS (31), /* DSGR */ | 158 COSTS_N_INSNS (31), /* DSGR */ |
159 }; | 159 }; |
160 | 160 |
161 static const | 161 static const |
162 struct processor_costs z9_109_cost = | 162 struct processor_costs z9_109_cost = |
163 { | 163 { |
164 COSTS_N_INSNS (4), /* M */ | 164 COSTS_N_INSNS (4), /* M */ |
165 COSTS_N_INSNS (2), /* MGHI */ | 165 COSTS_N_INSNS (2), /* MGHI */ |
166 COSTS_N_INSNS (2), /* MH */ | 166 COSTS_N_INSNS (2), /* MH */ |
167 COSTS_N_INSNS (2), /* MHI */ | 167 COSTS_N_INSNS (2), /* MHI */ |
225 extern int reload_completed; | 225 extern int reload_completed; |
226 | 226 |
227 /* Kept up to date using the SCHED_VARIABLE_ISSUE hook. */ | 227 /* Kept up to date using the SCHED_VARIABLE_ISSUE hook. */ |
228 static rtx last_scheduled_insn; | 228 static rtx last_scheduled_insn; |
229 | 229 |
230 /* Save information from a "cmpxx" operation until the branch or scc is | |
231 emitted. */ | |
232 rtx s390_compare_op0, s390_compare_op1; | |
233 | |
234 /* Save the result of a compare_and_swap until the branch or scc is | |
235 emitted. */ | |
236 rtx s390_compare_emitted = NULL_RTX; | |
237 | |
238 /* Structure used to hold the components of a S/390 memory | 230 /* Structure used to hold the components of a S/390 memory |
239 address. A legitimate address on S/390 is of the general | 231 address. A legitimate address on S/390 is of the general |
240 form | 232 form |
241 base + index + displacement | 233 base + index + displacement |
242 where any of the components is optional. | 234 where any of the components is optional. |
253 bool literal_pool; | 245 bool literal_pool; |
254 }; | 246 }; |
255 | 247 |
256 /* Which cpu are we tuning for. */ | 248 /* Which cpu are we tuning for. */ |
257 enum processor_type s390_tune = PROCESSOR_max; | 249 enum processor_type s390_tune = PROCESSOR_max; |
258 enum processor_flags s390_tune_flags; | 250 int s390_tune_flags; |
259 /* Which instruction set architecture to use. */ | 251 /* Which instruction set architecture to use. */ |
260 enum processor_type s390_arch; | 252 enum processor_type s390_arch; |
261 enum processor_flags s390_arch_flags; | 253 int s390_arch_flags; |
262 | 254 |
263 HOST_WIDE_INT s390_warn_framesize = 0; | 255 HOST_WIDE_INT s390_warn_framesize = 0; |
264 HOST_WIDE_INT s390_stack_size = 0; | 256 HOST_WIDE_INT s390_stack_size = 0; |
265 HOST_WIDE_INT s390_stack_guard = 0; | 257 HOST_WIDE_INT s390_stack_guard = 0; |
266 | 258 |
267 /* The following structure is embedded in the machine | 259 /* The following structure is embedded in the machine |
268 specific part of struct function. */ | 260 specific part of struct function. */ |
269 | 261 |
270 struct s390_frame_layout GTY (()) | 262 struct GTY (()) s390_frame_layout |
271 { | 263 { |
272 /* Offset within stack frame. */ | 264 /* Offset within stack frame. */ |
273 HOST_WIDE_INT gprs_offset; | 265 HOST_WIDE_INT gprs_offset; |
274 HOST_WIDE_INT f0_offset; | 266 HOST_WIDE_INT f0_offset; |
275 HOST_WIDE_INT f4_offset; | 267 HOST_WIDE_INT f4_offset; |
285 int first_save_gpr; | 277 int first_save_gpr; |
286 int first_restore_gpr; | 278 int first_restore_gpr; |
287 int last_save_gpr; | 279 int last_save_gpr; |
288 int last_restore_gpr; | 280 int last_restore_gpr; |
289 | 281 |
290 /* Bits standing for floating point registers. Set, if the | 282 /* Bits standing for floating point registers. Set, if the |
291 respective register has to be saved. Starting with reg 16 (f0) | 283 respective register has to be saved. Starting with reg 16 (f0) |
292 at the rightmost bit. | 284 at the rightmost bit. |
293 Bit 15 - 8 7 6 5 4 3 2 1 0 | 285 Bit 15 - 8 7 6 5 4 3 2 1 0 |
294 fpr 15 - 8 7 5 3 1 6 4 2 0 | 286 fpr 15 - 8 7 5 3 1 6 4 2 0 |
295 reg 31 - 24 23 22 21 20 19 18 17 16 */ | 287 reg 31 - 24 23 22 21 20 19 18 17 16 */ |
296 unsigned int fpr_bitmap; | 288 unsigned int fpr_bitmap; |
308 HOST_WIDE_INT frame_size; | 300 HOST_WIDE_INT frame_size; |
309 }; | 301 }; |
310 | 302 |
311 /* Define the structure for the machine field in struct function. */ | 303 /* Define the structure for the machine field in struct function. */ |
312 | 304 |
313 struct machine_function GTY(()) | 305 struct GTY(()) machine_function |
314 { | 306 { |
315 struct s390_frame_layout frame_layout; | 307 struct s390_frame_layout frame_layout; |
316 | 308 |
317 /* Literal pool base register. */ | 309 /* Literal pool base register. */ |
318 rtx base_reg; | 310 rtx base_reg; |
375 /* Return true if the back end supports mode MODE. */ | 367 /* Return true if the back end supports mode MODE. */ |
376 static bool | 368 static bool |
377 s390_scalar_mode_supported_p (enum machine_mode mode) | 369 s390_scalar_mode_supported_p (enum machine_mode mode) |
378 { | 370 { |
379 if (DECIMAL_FLOAT_MODE_P (mode)) | 371 if (DECIMAL_FLOAT_MODE_P (mode)) |
380 return true; | 372 return default_decimal_float_supported_p (); |
381 else | 373 else |
382 return default_scalar_mode_supported_p (mode); | 374 return default_scalar_mode_supported_p (mode); |
383 } | 375 } |
384 | 376 |
385 /* Set the has_landing_pad_p flag in struct machine_function to VALUE. */ | 377 /* Set the has_landing_pad_p flag in struct machine_function to VALUE. */ |
414 case CCSRmode: | 406 case CCSRmode: |
415 case CCURmode: | 407 case CCURmode: |
416 case CCZ1mode: | 408 case CCZ1mode: |
417 if (m2 == CCZmode) | 409 if (m2 == CCZmode) |
418 return m1; | 410 return m1; |
419 | 411 |
420 return VOIDmode; | 412 return VOIDmode; |
421 | 413 |
422 default: | 414 default: |
423 return VOIDmode; | 415 return VOIDmode; |
424 } | 416 } |
524 /* Selected bits all zero: CC0. | 516 /* Selected bits all zero: CC0. |
525 e.g.: int a; if ((a & (16 + 128)) == 0) */ | 517 e.g.: int a; if ((a & (16 + 128)) == 0) */ |
526 if (INTVAL (op2) == 0) | 518 if (INTVAL (op2) == 0) |
527 return CCTmode; | 519 return CCTmode; |
528 | 520 |
529 /* Selected bits all one: CC3. | 521 /* Selected bits all one: CC3. |
530 e.g.: int a; if ((a & (16 + 128)) == 16 + 128) */ | 522 e.g.: int a; if ((a & (16 + 128)) == 16 + 128) */ |
531 if (INTVAL (op2) == INTVAL (op1)) | 523 if (INTVAL (op2) == INTVAL (op1)) |
532 return CCT3mode; | 524 return CCT3mode; |
533 | 525 |
534 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. e.g.: | 526 /* Exactly two bits selected, mixed zeroes and ones: CC1 or CC2. e.g.: |
596 case LT: | 588 case LT: |
597 case GE: | 589 case GE: |
598 case GT: | 590 case GT: |
599 /* The only overflow condition of NEG and ABS happens when | 591 /* The only overflow condition of NEG and ABS happens when |
600 -INT_MAX is used as parameter, which stays negative. So | 592 -INT_MAX is used as parameter, which stays negative. So |
601 we have an overflow from a positive value to a negative. | 593 we have an overflow from a positive value to a negative. |
602 Using CCAP mode the resulting cc can be used for comparisons. */ | 594 Using CCAP mode the resulting cc can be used for comparisons. */ |
603 if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS) | 595 if ((GET_CODE (op0) == NEG || GET_CODE (op0) == ABS) |
604 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT) | 596 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT) |
605 return CCAPmode; | 597 return CCAPmode; |
606 | 598 |
607 /* If constants are involved in an add instruction it is possible to use | 599 /* If constants are involved in an add instruction it is possible to use |
608 the resulting cc for comparisons with zero. Knowing the sign of the | 600 the resulting cc for comparisons with zero. Knowing the sign of the |
609 constant the overflow behavior gets predictable. e.g.: | 601 constant the overflow behavior gets predictable. e.g.: |
610 int a, b; if ((b = a + c) > 0) | 602 int a, b; if ((b = a + c) > 0) |
611 with c as a constant value: c < 0 -> CCAN and c >= 0 -> CCAP */ | 603 with c as a constant value: c < 0 -> CCAN and c >= 0 -> CCAP */ |
612 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT | 604 if (GET_CODE (op0) == PLUS && GET_CODE (XEXP (op0, 1)) == CONST_INT |
613 && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1)))) | 605 && CONST_OK_FOR_K (INTVAL (XEXP (op0, 1)))) |
614 { | 606 { |
615 if (INTVAL (XEXP((op0), 1)) < 0) | 607 if (INTVAL (XEXP((op0), 1)) < 0) |
728 /* Narrow comparisons against 0xffff to HImode if possible. */ | 720 /* Narrow comparisons against 0xffff to HImode if possible. */ |
729 if ((*code == EQ || *code == NE) | 721 if ((*code == EQ || *code == NE) |
730 && GET_CODE (*op1) == CONST_INT | 722 && GET_CODE (*op1) == CONST_INT |
731 && INTVAL (*op1) == 0xffff | 723 && INTVAL (*op1) == 0xffff |
732 && SCALAR_INT_MODE_P (GET_MODE (*op0)) | 724 && SCALAR_INT_MODE_P (GET_MODE (*op0)) |
733 && (nonzero_bits (*op0, GET_MODE (*op0)) | 725 && (nonzero_bits (*op0, GET_MODE (*op0)) |
734 & ~(unsigned HOST_WIDE_INT) 0xffff) == 0) | 726 & ~(unsigned HOST_WIDE_INT) 0xffff) == 0) |
735 { | 727 { |
736 *op0 = gen_lowpart (HImode, *op0); | 728 *op0 = gen_lowpart (HImode, *op0); |
737 *op1 = constm1_rtx; | 729 *op1 = constm1_rtx; |
738 } | 730 } |
820 | 812 |
821 rtx | 813 rtx |
822 s390_emit_compare (enum rtx_code code, rtx op0, rtx op1) | 814 s390_emit_compare (enum rtx_code code, rtx op0, rtx op1) |
823 { | 815 { |
824 enum machine_mode mode = s390_select_ccmode (code, op0, op1); | 816 enum machine_mode mode = s390_select_ccmode (code, op0, op1); |
825 rtx ret = NULL_RTX; | 817 rtx cc; |
826 | 818 |
827 /* Do not output a redundant compare instruction if a compare_and_swap | 819 /* Do not output a redundant compare instruction if a compare_and_swap |
828 pattern already computed the result and the machine modes are compatible. */ | 820 pattern already computed the result and the machine modes are compatible. */ |
829 if (s390_compare_emitted | 821 if (GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC) |
830 && (s390_cc_modes_compatible (GET_MODE (s390_compare_emitted), mode) | 822 { |
831 == GET_MODE (s390_compare_emitted))) | 823 gcc_assert (s390_cc_modes_compatible (GET_MODE (op0), mode) |
832 ret = gen_rtx_fmt_ee (code, VOIDmode, s390_compare_emitted, const0_rtx); | 824 == GET_MODE (op0)); |
825 cc = op0; | |
826 } | |
833 else | 827 else |
834 { | 828 { |
835 rtx cc = gen_rtx_REG (mode, CC_REGNUM); | 829 cc = gen_rtx_REG (mode, CC_REGNUM); |
836 | |
837 emit_insn (gen_rtx_SET (VOIDmode, cc, gen_rtx_COMPARE (mode, op0, op1))); | 830 emit_insn (gen_rtx_SET (VOIDmode, cc, gen_rtx_COMPARE (mode, op0, op1))); |
838 ret = gen_rtx_fmt_ee (code, VOIDmode, cc, const0_rtx); | 831 } |
839 } | 832 |
840 s390_compare_emitted = NULL_RTX; | 833 return gen_rtx_fmt_ee (code, VOIDmode, cc, const0_rtx); |
841 return ret; | |
842 } | 834 } |
843 | 835 |
844 /* Emit a SImode compare and swap instruction setting MEM to NEW_RTX if OLD | 836 /* Emit a SImode compare and swap instruction setting MEM to NEW_RTX if OLD |
845 matches CMP. | 837 matches CMP. |
846 Return the correct condition RTL to be placed in the IF_THEN_ELSE of the | 838 Return the correct condition RTL to be placed in the IF_THEN_ELSE of the |
847 conditional branch testing the result. */ | 839 conditional branch testing the result. */ |
848 | 840 |
849 static rtx | 841 static rtx |
850 s390_emit_compare_and_swap (enum rtx_code code, rtx old, rtx mem, rtx cmp, rtx new_rtx) | 842 s390_emit_compare_and_swap (enum rtx_code code, rtx old, rtx mem, rtx cmp, rtx new_rtx) |
851 { | 843 { |
852 rtx ret; | 844 emit_insn (gen_sync_compare_and_swapsi (old, mem, cmp, new_rtx)); |
853 | 845 return s390_emit_compare (code, gen_rtx_REG (CCZ1mode, CC_REGNUM), const0_rtx); |
854 emit_insn (gen_sync_compare_and_swap_ccsi (old, mem, cmp, new_rtx)); | |
855 ret = gen_rtx_fmt_ee (code, VOIDmode, s390_compare_emitted, const0_rtx); | |
856 | |
857 s390_compare_emitted = NULL_RTX; | |
858 | |
859 return ret; | |
860 } | 846 } |
861 | 847 |
862 /* Emit a jump instruction to TARGET. If COND is NULL_RTX, emit an | 848 /* Emit a jump instruction to TARGET. If COND is NULL_RTX, emit an |
863 unconditional jump, else a conditional jump under condition COND. */ | 849 unconditional jump, else a conditional jump under condition COND. */ |
864 | 850 |
1316 | 1302 |
1317 addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1); | 1303 addr_delta = simplify_binary_operation (MINUS, Pmode, addr2, addr1); |
1318 | 1304 |
1319 /* This overlapping check is used by peepholes merging memory block operations. | 1305 /* This overlapping check is used by peepholes merging memory block operations. |
1320 Overlapping operations would otherwise be recognized by the S/390 hardware | 1306 Overlapping operations would otherwise be recognized by the S/390 hardware |
1321 and would fall back to a slower implementation. Allowing overlapping | 1307 and would fall back to a slower implementation. Allowing overlapping |
1322 operations would lead to slow code but not to wrong code. Therefore we are | 1308 operations would lead to slow code but not to wrong code. Therefore we are |
1323 somewhat optimistic if we cannot prove that the memory blocks are | 1309 somewhat optimistic if we cannot prove that the memory blocks are |
1324 overlapping. | 1310 overlapping. |
1325 That's why we return false here although this may accept operations on | 1311 That's why we return false here although this may accept operations on |
1326 overlapping memory areas. */ | 1312 overlapping memory areas. */ |
1327 if (!addr_delta || GET_CODE (addr_delta) != CONST_INT) | 1313 if (!addr_delta || GET_CODE (addr_delta) != CONST_INT) |
1328 return false; | 1314 return false; |
1486 to the associated processor_type and processor_flags if so. */ | 1472 to the associated processor_type and processor_flags if so. */ |
1487 | 1473 |
1488 static bool | 1474 static bool |
1489 s390_handle_arch_option (const char *arg, | 1475 s390_handle_arch_option (const char *arg, |
1490 enum processor_type *type, | 1476 enum processor_type *type, |
1491 enum processor_flags *flags) | 1477 int *flags) |
1492 { | 1478 { |
1493 static struct pta | 1479 static struct pta |
1494 { | 1480 { |
1495 const char *const name; /* processor name or nickname. */ | 1481 const char *const name; /* processor name or nickname. */ |
1496 const enum processor_type processor; | 1482 const enum processor_type processor; |
1497 const enum processor_flags flags; | 1483 const int flags; /* From enum processor_flags. */ |
1498 } | 1484 } |
1499 const processor_alias_table[] = | 1485 const processor_alias_table[] = |
1500 { | 1486 { |
1501 {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT}, | 1487 {"g5", PROCESSOR_9672_G5, PF_IEEE_FLOAT}, |
1502 {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT}, | 1488 {"g6", PROCESSOR_9672_G6, PF_IEEE_FLOAT}, |
1641 error ("stack size must be greater than the stack guard value"); | 1627 error ("stack size must be greater than the stack guard value"); |
1642 else if (s390_stack_size > 1 << 16) | 1628 else if (s390_stack_size > 1 << 16) |
1643 error ("stack size must not be greater than 64k"); | 1629 error ("stack size must not be greater than 64k"); |
1644 } | 1630 } |
1645 else if (s390_stack_guard) | 1631 else if (s390_stack_guard) |
1646 error ("-mstack-guard implies use of -mstack-size"); | 1632 error ("-mstack-guard implies use of -mstack-size"); |
1647 | 1633 |
1648 #ifdef TARGET_DEFAULT_LONG_DOUBLE_128 | 1634 #ifdef TARGET_DEFAULT_LONG_DOUBLE_128 |
1649 if (!(target_flags_explicit & MASK_LONG_DOUBLE_128)) | 1635 if (!(target_flags_explicit & MASK_LONG_DOUBLE_128)) |
1650 target_flags |= MASK_LONG_DOUBLE_128; | 1636 target_flags |= MASK_LONG_DOUBLE_128; |
1651 #endif | 1637 #endif |
1833 if (GET_CODE (base) == UNSPEC) | 1819 if (GET_CODE (base) == UNSPEC) |
1834 switch (XINT (base, 1)) | 1820 switch (XINT (base, 1)) |
1835 { | 1821 { |
1836 case UNSPEC_LTREF: | 1822 case UNSPEC_LTREF: |
1837 if (!disp) | 1823 if (!disp) |
1838 disp = gen_rtx_UNSPEC (Pmode, | 1824 disp = gen_rtx_UNSPEC (Pmode, |
1839 gen_rtvec (1, XVECEXP (base, 0, 0)), | 1825 gen_rtvec (1, XVECEXP (base, 0, 0)), |
1840 UNSPEC_LTREL_OFFSET); | 1826 UNSPEC_LTREL_OFFSET); |
1841 else | 1827 else |
1842 return false; | 1828 return false; |
1843 | 1829 |
1853 | 1839 |
1854 default: | 1840 default: |
1855 return false; | 1841 return false; |
1856 } | 1842 } |
1857 | 1843 |
1858 if (!REG_P (base) | 1844 if (!REG_P (base) |
1859 || (GET_MODE (base) != SImode | 1845 || (GET_MODE (base) != SImode |
1860 && GET_MODE (base) != Pmode)) | 1846 && GET_MODE (base) != Pmode)) |
1861 return false; | 1847 return false; |
1862 | 1848 |
1863 if (REGNO (base) == STACK_POINTER_REGNUM | 1849 if (REGNO (base) == STACK_POINTER_REGNUM |
1864 || REGNO (base) == FRAME_POINTER_REGNUM | 1850 || REGNO (base) == FRAME_POINTER_REGNUM |
1881 if (GET_CODE (indx) == UNSPEC) | 1867 if (GET_CODE (indx) == UNSPEC) |
1882 switch (XINT (indx, 1)) | 1868 switch (XINT (indx, 1)) |
1883 { | 1869 { |
1884 case UNSPEC_LTREF: | 1870 case UNSPEC_LTREF: |
1885 if (!disp) | 1871 if (!disp) |
1886 disp = gen_rtx_UNSPEC (Pmode, | 1872 disp = gen_rtx_UNSPEC (Pmode, |
1887 gen_rtvec (1, XVECEXP (indx, 0, 0)), | 1873 gen_rtvec (1, XVECEXP (indx, 0, 0)), |
1888 UNSPEC_LTREL_OFFSET); | 1874 UNSPEC_LTREL_OFFSET); |
1889 else | 1875 else |
1890 return false; | 1876 return false; |
1891 | 1877 |
1901 | 1887 |
1902 default: | 1888 default: |
1903 return false; | 1889 return false; |
1904 } | 1890 } |
1905 | 1891 |
1906 if (!REG_P (indx) | 1892 if (!REG_P (indx) |
1907 || (GET_MODE (indx) != SImode | 1893 || (GET_MODE (indx) != SImode |
1908 && GET_MODE (indx) != Pmode)) | 1894 && GET_MODE (indx) != Pmode)) |
1909 return false; | 1895 return false; |
1910 | 1896 |
1911 if (REGNO (indx) == STACK_POINTER_REGNUM | 1897 if (REGNO (indx) == STACK_POINTER_REGNUM |
1933 } | 1919 } |
1934 | 1920 |
1935 /* Validate displacement. */ | 1921 /* Validate displacement. */ |
1936 if (!disp) | 1922 if (!disp) |
1937 { | 1923 { |
1938 /* If virtual registers are involved, the displacement will change later | 1924 /* If virtual registers are involved, the displacement will change later |
1939 anyway as the virtual registers get eliminated. This could make a | 1925 anyway as the virtual registers get eliminated. This could make a |
1940 valid displacement invalid, but it is more likely to make an invalid | 1926 valid displacement invalid, but it is more likely to make an invalid |
1941 displacement valid, because we sometimes access the register save area | 1927 displacement valid, because we sometimes access the register save area |
1942 via negative offsets to one of those registers. | 1928 via negative offsets to one of those registers. |
1943 Thus we don't check the displacement for validity here. If after | 1929 Thus we don't check the displacement for validity here. If after |
1944 elimination the displacement turns out to be invalid after all, | 1930 elimination the displacement turns out to be invalid after all, |
1945 this is fixed up by reload in any case. */ | 1931 this is fixed up by reload in any case. */ |
1946 if (base != arg_pointer_rtx | 1932 if (base != arg_pointer_rtx |
1947 && indx != arg_pointer_rtx | 1933 && indx != arg_pointer_rtx |
1948 && base != return_address_pointer_rtx | 1934 && base != return_address_pointer_rtx |
1949 && indx != return_address_pointer_rtx | 1935 && indx != return_address_pointer_rtx |
1950 && base != frame_pointer_rtx | 1936 && base != frame_pointer_rtx |
1951 && indx != frame_pointer_rtx | 1937 && indx != frame_pointer_rtx |
1952 && base != virtual_stack_vars_rtx | 1938 && base != virtual_stack_vars_rtx |
1953 && indx != virtual_stack_vars_rtx) | 1939 && indx != virtual_stack_vars_rtx) |
1954 if (!DISP_IN_RANGE (offset)) | 1940 if (!DISP_IN_RANGE (offset)) |
1955 return false; | 1941 return false; |
1956 } | 1942 } |
1957 else | 1943 else |
2336 } | 2322 } |
2337 | 2323 |
2338 | 2324 |
2339 /* Compute a (partial) cost for rtx X. Return true if the complete | 2325 /* Compute a (partial) cost for rtx X. Return true if the complete |
2340 cost has been computed, and false if subexpressions should be | 2326 cost has been computed, and false if subexpressions should be |
2341 scanned. In either case, *TOTAL contains the cost result. | 2327 scanned. In either case, *TOTAL contains the cost result. |
2342 CODE contains GET_CODE (x), OUTER_CODE contains the code | 2328 CODE contains GET_CODE (x), OUTER_CODE contains the code |
2343 of the superexpression of x. */ | 2329 of the superexpression of x. */ |
2344 | 2330 |
2345 static bool | 2331 static bool |
2346 s390_rtx_costs (rtx x, int code, int outer_code, int *total, | 2332 s390_rtx_costs (rtx x, int code, int outer_code, int *total, |
2347 bool speed ATTRIBUTE_UNUSED) | 2333 bool speed ATTRIBUTE_UNUSED) |
2380 /* This is the multiply and add case. */ | 2366 /* This is the multiply and add case. */ |
2381 if (GET_MODE (x) == DFmode) | 2367 if (GET_MODE (x) == DFmode) |
2382 *total = s390_cost->madbr; | 2368 *total = s390_cost->madbr; |
2383 else | 2369 else |
2384 *total = s390_cost->maebr; | 2370 *total = s390_cost->maebr; |
2385 *total += rtx_cost (XEXP (XEXP (x, 0), 0), MULT, speed) | 2371 *total += (rtx_cost (XEXP (XEXP (x, 0), 0), MULT, speed) |
2386 + rtx_cost (XEXP (XEXP (x, 0), 1), MULT, speed) | 2372 + rtx_cost (XEXP (XEXP (x, 0), 1), MULT, speed) |
2387 + rtx_cost (XEXP (x, 1), code, speed); | 2373 + rtx_cost (XEXP (x, 1), (enum rtx_code) code, speed)); |
2388 return true; /* Do not do an additional recursive descent. */ | 2374 return true; /* Do not do an additional recursive descent. */ |
2389 } | 2375 } |
2390 *total = COSTS_N_INSNS (1); | 2376 *total = COSTS_N_INSNS (1); |
2391 return false; | 2377 return false; |
2392 | 2378 |
2393 case MULT: | 2379 case MULT: |
2394 switch (GET_MODE (x)) | 2380 switch (GET_MODE (x)) |
2395 { | 2381 { |
2396 case SImode: | 2382 case SImode: |
2397 { | 2383 { |
2398 rtx left = XEXP (x, 0); | 2384 rtx left = XEXP (x, 0); |
3110 | 3096 |
3111 | 3097 |
3112 /* Return true if ADDR is a valid memory address. | 3098 /* Return true if ADDR is a valid memory address. |
3113 STRICT specifies whether strict register checking applies. */ | 3099 STRICT specifies whether strict register checking applies. */ |
3114 | 3100 |
3115 bool | 3101 static bool |
3116 legitimate_address_p (enum machine_mode mode, rtx addr, int strict) | 3102 s390_legitimate_address_p (enum machine_mode mode, rtx addr, bool strict) |
3117 { | 3103 { |
3118 struct s390_address ad; | 3104 struct s390_address ad; |
3119 | 3105 |
3120 if (TARGET_Z10 | 3106 if (TARGET_Z10 |
3121 && larl_operand (addr, VOIDmode) | 3107 && larl_operand (addr, VOIDmode) |
3134 if (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))) | 3120 if (ad.indx && !REGNO_OK_FOR_INDEX_P (REGNO (ad.indx))) |
3135 return false; | 3121 return false; |
3136 } | 3122 } |
3137 else | 3123 else |
3138 { | 3124 { |
3139 if (ad.base | 3125 if (ad.base |
3140 && !(REGNO (ad.base) >= FIRST_PSEUDO_REGISTER | 3126 && !(REGNO (ad.base) >= FIRST_PSEUDO_REGISTER |
3141 || REGNO_REG_CLASS (REGNO (ad.base)) == ADDR_REGS)) | 3127 || REGNO_REG_CLASS (REGNO (ad.base)) == ADDR_REGS)) |
3142 return false; | 3128 return false; |
3143 | 3129 |
3144 if (ad.indx | 3130 if (ad.indx |
3145 && !(REGNO (ad.indx) >= FIRST_PSEUDO_REGISTER | 3131 && !(REGNO (ad.indx) >= FIRST_PSEUDO_REGISTER |
3146 || REGNO_REG_CLASS (REGNO (ad.indx)) == ADDR_REGS)) | 3132 || REGNO_REG_CLASS (REGNO (ad.indx)) == ADDR_REGS)) |
3147 return false; | 3133 return false; |
3148 } | 3134 } |
3220 compute the address as an offset from the GOT, whose base is in | 3206 compute the address as an offset from the GOT, whose base is in |
3221 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to | 3207 the PIC reg. Static data objects have SYMBOL_FLAG_LOCAL set to |
3222 differentiate them from global data objects. The returned | 3208 differentiate them from global data objects. The returned |
3223 address is the PIC reg + an unspec constant. | 3209 address is the PIC reg + an unspec constant. |
3224 | 3210 |
3225 GO_IF_LEGITIMATE_ADDRESS rejects symbolic references unless the PIC | 3211 TARGET_LEGITIMIZE_ADDRESS_P rejects symbolic references unless the PIC |
3226 reg also appears in the address. */ | 3212 reg also appears in the address. */ |
3227 | 3213 |
3228 rtx | 3214 rtx |
3229 legitimize_pic_address (rtx orig, rtx reg) | 3215 legitimize_pic_address (rtx orig, rtx reg) |
3230 { | 3216 { |
3384 /* Everything else cannot happen. */ | 3370 /* Everything else cannot happen. */ |
3385 default: | 3371 default: |
3386 gcc_unreachable (); | 3372 gcc_unreachable (); |
3387 } | 3373 } |
3388 } | 3374 } |
3389 else | 3375 else |
3390 gcc_assert (GET_CODE (addr) == PLUS); | 3376 gcc_assert (GET_CODE (addr) == PLUS); |
3391 } | 3377 } |
3392 if (GET_CODE (addr) == PLUS) | 3378 if (GET_CODE (addr) == PLUS) |
3393 { | 3379 { |
3394 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1); | 3380 rtx op0 = XEXP (addr, 0), op1 = XEXP (addr, 1); |
3759 MODE is the mode of the operand pointed to by X. | 3745 MODE is the mode of the operand pointed to by X. |
3760 | 3746 |
3761 When -fpic is used, special handling is needed for symbolic references. | 3747 When -fpic is used, special handling is needed for symbolic references. |
3762 See comments by legitimize_pic_address for details. */ | 3748 See comments by legitimize_pic_address for details. */ |
3763 | 3749 |
3764 rtx | 3750 static rtx |
3765 legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED, | 3751 s390_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED, |
3766 enum machine_mode mode ATTRIBUTE_UNUSED) | 3752 enum machine_mode mode ATTRIBUTE_UNUSED) |
3767 { | 3753 { |
3768 rtx constant_term = const0_rtx; | 3754 rtx constant_term = const0_rtx; |
3769 | 3755 |
3770 if (TLS_SYMBOLIC_CONST (x)) | 3756 if (TLS_SYMBOLIC_CONST (x)) |
3771 { | 3757 { |
3772 x = legitimize_tls_address (x, 0); | 3758 x = legitimize_tls_address (x, 0); |
3773 | 3759 |
3774 if (legitimate_address_p (mode, x, FALSE)) | 3760 if (s390_legitimate_address_p (mode, x, FALSE)) |
3775 return x; | 3761 return x; |
3776 } | 3762 } |
3777 else if (GET_CODE (x) == PLUS | 3763 else if (GET_CODE (x) == PLUS |
3778 && (TLS_SYMBOLIC_CONST (XEXP (x, 0)) | 3764 && (TLS_SYMBOLIC_CONST (XEXP (x, 0)) |
3779 || TLS_SYMBOLIC_CONST (XEXP (x, 1)))) | 3765 || TLS_SYMBOLIC_CONST (XEXP (x, 1)))) |
3780 { | 3766 { |
3781 return x; | 3767 return x; |
3782 } | 3768 } |
3783 else if (flag_pic) | 3769 else if (flag_pic) |
3786 || (GET_CODE (x) == PLUS | 3772 || (GET_CODE (x) == PLUS |
3787 && (SYMBOLIC_CONST (XEXP (x, 0)) | 3773 && (SYMBOLIC_CONST (XEXP (x, 0)) |
3788 || SYMBOLIC_CONST (XEXP (x, 1))))) | 3774 || SYMBOLIC_CONST (XEXP (x, 1))))) |
3789 x = legitimize_pic_address (x, 0); | 3775 x = legitimize_pic_address (x, 0); |
3790 | 3776 |
3791 if (legitimate_address_p (mode, x, FALSE)) | 3777 if (s390_legitimate_address_p (mode, x, FALSE)) |
3792 return x; | 3778 return x; |
3793 } | 3779 } |
3794 | 3780 |
3795 x = eliminate_constant_term (x, &constant_term); | 3781 x = eliminate_constant_term (x, &constant_term); |
3796 | 3782 |
3852 operand. If we find one, push the reload and return the new address. | 3838 |
3853 | 3839 |
3854 MODE is the mode of the enclosing MEM. OPNUM is the operand number | 3840 MODE is the mode of the enclosing MEM. OPNUM is the operand number |
3855 and TYPE is the reload type of the current reload. */ | 3841 and TYPE is the reload type of the current reload. */ |
3856 | 3842 |
3857 rtx | 3843 rtx |
3858 legitimize_reload_address (rtx ad, enum machine_mode mode ATTRIBUTE_UNUSED, | 3844 legitimize_reload_address (rtx ad, enum machine_mode mode ATTRIBUTE_UNUSED, |
3859 int opnum, int type) | 3845 int opnum, int type) |
3860 { | 3846 { |
3861 if (!optimize || TARGET_LONG_DISPLACEMENT) | 3847 if (!optimize || TARGET_LONG_DISPLACEMENT) |
3862 return NULL_RTX; | 3848 return NULL_RTX; |
3884 | 3870 |
3885 tem = gen_rtx_PLUS (Pmode, XEXP (ad, 0), cst); | 3871 tem = gen_rtx_PLUS (Pmode, XEXP (ad, 0), cst); |
3886 new_rtx = gen_rtx_PLUS (Pmode, tem, GEN_INT (lower)); | 3872 new_rtx = gen_rtx_PLUS (Pmode, tem, GEN_INT (lower)); |
3887 | 3873 |
3888 push_reload (XEXP (tem, 1), 0, &XEXP (tem, 1), 0, | 3874 push_reload (XEXP (tem, 1), 0, &XEXP (tem, 1), 0, |
3889 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0, | 3875 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0, |
3890 opnum, (enum reload_type) type); | 3876 opnum, (enum reload_type) type); |
3891 return new_rtx; | 3877 return new_rtx; |
3892 } | 3878 } |
3893 | 3879 |
3894 return NULL_RTX; | 3880 return NULL_RTX; |
3934 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX)); | 3920 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX)); |
3935 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX)); | 3921 emit_move_insn (src_addr, force_operand (XEXP (src, 0), NULL_RTX)); |
3936 dst = change_address (dst, VOIDmode, dst_addr); | 3922 dst = change_address (dst, VOIDmode, dst_addr); |
3937 src = change_address (src, VOIDmode, src_addr); | 3923 src = change_address (src, VOIDmode, src_addr); |
3938 | 3924 |
3939 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0); | 3925 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, |
3926 OPTAB_DIRECT); | |
3940 if (temp != count) | 3927 if (temp != count) |
3941 emit_move_insn (count, temp); | 3928 emit_move_insn (count, temp); |
3942 | 3929 |
3943 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0); | 3930 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, |
3931 OPTAB_DIRECT); | |
3944 if (temp != blocks) | 3932 if (temp != blocks) |
3945 emit_move_insn (blocks, temp); | 3933 emit_move_insn (blocks, temp); |
3946 | 3934 |
3947 emit_cmp_and_jump_insns (blocks, const0_rtx, | 3935 emit_cmp_and_jump_insns (blocks, const0_rtx, |
3948 EQ, NULL_RTX, mode, 1, loop_end_label); | 3936 EQ, NULL_RTX, mode, 1, loop_end_label); |
3953 s390_load_address (dst_addr, | 3941 s390_load_address (dst_addr, |
3954 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256))); | 3942 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256))); |
3955 s390_load_address (src_addr, | 3943 s390_load_address (src_addr, |
3956 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256))); | 3944 gen_rtx_PLUS (Pmode, src_addr, GEN_INT (256))); |
3957 | 3945 |
3958 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0); | 3946 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, |
3947 OPTAB_DIRECT); | |
3959 if (temp != blocks) | 3948 if (temp != blocks) |
3960 emit_move_insn (blocks, temp); | 3949 emit_move_insn (blocks, temp); |
3961 | 3950 |
3962 emit_cmp_and_jump_insns (blocks, const0_rtx, | 3951 emit_cmp_and_jump_insns (blocks, const0_rtx, |
3963 EQ, NULL_RTX, mode, 1, loop_end_label); | 3952 EQ, NULL_RTX, mode, 1, loop_end_label); |
3979 { | 3968 { |
3980 if (GET_CODE (len) == CONST_INT && INTVAL (len) == 0) | 3969 if (GET_CODE (len) == CONST_INT && INTVAL (len) == 0) |
3981 return; | 3970 return; |
3982 | 3971 |
3983 gcc_assert (GET_CODE (val) == CONST_INT || GET_MODE (val) == QImode); | 3972 gcc_assert (GET_CODE (val) == CONST_INT || GET_MODE (val) == QImode); |
3984 | 3973 |
3985 if (GET_CODE (len) == CONST_INT && INTVAL (len) > 0 && INTVAL (len) <= 257) | 3974 if (GET_CODE (len) == CONST_INT && INTVAL (len) > 0 && INTVAL (len) <= 257) |
3986 { | 3975 { |
3987 if (val == const0_rtx && INTVAL (len) <= 256) | 3976 if (val == const0_rtx && INTVAL (len) <= 256) |
3988 emit_insn (gen_clrmem_short (dst, GEN_INT (INTVAL (len) - 1))); | 3977 emit_insn (gen_clrmem_short (dst, GEN_INT (INTVAL (len) - 1))); |
3989 else | 3978 else |
3990 { | 3979 { |
3991 /* Initialize memory by storing the first byte. */ | 3980 /* Initialize memory by storing the first byte. */ |
3992 emit_move_insn (adjust_address (dst, QImode, 0), val); | 3981 emit_move_insn (adjust_address (dst, QImode, 0), val); |
3993 | 3982 |
3994 if (INTVAL (len) > 1) | 3983 if (INTVAL (len) > 1) |
3995 { | 3984 { |
3996 /* Initiate 1 byte overlap move. | 3985 /* Initiate 1 byte overlap move. |
3997 The first byte of DST is propagated through DSTP1. | 3986 The first byte of DST is propagated through DSTP1. |
3998 Prepare a movmem for: DST+1 = DST (length = LEN - 1). | 3987 Prepare a movmem for: DST+1 = DST (length = LEN - 1). |
3999 DST is set to size 1 so the rest of the memory location | 3988 DST is set to size 1 so the rest of the memory location |
4000 does not count as source operand. */ | 3989 does not count as source operand. */ |
4001 rtx dstp1 = adjust_address (dst, VOIDmode, 1); | 3990 rtx dstp1 = adjust_address (dst, VOIDmode, 1); |
4002 set_mem_size (dst, const1_rtx); | 3991 set_mem_size (dst, const1_rtx); |
4003 | 3992 |
4004 emit_insn (gen_movmem_short (dstp1, dst, | 3993 emit_insn (gen_movmem_short (dstp1, dst, |
4005 GEN_INT (INTVAL (len) - 2))); | 3994 GEN_INT (INTVAL (len) - 2))); |
4006 } | 3995 } |
4007 } | 3996 } |
4008 } | 3997 } |
4009 | 3998 |
4036 | 4025 |
4037 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX)); | 4026 emit_move_insn (dst_addr, force_operand (XEXP (dst, 0), NULL_RTX)); |
4038 dst = change_address (dst, VOIDmode, dst_addr); | 4027 dst = change_address (dst, VOIDmode, dst_addr); |
4039 | 4028 |
4040 if (val == const0_rtx) | 4029 if (val == const0_rtx) |
4041 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0); | 4030 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, |
4031 OPTAB_DIRECT); | |
4042 else | 4032 else |
4043 { | 4033 { |
4044 dstp1 = adjust_address (dst, VOIDmode, 1); | 4034 dstp1 = adjust_address (dst, VOIDmode, 1); |
4045 set_mem_size (dst, const1_rtx); | 4035 set_mem_size (dst, const1_rtx); |
4046 | 4036 |
4047 /* Initialize memory by storing the first byte. */ | 4037 /* Initialize memory by storing the first byte. */ |
4048 emit_move_insn (adjust_address (dst, QImode, 0), val); | 4038 emit_move_insn (adjust_address (dst, QImode, 0), val); |
4049 | 4039 |
4050 /* If count is 1 we are done. */ | 4040 /* If count is 1 we are done. */ |
4051 emit_cmp_and_jump_insns (count, const1_rtx, | 4041 emit_cmp_and_jump_insns (count, const1_rtx, |
4052 EQ, NULL_RTX, mode, 1, end_label); | 4042 EQ, NULL_RTX, mode, 1, end_label); |
4053 | 4043 |
4054 temp = expand_binop (mode, add_optab, count, GEN_INT (-2), count, 1, 0); | 4044 temp = expand_binop (mode, add_optab, count, GEN_INT (-2), count, 1, |
4045 OPTAB_DIRECT); | |
4055 } | 4046 } |
4056 if (temp != count) | 4047 if (temp != count) |
4057 emit_move_insn (count, temp); | 4048 emit_move_insn (count, temp); |
4058 | 4049 |
4059 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0); | 4050 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, |
4051 OPTAB_DIRECT); | |
4060 if (temp != blocks) | 4052 if (temp != blocks) |
4061 emit_move_insn (blocks, temp); | 4053 emit_move_insn (blocks, temp); |
4062 | 4054 |
4063 emit_cmp_and_jump_insns (blocks, const0_rtx, | 4055 emit_cmp_and_jump_insns (blocks, const0_rtx, |
4064 EQ, NULL_RTX, mode, 1, loop_end_label); | 4056 EQ, NULL_RTX, mode, 1, loop_end_label); |
4070 else | 4062 else |
4071 emit_insn (gen_movmem_short (dstp1, dst, GEN_INT (255))); | 4063 emit_insn (gen_movmem_short (dstp1, dst, GEN_INT (255))); |
4072 s390_load_address (dst_addr, | 4064 s390_load_address (dst_addr, |
4073 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256))); | 4065 gen_rtx_PLUS (Pmode, dst_addr, GEN_INT (256))); |
4074 | 4066 |
4075 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0); | 4067 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, |
4068 OPTAB_DIRECT); | |
4076 if (temp != blocks) | 4069 if (temp != blocks) |
4077 emit_move_insn (blocks, temp); | 4070 emit_move_insn (blocks, temp); |
4078 | 4071 |
4079 emit_cmp_and_jump_insns (blocks, const0_rtx, | 4072 emit_cmp_and_jump_insns (blocks, const0_rtx, |
4080 EQ, NULL_RTX, mode, 1, loop_end_label); | 4073 EQ, NULL_RTX, mode, 1, loop_end_label); |
4142 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX)); | 4135 emit_move_insn (addr0, force_operand (XEXP (op0, 0), NULL_RTX)); |
4143 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX)); | 4136 emit_move_insn (addr1, force_operand (XEXP (op1, 0), NULL_RTX)); |
4144 op0 = change_address (op0, VOIDmode, addr0); | 4137 op0 = change_address (op0, VOIDmode, addr0); |
4145 op1 = change_address (op1, VOIDmode, addr1); | 4138 op1 = change_address (op1, VOIDmode, addr1); |
4146 | 4139 |
4147 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, 0); | 4140 temp = expand_binop (mode, add_optab, count, constm1_rtx, count, 1, |
4141 OPTAB_DIRECT); | |
4148 if (temp != count) | 4142 if (temp != count) |
4149 emit_move_insn (count, temp); | 4143 emit_move_insn (count, temp); |
4150 | 4144 |
4151 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, 0); | 4145 temp = expand_binop (mode, lshr_optab, count, GEN_INT (8), blocks, 1, |
4146 OPTAB_DIRECT); | |
4152 if (temp != blocks) | 4147 if (temp != blocks) |
4153 emit_move_insn (blocks, temp); | 4148 emit_move_insn (blocks, temp); |
4154 | 4149 |
4155 emit_cmp_and_jump_insns (blocks, const0_rtx, | 4150 emit_cmp_and_jump_insns (blocks, const0_rtx, |
4156 EQ, NULL_RTX, mode, 1, loop_end_label); | 4151 EQ, NULL_RTX, mode, 1, loop_end_label); |
4167 s390_load_address (addr0, | 4162 s390_load_address (addr0, |
4168 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256))); | 4163 gen_rtx_PLUS (Pmode, addr0, GEN_INT (256))); |
4169 s390_load_address (addr1, | 4164 s390_load_address (addr1, |
4170 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256))); | 4165 gen_rtx_PLUS (Pmode, addr1, GEN_INT (256))); |
4171 | 4166 |
4172 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, 0); | 4167 temp = expand_binop (mode, add_optab, blocks, constm1_rtx, blocks, 1, |
4168 OPTAB_DIRECT); | |
4173 if (temp != blocks) | 4169 if (temp != blocks) |
4174 emit_move_insn (blocks, temp); | 4170 emit_move_insn (blocks, temp); |
4175 | 4171 |
4176 emit_cmp_and_jump_insns (blocks, const0_rtx, | 4172 emit_cmp_and_jump_insns (blocks, const0_rtx, |
4177 EQ, NULL_RTX, mode, 1, loop_end_label); | 4173 EQ, NULL_RTX, mode, 1, loop_end_label); |
4288 op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, src); | 4284 op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, src); |
4289 op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, const0_rtx); | 4285 op_res = gen_rtx_PLUS (GET_MODE (dst), op_res, const0_rtx); |
4290 } | 4286 } |
4291 | 4287 |
4292 p = rtvec_alloc (2); | 4288 p = rtvec_alloc (2); |
4293 RTVEC_ELT (p, 0) = | 4289 RTVEC_ELT (p, 0) = |
4294 gen_rtx_SET (VOIDmode, dst, op_res); | 4290 gen_rtx_SET (VOIDmode, dst, op_res); |
4295 RTVEC_ELT (p, 1) = | 4291 RTVEC_ELT (p, 1) = |
4296 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM)); | 4292 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM)); |
4297 emit_insn (gen_rtx_PARALLEL (VOIDmode, p)); | 4293 emit_insn (gen_rtx_PARALLEL (VOIDmode, p)); |
4298 | 4294 |
4299 return true; | 4295 return true; |
4300 } | 4296 } |
4349 | 4345 |
4350 /* Emit SLB instruction pattern. */ | 4346 /* Emit SLB instruction pattern. */ |
4351 if (!register_operand (src, GET_MODE (dst))) | 4347 if (!register_operand (src, GET_MODE (dst))) |
4352 src = force_reg (GET_MODE (dst), src); | 4348 src = force_reg (GET_MODE (dst), src); |
4353 | 4349 |
4354 op_res = gen_rtx_MINUS (GET_MODE (dst), | 4350 op_res = gen_rtx_MINUS (GET_MODE (dst), |
4355 gen_rtx_MINUS (GET_MODE (dst), src, const0_rtx), | 4351 gen_rtx_MINUS (GET_MODE (dst), src, const0_rtx), |
4356 gen_rtx_fmt_ee (cmp_code, GET_MODE (dst), | 4352 gen_rtx_fmt_ee (cmp_code, GET_MODE (dst), |
4357 gen_rtx_REG (cc_mode, CC_REGNUM), | 4353 gen_rtx_REG (cc_mode, CC_REGNUM), |
4358 const0_rtx)); | 4354 const0_rtx)); |
4359 p = rtvec_alloc (2); | 4355 p = rtvec_alloc (2); |
4360 RTVEC_ELT (p, 0) = | 4356 RTVEC_ELT (p, 0) = |
4361 gen_rtx_SET (VOIDmode, dst, op_res); | 4357 gen_rtx_SET (VOIDmode, dst, op_res); |
4362 RTVEC_ELT (p, 1) = | 4358 RTVEC_ELT (p, 1) = |
4363 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM)); | 4359 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (CCmode, CC_REGNUM)); |
4364 emit_insn (gen_rtx_PARALLEL (VOIDmode, p)); | 4360 emit_insn (gen_rtx_PARALLEL (VOIDmode, p)); |
4365 | 4361 |
4366 return true; | 4362 return true; |
4367 } | 4363 } |
4417 | 4413 |
4418 dest = adjust_address (dest, BLKmode, 0); | 4414 dest = adjust_address (dest, BLKmode, 0); |
4419 set_mem_size (dest, GEN_INT (size)); | 4415 set_mem_size (dest, GEN_INT (size)); |
4420 s390_expand_movmem (dest, src_mem, GEN_INT (size)); | 4416 s390_expand_movmem (dest, src_mem, GEN_INT (size)); |
4421 } | 4417 } |
4422 | 4418 |
4423 /* (set (ze (mem)) (reg)). */ | 4419 /* (set (ze (mem)) (reg)). */ |
4424 else if (register_operand (src, word_mode)) | 4420 else if (register_operand (src, word_mode)) |
4425 { | 4421 { |
4426 if (bitsize <= GET_MODE_BITSIZE (SImode)) | 4422 if (bitsize <= GET_MODE_BITSIZE (SImode)) |
4427 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, op1, | 4423 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, op1, |
4430 { | 4426 { |
4431 /* Emit st,stcmh sequence. */ | 4427 /* Emit st,stcmh sequence. */ |
4432 int stcmh_width = bitsize - GET_MODE_BITSIZE (SImode); | 4428 int stcmh_width = bitsize - GET_MODE_BITSIZE (SImode); |
4433 int size = stcmh_width / BITS_PER_UNIT; | 4429 int size = stcmh_width / BITS_PER_UNIT; |
4434 | 4430 |
4435 emit_move_insn (adjust_address (dest, SImode, size), | 4431 emit_move_insn (adjust_address (dest, SImode, size), |
4436 gen_lowpart (SImode, src)); | 4432 gen_lowpart (SImode, src)); |
4437 set_mem_size (dest, GEN_INT (size)); | 4433 set_mem_size (dest, GEN_INT (size)); |
4438 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, GEN_INT | 4434 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, GEN_INT |
4439 (stcmh_width), const0_rtx), | 4435 (stcmh_width), const0_rtx), |
4440 gen_rtx_LSHIFTRT (word_mode, src, GEN_INT | 4436 gen_rtx_LSHIFTRT (word_mode, src, GEN_INT |
4447 return true; | 4443 return true; |
4448 } | 4444 } |
4449 | 4445 |
4450 /* (set (ze (reg)) (const_int)). */ | 4446 /* (set (ze (reg)) (const_int)). */ |
4451 if (TARGET_ZARCH | 4447 if (TARGET_ZARCH |
4452 && register_operand (dest, word_mode) | 4448 && register_operand (dest, word_mode) |
4453 && (bitpos % 16) == 0 | 4449 && (bitpos % 16) == 0 |
4454 && (bitsize % 16) == 0 | 4450 && (bitsize % 16) == 0 |
4455 && const_int_operand (src, VOIDmode)) | 4451 && const_int_operand (src, VOIDmode)) |
4456 { | 4452 { |
4457 HOST_WIDE_INT val = INTVAL (src); | 4453 HOST_WIDE_INT val = INTVAL (src); |
4467 else | 4463 else |
4468 putmode = HImode; | 4464 putmode = HImode; |
4469 | 4465 |
4470 putsize = GET_MODE_BITSIZE (putmode); | 4466 putsize = GET_MODE_BITSIZE (putmode); |
4471 regpos -= putsize; | 4467 regpos -= putsize; |
4472 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, | 4468 emit_move_insn (gen_rtx_ZERO_EXTRACT (word_mode, dest, |
4473 GEN_INT (putsize), | 4469 GEN_INT (putsize), |
4474 GEN_INT (regpos)), | 4470 GEN_INT (regpos)), |
4475 gen_int_mode (val, putmode)); | 4471 gen_int_mode (val, putmode)); |
4476 val >>= putsize; | 4472 val >>= putsize; |
4477 } | 4473 } |
4478 gcc_assert (regpos == bitpos); | 4474 gcc_assert (regpos == bitpos); |
4479 return true; | 4475 return true; |
4488 static inline rtx | 4484 static inline rtx |
4489 s390_expand_mask_and_shift (rtx val, enum machine_mode mode, rtx count) | 4485 s390_expand_mask_and_shift (rtx val, enum machine_mode mode, rtx count) |
4490 { | 4486 { |
4491 val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)), | 4487 val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)), |
4492 NULL_RTX, 1, OPTAB_DIRECT); | 4488 NULL_RTX, 1, OPTAB_DIRECT); |
4493 return expand_simple_binop (SImode, ASHIFT, val, count, | 4489 return expand_simple_binop (SImode, ASHIFT, val, count, |
4494 NULL_RTX, 1, OPTAB_DIRECT); | 4490 NULL_RTX, 1, OPTAB_DIRECT); |
4495 } | 4491 } |
4496 | 4492 |
4497 /* Structure to hold the initial parameters for a compare_and_swap operation | 4493 /* Structure to hold the initial parameters for a compare_and_swap operation |
4498 in HImode and QImode. */ | 4494 in HImode and QImode. */ |
4499 | 4495 |
4500 struct alignment_context | 4496 struct alignment_context |
4501 { | 4497 { |
4502 rtx memsi; /* SI aligned memory location. */ | 4498 rtx memsi; /* SI aligned memory location. */ |
4503 rtx shift; /* Bit offset with regard to lsb. */ | 4499 rtx shift; /* Bit offset with regard to lsb. */ |
4504 rtx modemask; /* Mask of the HQImode shifted by SHIFT bits. */ | 4500 rtx modemask; /* Mask of the HQImode shifted by SHIFT bits. */ |
4505 rtx modemaski; /* ~modemask */ | 4501 rtx modemaski; /* ~modemask */ |
4506 bool aligned; /* True if memory is aligned, false else. */ | 4502 bool aligned; /* True if memory is aligned, false else. */ |
4507 }; | 4503 }; |
4549 } | 4545 } |
4550 /* Shift is the byte count, but we need the bitcount. */ | 4546 /* Shift is the byte count, but we need the bitcount. */ |
4551 ac->shift = expand_simple_binop (SImode, MULT, ac->shift, GEN_INT (BITS_PER_UNIT), | 4547 ac->shift = expand_simple_binop (SImode, MULT, ac->shift, GEN_INT (BITS_PER_UNIT), |
4552 NULL_RTX, 1, OPTAB_DIRECT); | 4548 NULL_RTX, 1, OPTAB_DIRECT); |
4553 /* Calculate masks. */ | 4549 /* Calculate masks. */ |
4554 ac->modemask = expand_simple_binop (SImode, ASHIFT, | 4550 ac->modemask = expand_simple_binop (SImode, ASHIFT, |
4555 GEN_INT (GET_MODE_MASK (mode)), ac->shift, | 4551 GEN_INT (GET_MODE_MASK (mode)), ac->shift, |
4556 NULL_RTX, 1, OPTAB_DIRECT); | 4552 NULL_RTX, 1, OPTAB_DIRECT); |
4557 ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask, NULL_RTX, 1); | 4553 ac->modemaski = expand_simple_unop (SImode, NOT, ac->modemask, NULL_RTX, 1); |
4558 } | 4554 } |
4559 | 4555 |
4587 val = expand_simple_binop (SImode, AND, ac.memsi, ac.modemaski, | 4583 val = expand_simple_binop (SImode, AND, ac.memsi, ac.modemaski, |
4588 NULL_RTX, 1, OPTAB_DIRECT); | 4584 NULL_RTX, 1, OPTAB_DIRECT); |
4589 | 4585 |
4590 /* Start CS loop. */ | 4586 /* Start CS loop. */ |
4591 emit_label (csloop); | 4587 emit_label (csloop); |
4592 /* val = "<mem>00..0<mem>" | 4588 /* val = "<mem>00..0<mem>" |
4593 * cmp = "00..0<cmp>00..0" | 4589 * cmp = "00..0<cmp>00..0" |
4594 * new = "00..0<new>00..0" | 4590 * new = "00..0<new>00..0" |
4595 */ | 4591 */ |
4596 | 4592 |
4597 /* Patch cmp and new with val at correct position. */ | 4593 /* Patch cmp and new with val at correct position. */ |
4598 if (ac.aligned && MEM_P (cmp)) | 4594 if (ac.aligned && MEM_P (cmp)) |
4599 { | 4595 { |
4615 /* Jump to end if we're done (likely?). */ | 4611 /* Jump to end if we're done (likely?). */ |
4616 s390_emit_jump (csend, s390_emit_compare_and_swap (EQ, res, ac.memsi, | 4612 s390_emit_jump (csend, s390_emit_compare_and_swap (EQ, res, ac.memsi, |
4617 cmpv, newv)); | 4613 cmpv, newv)); |
4618 | 4614 |
4619 /* Check for changes outside mode. */ | 4615 /* Check for changes outside mode. */ |
4620 resv = expand_simple_binop (SImode, AND, res, ac.modemaski, | 4616 resv = expand_simple_binop (SImode, AND, res, ac.modemaski, |
4621 NULL_RTX, 1, OPTAB_DIRECT); | 4617 NULL_RTX, 1, OPTAB_DIRECT); |
4622 cc = s390_emit_compare (NE, resv, val); | 4618 cc = s390_emit_compare (NE, resv, val); |
4623 emit_move_insn (val, resv); | 4619 emit_move_insn (val, resv); |
4624 /* Loop internal if so. */ | 4620 /* Loop internal if so. */ |
4625 s390_emit_jump (csloop, cc); | 4621 s390_emit_jump (csloop, cc); |
4626 | 4622 |
4627 emit_label (csend); | 4623 emit_label (csend); |
4628 | 4624 |
4629 /* Return the correct part of the bitfield. */ | 4625 /* Return the correct part of the bitfield. */ |
4630 convert_move (target, expand_simple_binop (SImode, LSHIFTRT, res, ac.shift, | 4626 convert_move (target, expand_simple_binop (SImode, LSHIFTRT, res, ac.shift, |
4631 NULL_RTX, 1, OPTAB_DIRECT), 1); | 4627 NULL_RTX, 1, OPTAB_DIRECT), 1); |
4632 } | 4628 } |
4633 | 4629 |
4634 /* Expand an atomic operation CODE of mode MODE. MEM is the memory location | 4630 /* Expand an atomic operation CODE of mode MODE. MEM is the memory location |
4635 and VAL the value to play with. If AFTER is true then store the value | 4631 and VAL the value to play with. If AFTER is true then store the value |
4679 val = expand_simple_binop (SImode, code, new_rtx, orig, | 4675 val = expand_simple_binop (SImode, code, new_rtx, orig, |
4680 NULL_RTX, 1, OPTAB_DIRECT); | 4676 NULL_RTX, 1, OPTAB_DIRECT); |
4681 val = expand_simple_binop (SImode, AND, val, ac.modemask, | 4677 val = expand_simple_binop (SImode, AND, val, ac.modemask, |
4682 NULL_RTX, 1, OPTAB_DIRECT); | 4678 NULL_RTX, 1, OPTAB_DIRECT); |
4683 /* FALLTHRU */ | 4679 /* FALLTHRU */ |
4684 case SET: | 4680 case SET: |
4685 if (ac.aligned && MEM_P (val)) | 4681 if (ac.aligned && MEM_P (val)) |
4686 store_bit_field (new_rtx, GET_MODE_BITSIZE (mode), 0, SImode, val); | 4682 store_bit_field (new_rtx, GET_MODE_BITSIZE (mode), 0, SImode, val); |
4687 else | 4683 else |
4688 { | 4684 { |
4689 new_rtx = expand_simple_binop (SImode, AND, new_rtx, ac.modemaski, | 4685 new_rtx = expand_simple_binop (SImode, AND, new_rtx, ac.modemaski, |
4966 CODE specified the format flag. The following format flags | 4962 CODE specified the format flag. The following format flags |
4967 are recognized: | 4963 are recognized: |
4968 | 4964 |
4969 'C': print opcode suffix for branch condition. | 4965 'C': print opcode suffix for branch condition. |
4970 'D': print opcode suffix for inverse branch condition. | 4966 'D': print opcode suffix for inverse branch condition. |
4967 'E': print opcode suffix for branch on index instruction. | |
4971 'J': print tls_load/tls_gdcall/tls_ldcall suffix | 4968 'J': print tls_load/tls_gdcall/tls_ldcall suffix |
4972 'G': print the size of the operand in bytes. | 4969 'G': print the size of the operand in bytes. |
4973 'O': print only the displacement of a memory reference. | 4970 'O': print only the displacement of a memory reference. |
4974 'R': print only the base register of a memory reference. | 4971 'R': print only the base register of a memory reference. |
4975 'S': print S-type memory reference (base+displacement). | 4972 'S': print S-type memory reference (base+displacement). |
4996 fprintf (file, s390_branch_condition_mnemonic (x, FALSE)); | 4993 fprintf (file, s390_branch_condition_mnemonic (x, FALSE)); |
4997 return; | 4994 return; |
4998 | 4995 |
4999 case 'D': | 4996 case 'D': |
5000 fprintf (file, s390_branch_condition_mnemonic (x, TRUE)); | 4997 fprintf (file, s390_branch_condition_mnemonic (x, TRUE)); |
4998 return; | |
4999 | |
5000 case 'E': | |
5001 if (GET_CODE (x) == LE) | |
5002 fprintf (file, "l"); | |
5003 else if (GET_CODE (x) == GT) | |
5004 fprintf (file, "h"); | |
5005 else | |
5006 gcc_unreachable (); | |
5001 return; | 5007 return; |
5002 | 5008 |
5003 case 'J': | 5009 case 'J': |
5004 if (GET_CODE (x) == SYMBOL_REF) | 5010 if (GET_CODE (x) == SYMBOL_REF) |
5005 { | 5011 { |
5534 | 5540 |
5535 return new_literal; | 5541 return new_literal; |
5536 } | 5542 } |
5537 | 5543 |
5538 | 5544 |
5539 /* Find an annotated literal pool symbol referenced in RTX X, | 5545 /* Find an annotated literal pool symbol referenced in RTX X, |
5540 and store it at REF. Will abort if X contains references to | 5546 and store it at REF. Will abort if X contains references to |
5541 more than one such pool symbol; multiple references to the same | 5547 more than one such pool symbol; multiple references to the same |
5542 symbol are allowed, however. | 5548 symbol are allowed, however. |
5543 | 5549 |
5544 The rtx pointed to by REF must be initialized to NULL_RTX | 5550 The rtx pointed to by REF must be initialized to NULL_RTX |
5545 by the caller before calling this routine. */ | 5551 by the caller before calling this routine. */ |
5568 gcc_assert (GET_CODE (sym) == SYMBOL_REF | 5574 gcc_assert (GET_CODE (sym) == SYMBOL_REF |
5569 && CONSTANT_POOL_ADDRESS_P (sym)); | 5575 && CONSTANT_POOL_ADDRESS_P (sym)); |
5570 | 5576 |
5571 if (*ref == NULL_RTX) | 5577 if (*ref == NULL_RTX) |
5572 *ref = sym; | 5578 *ref = sym; |
5573 else | 5579 else |
5574 gcc_assert (*ref == sym); | 5580 gcc_assert (*ref == sym); |
5575 | 5581 |
5576 return; | 5582 return; |
5577 } | 5583 } |
5578 | 5584 |
5589 find_constant_pool_ref (XVECEXP (x, i, j), ref); | 5595 find_constant_pool_ref (XVECEXP (x, i, j), ref); |
5590 } | 5596 } |
5591 } | 5597 } |
5592 } | 5598 } |
5593 | 5599 |
5594 /* Replace every reference to the annotated literal pool | 5600 /* Replace every reference to the annotated literal pool |
5595 symbol REF in X by its base plus OFFSET. */ | 5601 symbol REF in X by its base plus OFFSET. */ |
5596 | 5602 |
5597 static void | 5603 static void |
5598 replace_constant_pool_ref (rtx *x, rtx ref, rtx offset) | 5604 replace_constant_pool_ref (rtx *x, rtx ref, rtx offset) |
5599 { | 5605 { |
6534 | 6540 |
6535 /* Insert base register reload insns before every pool. */ | 6541 /* Insert base register reload insns before every pool. */ |
6536 | 6542 |
6537 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next) | 6543 for (curr_pool = pool_list; curr_pool; curr_pool = curr_pool->next) |
6538 { | 6544 { |
6539 rtx new_insn = gen_reload_base (cfun->machine->base_reg, | 6545 rtx new_insn = gen_reload_base (cfun->machine->base_reg, |
6540 curr_pool->label); | 6546 curr_pool->label); |
6541 rtx insn = curr_pool->first_insn; | 6547 rtx insn = curr_pool->first_insn; |
6542 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1); | 6548 INSN_ADDRESSES_NEW (emit_insn_before (new_insn, insn), -1); |
6543 } | 6549 } |
6544 | 6550 |
6549 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn))) | 6555 && bitmap_bit_p (far_labels, CODE_LABEL_NUMBER (insn))) |
6550 { | 6556 { |
6551 struct constant_pool *pool = s390_find_pool (pool_list, insn); | 6557 struct constant_pool *pool = s390_find_pool (pool_list, insn); |
6552 if (pool) | 6558 if (pool) |
6553 { | 6559 { |
6554 rtx new_insn = gen_reload_base (cfun->machine->base_reg, | 6560 rtx new_insn = gen_reload_base (cfun->machine->base_reg, |
6555 pool->label); | 6561 pool->label); |
6556 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1); | 6562 INSN_ADDRESSES_NEW (emit_insn_after (new_insn, insn), -1); |
6557 } | 6563 } |
6558 } | 6564 } |
6559 | 6565 |
6786 return i; | 6792 return i; |
6787 return 0; | 6793 return 0; |
6788 } | 6794 } |
6789 | 6795 |
6790 | 6796 |
6791 /* Helper function for s390_regs_ever_clobbered. Sets the fields in DATA for all | 6797 /* Helper function for s390_regs_ever_clobbered. Sets the fields in DATA for all |
6792 clobbered hard regs in SETREG. */ | 6798 clobbered hard regs in SETREG. */ |
6793 | 6799 |
6794 static void | 6800 static void |
6795 s390_reg_clobbered_rtx (rtx setreg, const_rtx set_insn ATTRIBUTE_UNUSED, void *data) | 6801 s390_reg_clobbered_rtx (rtx setreg, const_rtx set_insn ATTRIBUTE_UNUSED, void *data) |
6796 { | 6802 { |
6844 may use the eh registers, but the code which sets these registers is not | 6850 may use the eh registers, but the code which sets these registers is not |
6845 contained in that function. Hence s390_regs_ever_clobbered is not able to | 6851 contained in that function. Hence s390_regs_ever_clobbered is not able to |
6846 deal with this automatically. */ | 6852 deal with this automatically. */ |
6847 if (crtl->calls_eh_return || cfun->machine->has_landing_pad_p) | 6853 if (crtl->calls_eh_return || cfun->machine->has_landing_pad_p) |
6848 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM ; i++) | 6854 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM ; i++) |
6849 if (crtl->calls_eh_return | 6855 if (crtl->calls_eh_return |
6850 || (cfun->machine->has_landing_pad_p | 6856 || (cfun->machine->has_landing_pad_p |
6851 && df_regs_ever_live_p (EH_RETURN_DATA_REGNO (i)))) | 6857 && df_regs_ever_live_p (EH_RETURN_DATA_REGNO (i)))) |
6852 regs_ever_clobbered[EH_RETURN_DATA_REGNO (i)] = 1; | 6858 regs_ever_clobbered[EH_RETURN_DATA_REGNO (i)] = 1; |
6853 | 6859 |
6854 /* For nonlocal gotos all call-saved registers have to be saved. | 6860 /* For nonlocal gotos all call-saved registers have to be saved. |
6855 This flag is also set for the unwinding code in libgcc. | 6861 This flag is also set for the unwinding code in libgcc. |
6864 { | 6870 { |
6865 FOR_BB_INSNS (cur_bb, cur_insn) | 6871 FOR_BB_INSNS (cur_bb, cur_insn) |
6866 { | 6872 { |
6867 if (INSN_P (cur_insn)) | 6873 if (INSN_P (cur_insn)) |
6868 note_stores (PATTERN (cur_insn), | 6874 note_stores (PATTERN (cur_insn), |
6869 s390_reg_clobbered_rtx, | 6875 s390_reg_clobbered_rtx, |
6870 regs_ever_clobbered); | 6876 regs_ever_clobbered); |
6871 } | 6877 } |
6872 } | 6878 } |
6873 } | 6879 } |
6874 | 6880 |
6875 /* Determine the frame area which actually has to be accessed | 6881 /* Determine the frame area which actually has to be accessed |
6876 in the function epilogue. The values are stored at the | 6882 in the function epilogue. The values are stored at the |
6877 given pointers AREA_BOTTOM (address of the lowest used stack | 6883 given pointers AREA_BOTTOM (address of the lowest used stack |
6878 address) and AREA_TOP (address of the first item which does | 6884 address) and AREA_TOP (address of the first item which does |
6879 not belong to the stack frame). */ | 6885 not belong to the stack frame). */ |
6880 | 6886 |
6881 static void | 6887 static void |
6882 s390_frame_area (int *area_bottom, int *area_top) | 6888 s390_frame_area (int *area_bottom, int *area_top) |
6883 { | 6889 { |
6907 if (cfun_fpr_bit_p (i)) | 6913 if (cfun_fpr_bit_p (i)) |
6908 { | 6914 { |
6909 b = MIN (b, cfun_frame_layout.f4_offset + (i - 2) * 8); | 6915 b = MIN (b, cfun_frame_layout.f4_offset + (i - 2) * 8); |
6910 t = MAX (t, cfun_frame_layout.f4_offset + (i - 1) * 8); | 6916 t = MAX (t, cfun_frame_layout.f4_offset + (i - 1) * 8); |
6911 } | 6917 } |
6912 | 6918 |
6913 *area_bottom = b; | 6919 *area_bottom = b; |
6914 *area_top = t; | 6920 *area_top = t; |
6915 } | 6921 } |
6916 | 6922 |
6917 /* Fill cfun->machine with info about register usage of current function. | 6923 /* Fill cfun->machine with info about register usage of current function. |
6946 | 6952 |
6947 if (frame_pointer_needed) | 6953 if (frame_pointer_needed) |
6948 clobbered_regs[HARD_FRAME_POINTER_REGNUM] = 1; | 6954 clobbered_regs[HARD_FRAME_POINTER_REGNUM] = 1; |
6949 | 6955 |
6950 if (flag_pic) | 6956 if (flag_pic) |
6951 clobbered_regs[PIC_OFFSET_TABLE_REGNUM] | 6957 clobbered_regs[PIC_OFFSET_TABLE_REGNUM] |
6952 |= df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM); | 6958 |= df_regs_ever_live_p (PIC_OFFSET_TABLE_REGNUM); |
6953 | 6959 |
6954 clobbered_regs[BASE_REGNUM] | 6960 clobbered_regs[BASE_REGNUM] |
6955 |= (cfun->machine->base_reg | 6961 |= (cfun->machine->base_reg |
6956 && REGNO (cfun->machine->base_reg) == BASE_REGNUM); | 6962 && REGNO (cfun->machine->base_reg) == BASE_REGNUM); |
6957 | 6963 |
6958 clobbered_regs[RETURN_REGNUM] | 6964 clobbered_regs[RETURN_REGNUM] |
6959 |= (!current_function_is_leaf | 6965 |= (!current_function_is_leaf |
6992 { | 6998 { |
6993 /* Save slots for gprs from i to j. */ | 6999 /* Save slots for gprs from i to j. */ |
6994 cfun_frame_layout.first_save_gpr_slot = i; | 7000 cfun_frame_layout.first_save_gpr_slot = i; |
6995 cfun_frame_layout.last_save_gpr_slot = j; | 7001 cfun_frame_layout.last_save_gpr_slot = j; |
6996 | 7002 |
6997 for (i = cfun_frame_layout.first_save_gpr_slot; | 7003 for (i = cfun_frame_layout.first_save_gpr_slot; |
6998 i < cfun_frame_layout.last_save_gpr_slot + 1; | 7004 i < cfun_frame_layout.last_save_gpr_slot + 1; |
6999 i++) | 7005 i++) |
7000 if (clobbered_regs[i]) | 7006 if (clobbered_regs[i]) |
7001 break; | 7007 break; |
7002 | 7008 |
7003 for (j = cfun_frame_layout.last_save_gpr_slot; j > i; j--) | 7009 for (j = cfun_frame_layout.last_save_gpr_slot; j > i; j--) |
7004 if (clobbered_regs[j]) | 7010 if (clobbered_regs[j]) |
7005 break; | 7011 break; |
7006 | 7012 |
7007 if (i == cfun_frame_layout.last_save_gpr_slot + 1) | 7013 if (i == cfun_frame_layout.last_save_gpr_slot + 1) |
7008 { | 7014 { |
7009 /* Nothing to save/restore. */ | 7015 /* Nothing to save/restore. */ |
7010 cfun_frame_layout.first_save_gpr = -1; | 7016 cfun_frame_layout.first_save_gpr = -1; |
7011 cfun_frame_layout.first_restore_gpr = -1; | 7017 cfun_frame_layout.first_restore_gpr = -1; |
7081 int i; | 7087 int i; |
7082 | 7088 |
7083 cfun_frame_layout.frame_size = get_frame_size (); | 7089 cfun_frame_layout.frame_size = get_frame_size (); |
7084 if (!TARGET_64BIT && cfun_frame_layout.frame_size > 0x7fff0000) | 7090 if (!TARGET_64BIT && cfun_frame_layout.frame_size > 0x7fff0000) |
7085 fatal_error ("total size of local variables exceeds architecture limit"); | 7091 fatal_error ("total size of local variables exceeds architecture limit"); |
7086 | 7092 |
7087 if (!TARGET_PACKED_STACK) | 7093 if (!TARGET_PACKED_STACK) |
7088 { | 7094 { |
7089 cfun_frame_layout.backchain_offset = 0; | 7095 cfun_frame_layout.backchain_offset = 0; |
7090 cfun_frame_layout.f0_offset = 16 * UNITS_PER_WORD; | 7096 cfun_frame_layout.f0_offset = 16 * UNITS_PER_WORD; |
7091 cfun_frame_layout.f4_offset = cfun_frame_layout.f0_offset + 2 * 8; | 7097 cfun_frame_layout.f4_offset = cfun_frame_layout.f0_offset + 2 * 8; |
7095 } | 7101 } |
7096 else if (TARGET_BACKCHAIN) /* kernel stack layout */ | 7102 else if (TARGET_BACKCHAIN) /* kernel stack layout */ |
7097 { | 7103 { |
7098 cfun_frame_layout.backchain_offset = (STACK_POINTER_OFFSET | 7104 cfun_frame_layout.backchain_offset = (STACK_POINTER_OFFSET |
7099 - UNITS_PER_WORD); | 7105 - UNITS_PER_WORD); |
7100 cfun_frame_layout.gprs_offset | 7106 cfun_frame_layout.gprs_offset |
7101 = (cfun_frame_layout.backchain_offset | 7107 = (cfun_frame_layout.backchain_offset |
7102 - (STACK_POINTER_REGNUM - cfun_frame_layout.first_save_gpr_slot + 1) | 7108 - (STACK_POINTER_REGNUM - cfun_frame_layout.first_save_gpr_slot + 1) |
7103 * UNITS_PER_WORD); | 7109 * UNITS_PER_WORD); |
7104 | 7110 |
7105 if (TARGET_64BIT) | 7111 if (TARGET_64BIT) |
7106 { | 7112 { |
7107 cfun_frame_layout.f4_offset | 7113 cfun_frame_layout.f4_offset |
7108 = (cfun_frame_layout.gprs_offset | 7114 = (cfun_frame_layout.gprs_offset |
7109 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3))); | 7115 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3))); |
7110 | 7116 |
7111 cfun_frame_layout.f0_offset | 7117 cfun_frame_layout.f0_offset |
7112 = (cfun_frame_layout.f4_offset | 7118 = (cfun_frame_layout.f4_offset |
7113 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1))); | 7119 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1))); |
7114 } | 7120 } |
7115 else | 7121 else |
7116 { | 7122 { |
7117 /* On 31 bit we have to care about alignment of the | 7123 /* On 31 bit we have to care about alignment of the |
7118 floating point regs to provide fastest access. */ | 7124 floating point regs to provide fastest access. */ |
7119 cfun_frame_layout.f0_offset | 7125 cfun_frame_layout.f0_offset |
7120 = ((cfun_frame_layout.gprs_offset | 7126 = ((cfun_frame_layout.gprs_offset |
7121 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1)) | 7127 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1)) |
7122 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1))); | 7128 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1))); |
7123 | 7129 |
7124 cfun_frame_layout.f4_offset | 7130 cfun_frame_layout.f4_offset |
7125 = (cfun_frame_layout.f0_offset | 7131 = (cfun_frame_layout.f0_offset |
7126 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3))); | 7132 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3))); |
7127 } | 7133 } |
7128 } | 7134 } |
7129 else /* no backchain */ | 7135 else /* no backchain */ |
7130 { | 7136 { |
7131 cfun_frame_layout.f4_offset | 7137 cfun_frame_layout.f4_offset |
7132 = (STACK_POINTER_OFFSET | 7138 = (STACK_POINTER_OFFSET |
7133 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3))); | 7139 - 8 * (cfun_fpr_bit_p (2) + cfun_fpr_bit_p (3))); |
7134 | 7140 |
7135 cfun_frame_layout.f0_offset | 7141 cfun_frame_layout.f0_offset |
7136 = (cfun_frame_layout.f4_offset | 7142 = (cfun_frame_layout.f4_offset |
7137 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1))); | 7143 - 8 * (cfun_fpr_bit_p (0) + cfun_fpr_bit_p (1))); |
7138 | 7144 |
7139 cfun_frame_layout.gprs_offset | 7145 cfun_frame_layout.gprs_offset |
7140 = cfun_frame_layout.f0_offset - cfun_gprs_save_area_size; | 7146 = cfun_frame_layout.f0_offset - cfun_gprs_save_area_size; |
7141 } | 7147 } |
7142 | 7148 |
7143 if (current_function_is_leaf | 7149 if (current_function_is_leaf |
7144 && !TARGET_TPF_PROFILING | 7150 && !TARGET_TPF_PROFILING |
7155 else | 7161 else |
7156 { | 7162 { |
7157 if (TARGET_BACKCHAIN) | 7163 if (TARGET_BACKCHAIN) |
7158 cfun_frame_layout.frame_size += UNITS_PER_WORD; | 7164 cfun_frame_layout.frame_size += UNITS_PER_WORD; |
7159 | 7165 |
7160 /* No alignment trouble here because f8-f15 are only saved under | 7166 /* No alignment trouble here because f8-f15 are only saved under |
7161 64 bit. */ | 7167 64 bit. */ |
7162 cfun_frame_layout.f8_offset = (MIN (MIN (cfun_frame_layout.f0_offset, | 7168 cfun_frame_layout.f8_offset = (MIN (MIN (cfun_frame_layout.f0_offset, |
7163 cfun_frame_layout.f4_offset), | 7169 cfun_frame_layout.f4_offset), |
7164 cfun_frame_layout.gprs_offset) | 7170 cfun_frame_layout.gprs_offset) |
7165 - cfun_frame_layout.high_fprs * 8); | 7171 - cfun_frame_layout.high_fprs * 8); |
7167 cfun_frame_layout.frame_size += cfun_frame_layout.high_fprs * 8; | 7173 cfun_frame_layout.frame_size += cfun_frame_layout.high_fprs * 8; |
7168 | 7174 |
7169 for (i = 0; i < 8; i++) | 7175 for (i = 0; i < 8; i++) |
7170 if (cfun_fpr_bit_p (i)) | 7176 if (cfun_fpr_bit_p (i)) |
7171 cfun_frame_layout.frame_size += 8; | 7177 cfun_frame_layout.frame_size += 8; |
7172 | 7178 |
7173 cfun_frame_layout.frame_size += cfun_gprs_save_area_size; | 7179 cfun_frame_layout.frame_size += cfun_gprs_save_area_size; |
7174 | 7180 |
7175 /* If under 31 bit an odd number of gprs has to be saved we have to adjust | 7181 /* If under 31 bit an odd number of gprs has to be saved we have to adjust |
7176 the frame size to sustain 8 byte alignment of stack frames. */ | 7182 the frame size to sustain 8 byte alignment of stack frames. */ |
7177 cfun_frame_layout.frame_size = ((cfun_frame_layout.frame_size + | 7183 cfun_frame_layout.frame_size = ((cfun_frame_layout.frame_size + |
7178 STACK_BOUNDARY / BITS_PER_UNIT - 1) | 7184 STACK_BOUNDARY / BITS_PER_UNIT - 1) |
7179 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1)); | 7185 & ~(STACK_BOUNDARY / BITS_PER_UNIT - 1)); |
7236 { | 7242 { |
7237 int clobbered_regs[16]; | 7243 int clobbered_regs[16]; |
7238 | 7244 |
7239 s390_register_info (clobbered_regs); | 7245 s390_register_info (clobbered_regs); |
7240 | 7246 |
7241 df_set_regs_ever_live (BASE_REGNUM, | 7247 df_set_regs_ever_live (BASE_REGNUM, |
7242 clobbered_regs[BASE_REGNUM] ? true : false); | 7248 clobbered_regs[BASE_REGNUM] ? true : false); |
7243 df_set_regs_ever_live (RETURN_REGNUM, | 7249 df_set_regs_ever_live (RETURN_REGNUM, |
7244 clobbered_regs[RETURN_REGNUM] ? true : false); | 7250 clobbered_regs[RETURN_REGNUM] ? true : false); |
7245 df_set_regs_ever_live (STACK_POINTER_REGNUM, | 7251 df_set_regs_ever_live (STACK_POINTER_REGNUM, |
7246 clobbered_regs[STACK_POINTER_REGNUM] ? true : false); | 7252 clobbered_regs[STACK_POINTER_REGNUM] ? true : false); |
7247 | 7253 |
7248 if (cfun->machine->base_reg) | 7254 if (cfun->machine->base_reg) |
7249 df_set_regs_ever_live (REGNO (cfun->machine->base_reg), true); | 7255 df_set_regs_ever_live (REGNO (cfun->machine->base_reg), true); |
7250 } | 7256 } |
7272 | 7278 |
7273 /* fallthrough */ | 7279 /* fallthrough */ |
7274 case GENERAL_REGS: | 7280 case GENERAL_REGS: |
7275 if (REGNO_PAIR_OK (regno, mode)) | 7281 if (REGNO_PAIR_OK (regno, mode)) |
7276 { | 7282 { |
7277 if (TARGET_64BIT | 7283 if (TARGET_64BIT |
7278 || (mode != TFmode && mode != TCmode && mode != TDmode)) | 7284 || (mode != TFmode && mode != TCmode && mode != TDmode)) |
7279 return true; | 7285 return true; |
7280 } | 7286 } |
7281 break; | 7287 break; |
7282 case CC_REGS: | 7288 case CC_REGS: |
7283 if (GET_MODE_CLASS (mode) == MODE_CC) | 7289 if (GET_MODE_CLASS (mode) == MODE_CC) |
7284 return true; | 7290 return true; |
7285 break; | 7291 break; |
7291 } | 7297 } |
7292 break; | 7298 break; |
7293 default: | 7299 default: |
7294 return false; | 7300 return false; |
7295 } | 7301 } |
7296 | 7302 |
7297 return false; | 7303 return false; |
7298 } | 7304 } |
7299 | 7305 |
7300 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */ | 7306 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */ |
7301 | 7307 |
7333 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD; | 7339 return (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD; |
7334 } | 7340 } |
7335 | 7341 |
7336 /* Return true if register FROM can be eliminated via register TO. */ | 7342 /* Return true if register FROM can be eliminated via register TO. */ |
7337 | 7343 |
7338 bool | 7344 static bool |
7339 s390_can_eliminate (int from, int to) | 7345 s390_can_eliminate (const int from, const int to) |
7340 { | 7346 { |
7341 /* On zSeries machines, we have not marked the base register as fixed. | 7347 /* On zSeries machines, we have not marked the base register as fixed. |
7342 Instead, we have an elimination rule BASE_REGNUM -> BASE_REGNUM. | 7348 Instead, we have an elimination rule BASE_REGNUM -> BASE_REGNUM. |
7343 If a function requires the base register, we say here that this | 7349 If a function requires the base register, we say here that this |
7344 elimination cannot be performed. This will cause reload to free | 7350 elimination cannot be performed. This will cause reload to free |
7388 return 0; | 7394 return 0; |
7389 | 7395 |
7390 switch (from) | 7396 switch (from) |
7391 { | 7397 { |
7392 case FRAME_POINTER_REGNUM: | 7398 case FRAME_POINTER_REGNUM: |
7393 offset = (get_frame_size() | 7399 offset = (get_frame_size() |
7394 + STACK_POINTER_OFFSET | 7400 + STACK_POINTER_OFFSET |
7395 + crtl->outgoing_args_size); | 7401 + crtl->outgoing_args_size); |
7396 break; | 7402 break; |
7397 | 7403 |
7398 case ARG_POINTER_REGNUM: | 7404 case ARG_POINTER_REGNUM: |
7447 set_mem_alias_set (addr, get_frame_alias_set ()); | 7453 set_mem_alias_set (addr, get_frame_alias_set ()); |
7448 | 7454 |
7449 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr); | 7455 return emit_move_insn (gen_rtx_REG (DFmode, regnum), addr); |
7450 } | 7456 } |
7451 | 7457 |
7458 /* Return true if REGNO is a global register, but not one | |
7459 of the special ones that need to be saved/restored anyway. | 
7460 | |
7461 static inline bool | |
7462 global_not_special_regno_p (int regno) | |
7463 { | |
7464 return (global_regs[regno] | |
7465 /* These registers are special and need to be | |
7466 restored in any case. */ | |
7467 && !(regno == STACK_POINTER_REGNUM | |
7468 || regno == RETURN_REGNUM | |
7469 || regno == BASE_REGNUM | |
7470 || (flag_pic && regno == (int)PIC_OFFSET_TABLE_REGNUM))); | |
7471 } | |
7472 | |
7452 /* Generate insn to save registers FIRST to LAST into | 7473 /* Generate insn to save registers FIRST to LAST into |
7453 the register save area located at offset OFFSET | 7474 the register save area located at offset OFFSET |
7454 relative to register BASE. */ | 7475 relative to register BASE. */ |
7455 | 7476 |
7456 static rtx | 7477 static rtx |
7470 if (TARGET_64BIT) | 7491 if (TARGET_64BIT) |
7471 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first)); | 7492 insn = gen_movdi (addr, gen_rtx_REG (Pmode, first)); |
7472 else | 7493 else |
7473 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first)); | 7494 insn = gen_movsi (addr, gen_rtx_REG (Pmode, first)); |
7474 | 7495 |
7475 RTX_FRAME_RELATED_P (insn) = 1; | 7496 if (!global_not_special_regno_p (first)) |
7497 RTX_FRAME_RELATED_P (insn) = 1; | |
7476 return insn; | 7498 return insn; |
7477 } | 7499 } |
7478 | 7500 |
7479 | 7501 |
7480 insn = gen_store_multiple (addr, | 7502 insn = gen_store_multiple (addr, |
7483 | 7505 |
7484 if (first <= 6 && cfun->stdarg) | 7506 if (first <= 6 && cfun->stdarg) |
7485 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++) | 7507 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++) |
7486 { | 7508 { |
7487 rtx mem = XEXP (XVECEXP (PATTERN (insn), 0, i), 0); | 7509 rtx mem = XEXP (XVECEXP (PATTERN (insn), 0, i), 0); |
7488 | 7510 |
7489 if (first + i <= 6) | 7511 if (first + i <= 6) |
7490 set_mem_alias_set (mem, get_varargs_alias_set ()); | 7512 set_mem_alias_set (mem, get_varargs_alias_set ()); |
7491 } | 7513 } |
7492 | 7514 |
7493 /* We need to set the FRAME_RELATED flag on all SETs | 7515 /* We need to set the FRAME_RELATED flag on all SETs |
7500 FRAME_RELATED flags for those SETs, because the first SET | 7522 FRAME_RELATED flags for those SETs, because the first SET |
7501 of the PARALLEL is always treated as if it had the flag | 7523 of the PARALLEL is always treated as if it had the flag |
7502 set, even if it does not. Therefore we emit a new pattern | 7524 set, even if it does not. Therefore we emit a new pattern |
7503 without those registers as REG_FRAME_RELATED_EXPR note. */ | 7525 without those registers as REG_FRAME_RELATED_EXPR note. */ |
7504 | 7526 |
7505 if (first >= 6) | 7527 if (first >= 6 && !global_not_special_regno_p (first)) |
7506 { | 7528 { |
7507 rtx pat = PATTERN (insn); | 7529 rtx pat = PATTERN (insn); |
7508 | 7530 |
7509 for (i = 0; i < XVECLEN (pat, 0); i++) | 7531 for (i = 0; i < XVECLEN (pat, 0); i++) |
7510 if (GET_CODE (XVECEXP (pat, 0, i)) == SET) | 7532 if (GET_CODE (XVECEXP (pat, 0, i)) == SET |
7533 && !global_not_special_regno_p (REGNO (SET_SRC (XVECEXP (pat, | |
7534 0, i))))) | |
7511 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1; | 7535 RTX_FRAME_RELATED_P (XVECEXP (pat, 0, i)) = 1; |
7512 | 7536 |
7513 RTX_FRAME_RELATED_P (insn) = 1; | 7537 RTX_FRAME_RELATED_P (insn) = 1; |
7514 } | 7538 } |
7515 else if (last >= 6) | 7539 else if (last >= 6) |
7516 { | 7540 { |
7517 addr = plus_constant (base, offset + (6 - first) * UNITS_PER_WORD); | 7541 int start; |
7542 | |
7543 for (start = first >= 6 ? first : 6; start <= last; start++) | |
7544 if (!global_not_special_regno_p (start)) | |
7545 break; | |
7546 | |
7547 if (start > last) | |
7548 return insn; | |
7549 | |
7550 addr = plus_constant (base, offset + (start - first) * UNITS_PER_WORD); | |
7518 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr), | 7551 note = gen_store_multiple (gen_rtx_MEM (Pmode, addr), |
7519 gen_rtx_REG (Pmode, 6), | 7552 gen_rtx_REG (Pmode, start), |
7520 GEN_INT (last - 6 + 1)); | 7553 GEN_INT (last - start + 1)); |
7521 note = PATTERN (note); | 7554 note = PATTERN (note); |
7522 | 7555 |
7523 REG_NOTES (insn) = | 7556 add_reg_note (insn, REG_FRAME_RELATED_EXPR, note); |
7524 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, | |
7525 note, REG_NOTES (insn)); | |
7526 | 7557 |
7527 for (i = 0; i < XVECLEN (note, 0); i++) | 7558 for (i = 0; i < XVECLEN (note, 0); i++) |
7528 if (GET_CODE (XVECEXP (note, 0, i)) == SET) | 7559 if (GET_CODE (XVECEXP (note, 0, i)) == SET |
7560 && !global_not_special_regno_p (REGNO (SET_SRC (XVECEXP (note, | |
7561 0, i))))) | |
7529 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1; | 7562 RTX_FRAME_RELATED_P (XVECEXP (note, 0, i)) = 1; |
7530 | 7563 |
7531 RTX_FRAME_RELATED_P (insn) = 1; | 7564 RTX_FRAME_RELATED_P (insn) = 1; |
7532 } | 7565 } |
7533 | 7566 |
7649 pop_topmost_sequence (); | 7682 pop_topmost_sequence (); |
7650 | 7683 |
7651 /* Choose best register to use for temp use within prologue. | 7684 /* Choose best register to use for temp use within prologue. |
7652 See below for why TPF must use the register 1. */ | 7685 See below for why TPF must use the register 1. */ |
7653 | 7686 |
7654 if (!has_hard_reg_initial_val (Pmode, RETURN_REGNUM) | 7687 if (!has_hard_reg_initial_val (Pmode, RETURN_REGNUM) |
7655 && !current_function_is_leaf | 7688 && !current_function_is_leaf |
7656 && !TARGET_TPF_PROFILING) | 7689 && !TARGET_TPF_PROFILING) |
7657 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM); | 7690 temp_reg = gen_rtx_REG (Pmode, RETURN_REGNUM); |
7658 else | 7691 else |
7659 temp_reg = gen_rtx_REG (Pmode, 1); | 7692 temp_reg = gen_rtx_REG (Pmode, 1); |
7660 | 7693 |
7661 /* Save call saved gprs. */ | 7694 /* Save call saved gprs. */ |
7662 if (cfun_frame_layout.first_save_gpr != -1) | 7695 if (cfun_frame_layout.first_save_gpr != -1) |
7663 { | 7696 { |
7664 insn = save_gprs (stack_pointer_rtx, | 7697 insn = save_gprs (stack_pointer_rtx, |
7665 cfun_frame_layout.gprs_offset + | 7698 cfun_frame_layout.gprs_offset + |
7666 UNITS_PER_WORD * (cfun_frame_layout.first_save_gpr | 7699 UNITS_PER_WORD * (cfun_frame_layout.first_save_gpr |
7667 - cfun_frame_layout.first_save_gpr_slot), | 7700 - cfun_frame_layout.first_save_gpr_slot), |
7668 cfun_frame_layout.first_save_gpr, | 7701 cfun_frame_layout.first_save_gpr, |
7669 cfun_frame_layout.last_save_gpr); | 7702 cfun_frame_layout.last_save_gpr); |
7670 emit_insn (insn); | 7703 emit_insn (insn); |
7671 } | 7704 } |
7672 | 7705 |
7673 /* Dummy insn to mark literal pool slot. */ | 7706 /* Dummy insn to mark literal pool slot. */ |
7716 | 7749 |
7717 for (i = 15; i > 7 && offset >= 0; i--) | 7750 for (i = 15; i > 7 && offset >= 0; i--) |
7718 if (cfun_fpr_bit_p (i)) | 7751 if (cfun_fpr_bit_p (i)) |
7719 { | 7752 { |
7720 insn = save_fpr (stack_pointer_rtx, offset, i + 16); | 7753 insn = save_fpr (stack_pointer_rtx, offset, i + 16); |
7721 | 7754 |
7722 RTX_FRAME_RELATED_P (insn) = 1; | 7755 RTX_FRAME_RELATED_P (insn) = 1; |
7723 offset -= 8; | 7756 offset -= 8; |
7724 } | 7757 } |
7725 if (offset >= cfun_frame_layout.f8_offset) | 7758 if (offset >= cfun_frame_layout.f8_offset) |
7726 next_fpr = i + 16; | 7759 next_fpr = i + 16; |
7727 } | 7760 } |
7728 | 7761 |
7729 if (!TARGET_PACKED_STACK) | 7762 if (!TARGET_PACKED_STACK) |
7730 next_fpr = cfun_save_high_fprs_p ? 31 : 0; | 7763 next_fpr = cfun_save_high_fprs_p ? 31 : 0; |
7731 | 7764 |
7732 /* Decrement stack pointer. */ | 7765 /* Decrement stack pointer. */ |
7733 | 7766 |
7734 if (cfun_frame_layout.frame_size > 0) | 7767 if (cfun_frame_layout.frame_size > 0) |
7735 { | 7768 { |
7736 rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size); | 7769 rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size); |
7770 rtx real_frame_off; | |
7737 | 7771 |
7738 if (s390_stack_size) | 7772 if (s390_stack_size) |
7739 { | 7773 { |
7740 HOST_WIDE_INT stack_guard; | 7774 HOST_WIDE_INT stack_guard; |
7741 | 7775 |
7766 HOST_WIDE_INT stack_check_mask = ((s390_stack_size - 1) | 7800 HOST_WIDE_INT stack_check_mask = ((s390_stack_size - 1) |
7767 & ~(stack_guard - 1)); | 7801 & ~(stack_guard - 1)); |
7768 rtx t = gen_rtx_AND (Pmode, stack_pointer_rtx, | 7802 rtx t = gen_rtx_AND (Pmode, stack_pointer_rtx, |
7769 GEN_INT (stack_check_mask)); | 7803 GEN_INT (stack_check_mask)); |
7770 if (TARGET_64BIT) | 7804 if (TARGET_64BIT) |
7771 gen_cmpdi (t, const0_rtx); | 7805 emit_insn (gen_ctrapdi4 (gen_rtx_EQ (VOIDmode, t, const0_rtx), |
7806 t, const0_rtx, const0_rtx)); | |
7772 else | 7807 else |
7773 gen_cmpsi (t, const0_rtx); | 7808 emit_insn (gen_ctrapsi4 (gen_rtx_EQ (VOIDmode, t, const0_rtx), |
7774 | 7809 t, const0_rtx, const0_rtx)); |
7775 emit_insn (gen_conditional_trap (gen_rtx_EQ (CCmode, | |
7776 gen_rtx_REG (CCmode, | |
7777 CC_REGNUM), | |
7778 const0_rtx), | |
7779 const0_rtx)); | |
7780 } | 7810 } |
7781 } | 7811 } |
7782 | 7812 |
7783 if (s390_warn_framesize > 0 | 7813 if (s390_warn_framesize > 0 |
7784 && cfun_frame_layout.frame_size >= s390_warn_framesize) | 7814 && cfun_frame_layout.frame_size >= s390_warn_framesize) |
7785 warning (0, "frame size of %qs is " HOST_WIDE_INT_PRINT_DEC " bytes", | 7815 warning (0, "frame size of %qs is " HOST_WIDE_INT_PRINT_DEC " bytes", |
7786 current_function_name (), cfun_frame_layout.frame_size); | 7816 current_function_name (), cfun_frame_layout.frame_size); |
7787 | 7817 |
7788 if (s390_warn_dynamicstack_p && cfun->calls_alloca) | 7818 if (s390_warn_dynamicstack_p && cfun->calls_alloca) |
7789 warning (0, "%qs uses dynamic stack allocation", current_function_name ()); | 7819 warning (0, "%qs uses dynamic stack allocation", current_function_name ()); |
7790 | 7820 |
7795 /* Subtract frame size from stack pointer. */ | 7825 /* Subtract frame size from stack pointer. */ |
7796 | 7826 |
7797 if (DISP_IN_RANGE (INTVAL (frame_off))) | 7827 if (DISP_IN_RANGE (INTVAL (frame_off))) |
7798 { | 7828 { |
7799 insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx, | 7829 insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx, |
7800 gen_rtx_PLUS (Pmode, stack_pointer_rtx, | 7830 gen_rtx_PLUS (Pmode, stack_pointer_rtx, |
7801 frame_off)); | 7831 frame_off)); |
7802 insn = emit_insn (insn); | 7832 insn = emit_insn (insn); |
7803 } | 7833 } |
7804 else | 7834 else |
7805 { | 7835 { |
7809 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off)); | 7839 insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off)); |
7810 annotate_constant_pool_refs (&PATTERN (insn)); | 7840 annotate_constant_pool_refs (&PATTERN (insn)); |
7811 } | 7841 } |
7812 | 7842 |
7813 RTX_FRAME_RELATED_P (insn) = 1; | 7843 RTX_FRAME_RELATED_P (insn) = 1; |
7814 REG_NOTES (insn) = | 7844 real_frame_off = GEN_INT (-cfun_frame_layout.frame_size); |
7815 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, | 7845 add_reg_note (insn, REG_FRAME_RELATED_EXPR, |
7816 gen_rtx_SET (VOIDmode, stack_pointer_rtx, | 7846 gen_rtx_SET (VOIDmode, stack_pointer_rtx, |
7817 gen_rtx_PLUS (Pmode, stack_pointer_rtx, | 7847 gen_rtx_PLUS (Pmode, stack_pointer_rtx, |
7818 GEN_INT (-cfun_frame_layout.frame_size))), | 7848 real_frame_off))); |
7819 REG_NOTES (insn)); | |
7820 | 7849 |
7821 /* Set backchain. */ | 7850 /* Set backchain. */ |
7822 | 7851 |
7823 if (TARGET_BACKCHAIN) | 7852 if (TARGET_BACKCHAIN) |
7824 { | 7853 { |
7825 if (cfun_frame_layout.backchain_offset) | 7854 if (cfun_frame_layout.backchain_offset) |
7826 addr = gen_rtx_MEM (Pmode, | 7855 addr = gen_rtx_MEM (Pmode, |
7827 plus_constant (stack_pointer_rtx, | 7856 plus_constant (stack_pointer_rtx, |
7828 cfun_frame_layout.backchain_offset)); | 7857 cfun_frame_layout.backchain_offset)); |
7829 else | 7858 else |
7830 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx); | 7859 addr = gen_rtx_MEM (Pmode, stack_pointer_rtx); |
7831 set_mem_alias_set (addr, get_frame_alias_set ()); | 7860 set_mem_alias_set (addr, get_frame_alias_set ()); |
7832 insn = emit_insn (gen_move_insn (addr, temp_reg)); | 7861 insn = emit_insn (gen_move_insn (addr, temp_reg)); |
7833 } | 7862 } |
7834 | 7863 |
7835 /* If we support asynchronous exceptions (e.g. for Java), | 7864 /* If we support asynchronous exceptions (e.g. for Java), |
7850 /* If the stack might be accessed through a different register | 7879 /* If the stack might be accessed through a different register |
7851 we have to make sure that the stack pointer decrement is not | 7880 we have to make sure that the stack pointer decrement is not |
7852 moved below the use of the stack slots. */ | 7881 moved below the use of the stack slots. */ |
7853 s390_emit_stack_tie (); | 7882 s390_emit_stack_tie (); |
7854 | 7883 |
7855 insn = emit_insn (gen_add2_insn (temp_reg, | 7884 insn = emit_insn (gen_add2_insn (temp_reg, |
7856 GEN_INT (cfun_frame_layout.f8_offset))); | 7885 GEN_INT (cfun_frame_layout.f8_offset))); |
7857 | 7886 |
7858 offset = 0; | 7887 offset = 0; |
7859 | 7888 |
7860 for (i = 24; i <= next_fpr; i++) | 7889 for (i = 24; i <= next_fpr; i++) |
7862 { | 7891 { |
7863 rtx addr = plus_constant (stack_pointer_rtx, | 7892 rtx addr = plus_constant (stack_pointer_rtx, |
7864 cfun_frame_layout.frame_size | 7893 cfun_frame_layout.frame_size |
7865 + cfun_frame_layout.f8_offset | 7894 + cfun_frame_layout.f8_offset |
7866 + offset); | 7895 + offset); |
7867 | 7896 |
7868 insn = save_fpr (temp_reg, offset, i); | 7897 insn = save_fpr (temp_reg, offset, i); |
7869 offset += 8; | 7898 offset += 8; |
7870 RTX_FRAME_RELATED_P (insn) = 1; | 7899 RTX_FRAME_RELATED_P (insn) = 1; |
7871 REG_NOTES (insn) = | 7900 add_reg_note (insn, REG_FRAME_RELATED_EXPR, |
7872 gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, | 7901 gen_rtx_SET (VOIDmode, |
7873 gen_rtx_SET (VOIDmode, | 7902 gen_rtx_MEM (DFmode, addr), |
7874 gen_rtx_MEM (DFmode, addr), | 7903 gen_rtx_REG (DFmode, i))); |
7875 gen_rtx_REG (DFmode, i)), | |
7876 REG_NOTES (insn)); | |
7877 } | 7904 } |
7878 } | 7905 } |
7879 | 7906 |
7880 /* Set frame pointer, if needed. */ | 7907 /* Set frame pointer, if needed. */ |
7881 | 7908 |
7913 /* Expand the epilogue into a bunch of separate insns. */ | 7940 /* Expand the epilogue into a bunch of separate insns. */ |
7914 | 7941 |
7915 void | 7942 void |
7916 s390_emit_epilogue (bool sibcall) | 7943 s390_emit_epilogue (bool sibcall) |
7917 { | 7944 { |
7918 rtx frame_pointer, return_reg; | 7945 rtx frame_pointer, return_reg, cfa_restores = NULL_RTX; |
7919 int area_bottom, area_top, offset = 0; | 7946 int area_bottom, area_top, offset = 0; |
7920 int next_offset; | 7947 int next_offset; |
7921 rtvec p; | 7948 rtvec p; |
7922 int i; | 7949 int i; |
7923 | 7950 |
7935 emit_insn (gen_epilogue_tpf ()); | 7962 emit_insn (gen_epilogue_tpf ()); |
7936 } | 7963 } |
7937 | 7964 |
7938 /* Check whether to use frame or stack pointer for restore. */ | 7965 /* Check whether to use frame or stack pointer for restore. */ |
7939 | 7966 |
7940 frame_pointer = (frame_pointer_needed | 7967 frame_pointer = (frame_pointer_needed |
7941 ? hard_frame_pointer_rtx : stack_pointer_rtx); | 7968 ? hard_frame_pointer_rtx : stack_pointer_rtx); |
7942 | 7969 |
7943 s390_frame_area (&area_bottom, &area_top); | 7970 s390_frame_area (&area_bottom, &area_top); |
7944 | 7971 |
7945 /* Check whether we can access the register save area. | 7972 /* Check whether we can access the register save area. |
7955 /* Area is in range. */ | 7982 /* Area is in range. */ |
7956 offset = cfun_frame_layout.frame_size; | 7983 offset = cfun_frame_layout.frame_size; |
7957 } | 7984 } |
7958 else | 7985 else |
7959 { | 7986 { |
7960 rtx insn, frame_off; | 7987 rtx insn, frame_off, cfa; |
7961 | 7988 |
7962 offset = area_bottom < 0 ? -area_bottom : 0; | 7989 offset = area_bottom < 0 ? -area_bottom : 0; |
7963 frame_off = GEN_INT (cfun_frame_layout.frame_size - offset); | 7990 frame_off = GEN_INT (cfun_frame_layout.frame_size - offset); |
7964 | 7991 |
7992 cfa = gen_rtx_SET (VOIDmode, frame_pointer, | |
7993 gen_rtx_PLUS (Pmode, frame_pointer, frame_off)); | |
7965 if (DISP_IN_RANGE (INTVAL (frame_off))) | 7994 if (DISP_IN_RANGE (INTVAL (frame_off))) |
7966 { | 7995 { |
7967 insn = gen_rtx_SET (VOIDmode, frame_pointer, | 7996 insn = gen_rtx_SET (VOIDmode, frame_pointer, |
7968 gen_rtx_PLUS (Pmode, frame_pointer, frame_off)); | 7997 gen_rtx_PLUS (Pmode, frame_pointer, frame_off)); |
7969 insn = emit_insn (insn); | 7998 insn = emit_insn (insn); |
7974 frame_off = force_const_mem (Pmode, frame_off); | 8003 frame_off = force_const_mem (Pmode, frame_off); |
7975 | 8004 |
7976 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off)); | 8005 insn = emit_insn (gen_add2_insn (frame_pointer, frame_off)); |
7977 annotate_constant_pool_refs (&PATTERN (insn)); | 8006 annotate_constant_pool_refs (&PATTERN (insn)); |
7978 } | 8007 } |
8008 add_reg_note (insn, REG_CFA_ADJUST_CFA, cfa); | |
8009 RTX_FRAME_RELATED_P (insn) = 1; | |
7979 } | 8010 } |
7980 | 8011 |
7981 /* Restore call saved fprs. */ | 8012 /* Restore call saved fprs. */ |
7982 | 8013 |
7983 if (TARGET_64BIT) | 8014 if (TARGET_64BIT) |
7989 { | 8020 { |
7990 if (cfun_fpr_bit_p (i - 16)) | 8021 if (cfun_fpr_bit_p (i - 16)) |
7991 { | 8022 { |
7992 restore_fpr (frame_pointer, | 8023 restore_fpr (frame_pointer, |
7993 offset + next_offset, i); | 8024 offset + next_offset, i); |
8025 cfa_restores | |
8026 = alloc_reg_note (REG_CFA_RESTORE, | |
8027 gen_rtx_REG (DFmode, i), cfa_restores); | |
7994 next_offset += 8; | 8028 next_offset += 8; |
7995 } | 8029 } |
7996 } | 8030 } |
7997 } | 8031 } |
7998 | 8032 |
7999 } | 8033 } |
8000 else | 8034 else |
8001 { | 8035 { |
8002 next_offset = cfun_frame_layout.f4_offset; | 8036 next_offset = cfun_frame_layout.f4_offset; |
8003 for (i = 18; i < 20; i++) | 8037 for (i = 18; i < 20; i++) |
8004 { | 8038 { |
8005 if (cfun_fpr_bit_p (i - 16)) | 8039 if (cfun_fpr_bit_p (i - 16)) |
8006 { | 8040 { |
8007 restore_fpr (frame_pointer, | 8041 restore_fpr (frame_pointer, |
8008 offset + next_offset, i); | 8042 offset + next_offset, i); |
8043 cfa_restores | |
8044 = alloc_reg_note (REG_CFA_RESTORE, | |
8045 gen_rtx_REG (DFmode, i), cfa_restores); | |
8009 next_offset += 8; | 8046 next_offset += 8; |
8010 } | 8047 } |
8011 else if (!TARGET_PACKED_STACK) | 8048 else if (!TARGET_PACKED_STACK) |
8012 next_offset += 8; | 8049 next_offset += 8; |
8013 } | 8050 } |
8014 | 8051 |
8015 } | 8052 } |
8016 | 8053 |
8017 /* Return register. */ | 8054 /* Return register. */ |
8018 | 8055 |
8019 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM); | 8056 return_reg = gen_rtx_REG (Pmode, RETURN_REGNUM); |
8030 | 8067 |
8031 for (i = cfun_frame_layout.first_restore_gpr; | 8068 for (i = cfun_frame_layout.first_restore_gpr; |
8032 i <= cfun_frame_layout.last_restore_gpr; | 8069 i <= cfun_frame_layout.last_restore_gpr; |
8033 i++) | 8070 i++) |
8034 { | 8071 { |
8035 /* These registers are special and need to be | 8072 if (global_not_special_regno_p (i)) |
8036 restored in any case. */ | |
8037 if (i == STACK_POINTER_REGNUM | |
8038 || i == RETURN_REGNUM | |
8039 || i == BASE_REGNUM | |
8040 || (flag_pic && i == (int)PIC_OFFSET_TABLE_REGNUM)) | |
8041 continue; | |
8042 | |
8043 if (global_regs[i]) | |
8044 { | 8073 { |
8045 addr = plus_constant (frame_pointer, | 8074 addr = plus_constant (frame_pointer, |
8046 offset + cfun_frame_layout.gprs_offset | 8075 offset + cfun_frame_layout.gprs_offset |
8047 + (i - cfun_frame_layout.first_save_gpr_slot) | 8076 + (i - cfun_frame_layout.first_save_gpr_slot) |
8048 * UNITS_PER_WORD); | 8077 * UNITS_PER_WORD); |
8049 addr = gen_rtx_MEM (Pmode, addr); | 8078 addr = gen_rtx_MEM (Pmode, addr); |
8050 set_mem_alias_set (addr, get_frame_alias_set ()); | 8079 set_mem_alias_set (addr, get_frame_alias_set ()); |
8051 emit_move_insn (addr, gen_rtx_REG (Pmode, i)); | 8080 emit_move_insn (addr, gen_rtx_REG (Pmode, i)); |
8052 } | 8081 } |
8082 else | |
8083 cfa_restores | |
8084 = alloc_reg_note (REG_CFA_RESTORE, | |
8085 gen_rtx_REG (Pmode, i), cfa_restores); | |
8053 } | 8086 } |
8054 | 8087 |
8055 if (! sibcall) | 8088 if (! sibcall) |
8056 { | 8089 { |
8057 /* Fetch return address from stack before load multiple, | 8090 /* Fetch return address from stack before load multiple, |
8066 return_regnum = 4; | 8099 return_regnum = 4; |
8067 return_reg = gen_rtx_REG (Pmode, return_regnum); | 8100 return_reg = gen_rtx_REG (Pmode, return_regnum); |
8068 | 8101 |
8069 addr = plus_constant (frame_pointer, | 8102 addr = plus_constant (frame_pointer, |
8070 offset + cfun_frame_layout.gprs_offset | 8103 offset + cfun_frame_layout.gprs_offset |
8071 + (RETURN_REGNUM | 8104 + (RETURN_REGNUM |
8072 - cfun_frame_layout.first_save_gpr_slot) | 8105 - cfun_frame_layout.first_save_gpr_slot) |
8073 * UNITS_PER_WORD); | 8106 * UNITS_PER_WORD); |
8074 addr = gen_rtx_MEM (Pmode, addr); | 8107 addr = gen_rtx_MEM (Pmode, addr); |
8075 set_mem_alias_set (addr, get_frame_alias_set ()); | 8108 set_mem_alias_set (addr, get_frame_alias_set ()); |
8076 emit_move_insn (return_reg, addr); | 8109 emit_move_insn (return_reg, addr); |
8077 } | 8110 } |
8078 } | 8111 } |
8079 | 8112 |
8080 insn = restore_gprs (frame_pointer, | 8113 insn = restore_gprs (frame_pointer, |
8081 offset + cfun_frame_layout.gprs_offset | 8114 offset + cfun_frame_layout.gprs_offset |
8082 + (cfun_frame_layout.first_restore_gpr | 8115 + (cfun_frame_layout.first_restore_gpr |
8083 - cfun_frame_layout.first_save_gpr_slot) | 8116 - cfun_frame_layout.first_save_gpr_slot) |
8084 * UNITS_PER_WORD, | 8117 * UNITS_PER_WORD, |
8085 cfun_frame_layout.first_restore_gpr, | 8118 cfun_frame_layout.first_restore_gpr, |
8086 cfun_frame_layout.last_restore_gpr); | 8119 cfun_frame_layout.last_restore_gpr); |
8087 emit_insn (insn); | 8120 insn = emit_insn (insn); |
8121 REG_NOTES (insn) = cfa_restores; | |
8122 add_reg_note (insn, REG_CFA_DEF_CFA, | |
8123 plus_constant (stack_pointer_rtx, STACK_POINTER_OFFSET)); | |
8124 RTX_FRAME_RELATED_P (insn) = 1; | |
8088 } | 8125 } |
8089 | 8126 |
8090 if (! sibcall) | 8127 if (! sibcall) |
8091 { | 8128 { |
8092 | 8129 |
8324 aggregate_value_p. We can't abort, but it's not clear | 8361 aggregate_value_p. We can't abort, but it's not clear |
8325 what's safe to return. Pretend it's a struct I guess. */ | 8362 what's safe to return. Pretend it's a struct I guess. */ |
8326 return true; | 8363 return true; |
8327 } | 8364 } |
8328 | 8365 |
8366 /* Function arguments and return values are promoted to word size. */ | |
8367 | |
8368 static enum machine_mode | |
8369 s390_promote_function_mode (const_tree type, enum machine_mode mode, | |
8370 int *punsignedp, | |
8371 const_tree fntype ATTRIBUTE_UNUSED, | |
8372 int for_return ATTRIBUTE_UNUSED) | |
8373 { | |
8374 if (INTEGRAL_MODE_P (mode) | |
8375 && GET_MODE_SIZE (mode) < UNITS_PER_WORD) | |
8376 { | |
8377 if (POINTER_TYPE_P (type)) | |
8378 *punsignedp = POINTERS_EXTEND_UNSIGNED; | |
8379 return Pmode; | |
8380 } | |
8381 | |
8382 return mode; | |
8383 } | |
8384 | |
8329 /* Define where to return a (scalar) value of type TYPE. | 8385 /* Define where to return a (scalar) value of type TYPE. |
8330 If TYPE is null, define where to return a (scalar) | 8386 If TYPE is null, define where to return a (scalar) |
8331 value of mode MODE from a libcall. */ | 8387 value of mode MODE from a libcall. */ |
8332 | 8388 |
8333 rtx | 8389 rtx |
8334 s390_function_value (const_tree type, enum machine_mode mode) | 8390 s390_function_value (const_tree type, const_tree fn, enum machine_mode mode) |
8335 { | 8391 { |
8336 if (type) | 8392 if (type) |
8337 { | 8393 { |
8338 int unsignedp = TYPE_UNSIGNED (type); | 8394 int unsignedp = TYPE_UNSIGNED (type); |
8339 mode = promote_mode (type, TYPE_MODE (type), &unsignedp, 1); | 8395 mode = promote_function_mode (type, TYPE_MODE (type), &unsignedp, fn, 1); |
8340 } | 8396 } |
8341 | 8397 |
8342 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT || SCALAR_FLOAT_MODE_P (mode)); | 8398 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT || SCALAR_FLOAT_MODE_P (mode)); |
8343 gcc_assert (GET_MODE_SIZE (mode) <= 8); | 8399 gcc_assert (GET_MODE_SIZE (mode) <= 8); |
8344 | 8400 |
8376 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl; | 8432 tree f_gpr, f_fpr, f_ovf, f_sav, record, type_decl; |
8377 | 8433 |
8378 record = lang_hooks.types.make_type (RECORD_TYPE); | 8434 record = lang_hooks.types.make_type (RECORD_TYPE); |
8379 | 8435 |
8380 type_decl = | 8436 type_decl = |
8381 build_decl (TYPE_DECL, get_identifier ("__va_list_tag"), record); | 8437 build_decl (BUILTINS_LOCATION, |
8382 | 8438 TYPE_DECL, get_identifier ("__va_list_tag"), record); |
8383 f_gpr = build_decl (FIELD_DECL, get_identifier ("__gpr"), | 8439 |
8440 f_gpr = build_decl (BUILTINS_LOCATION, | |
8441 FIELD_DECL, get_identifier ("__gpr"), | |
8384 long_integer_type_node); | 8442 long_integer_type_node); |
8385 f_fpr = build_decl (FIELD_DECL, get_identifier ("__fpr"), | 8443 f_fpr = build_decl (BUILTINS_LOCATION, |
8444 FIELD_DECL, get_identifier ("__fpr"), | |
8386 long_integer_type_node); | 8445 long_integer_type_node); |
8387 f_ovf = build_decl (FIELD_DECL, get_identifier ("__overflow_arg_area"), | 8446 f_ovf = build_decl (BUILTINS_LOCATION, |
8447 FIELD_DECL, get_identifier ("__overflow_arg_area"), | |
8388 ptr_type_node); | 8448 ptr_type_node); |
8389 f_sav = build_decl (FIELD_DECL, get_identifier ("__reg_save_area"), | 8449 f_sav = build_decl (BUILTINS_LOCATION, |
8450 FIELD_DECL, get_identifier ("__reg_save_area"), | |
8390 ptr_type_node); | 8451 ptr_type_node); |
8391 | 8452 |
8392 va_list_gpr_counter_field = f_gpr; | 8453 va_list_gpr_counter_field = f_gpr; |
8393 va_list_fpr_counter_field = f_fpr; | 8454 va_list_fpr_counter_field = f_fpr; |
8394 | 8455 |
8487 || (cfun->va_list_fpr_size && n_fpr < FP_ARG_NUM_REG)) | 8548 || (cfun->va_list_fpr_size && n_fpr < FP_ARG_NUM_REG)) |
8488 { | 8549 { |
8489 t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx); | 8550 t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx); |
8490 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t, | 8551 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t, |
8491 size_int (-RETURN_REGNUM * UNITS_PER_WORD)); | 8552 size_int (-RETURN_REGNUM * UNITS_PER_WORD)); |
8492 | 8553 |
8493 t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t); | 8554 t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t); |
8494 TREE_SIDE_EFFECTS (t) = 1; | 8555 TREE_SIDE_EFFECTS (t) = 1; |
8495 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL); | 8556 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL); |
8496 } | 8557 } |
8497 } | 8558 } |
8519 else | 8580 else |
8520 ret = **args.overflow_arg_area++; | 8581 ret = **args.overflow_arg_area++; |
8521 } */ | 8582 } */ |
8522 | 8583 |
8523 static tree | 8584 static tree |
8524 s390_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p, | 8585 s390_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p, |
8525 gimple_seq *post_p ATTRIBUTE_UNUSED) | 8586 gimple_seq *post_p ATTRIBUTE_UNUSED) |
8526 { | 8587 { |
8527 tree f_gpr, f_fpr, f_ovf, f_sav; | 8588 tree f_gpr, f_fpr, f_ovf, f_sav; |
8528 tree gpr, fpr, ovf, sav, reg, t, u; | 8589 tree gpr, fpr, ovf, sav, reg, t, u; |
8529 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg; | 8590 int indirect_p, size, n_reg, sav_ofs, sav_scale, max_reg; |
8608 max_reg = GP_ARG_NUM_REG - n_reg; | 8669 max_reg = GP_ARG_NUM_REG - n_reg; |
8609 } | 8670 } |
8610 | 8671 |
8611 /* Pull the value out of the saved registers ... */ | 8672 /* Pull the value out of the saved registers ... */ |
8612 | 8673 |
8613 lab_false = create_artificial_label (); | 8674 lab_false = create_artificial_label (UNKNOWN_LOCATION); |
8614 lab_over = create_artificial_label (); | 8675 lab_over = create_artificial_label (UNKNOWN_LOCATION); |
8615 addr = create_tmp_var (ptr_type_node, "addr"); | 8676 addr = create_tmp_var (ptr_type_node, "addr"); |
8616 DECL_POINTER_ALIAS_SET (addr) = get_varargs_alias_set (); | |
8617 | 8677 |
8618 t = fold_convert (TREE_TYPE (reg), size_int (max_reg)); | 8678 t = fold_convert (TREE_TYPE (reg), size_int (max_reg)); |
8619 t = build2 (GT_EXPR, boolean_type_node, reg, t); | 8679 t = build2 (GT_EXPR, boolean_type_node, reg, t); |
8620 u = build1 (GOTO_EXPR, void_type_node, lab_false); | 8680 u = build1 (GOTO_EXPR, void_type_node, lab_false); |
8621 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE); | 8681 t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE); |
8622 gimplify_and_add (t, pre_p); | 8682 gimplify_and_add (t, pre_p); |
8623 | 8683 |
8624 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, | 8684 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, |
8625 size_int (sav_ofs)); | 8685 size_int (sav_ofs)); |
8626 u = build2 (MULT_EXPR, TREE_TYPE (reg), reg, | 8686 u = build2 (MULT_EXPR, TREE_TYPE (reg), reg, |
8627 fold_convert (TREE_TYPE (reg), size_int (sav_scale))); | 8687 fold_convert (TREE_TYPE (reg), size_int (sav_scale))); |
8628 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, fold_convert (sizetype, u)); | 8688 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, fold_convert (sizetype, u)); |
8629 | 8689 |
8630 gimplify_assign (addr, t, pre_p); | 8690 gimplify_assign (addr, t, pre_p); |
8631 | 8691 |
8636 | 8696 |
8637 /* ... Otherwise out of the overflow area. */ | 8697 /* ... Otherwise out of the overflow area. */ |
8638 | 8698 |
8639 t = ovf; | 8699 t = ovf; |
8640 if (size < UNITS_PER_WORD) | 8700 if (size < UNITS_PER_WORD) |
8641 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, | 8701 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, |
8642 size_int (UNITS_PER_WORD - size)); | 8702 size_int (UNITS_PER_WORD - size)); |
8643 | 8703 |
8644 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue); | 8704 gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue); |
8645 | 8705 |
8646 gimplify_assign (addr, t, pre_p); | 8706 gimplify_assign (addr, t, pre_p); |
8647 | 8707 |
8648 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, | 8708 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, |
8649 size_int (size)); | 8709 size_int (size)); |
8650 gimplify_assign (ovf, t, pre_p); | 8710 gimplify_assign (ovf, t, pre_p); |
8651 | 8711 |
8652 gimple_seq_add_stmt (pre_p, gimple_build_label (lab_over)); | 8712 gimple_seq_add_stmt (pre_p, gimple_build_label (lab_over)); |
8653 | 8713 |
8658 fold_convert (TREE_TYPE (reg), size_int (n_reg))); | 8718 fold_convert (TREE_TYPE (reg), size_int (n_reg))); |
8659 gimplify_and_add (u, pre_p); | 8719 gimplify_and_add (u, pre_p); |
8660 | 8720 |
8661 if (indirect_p) | 8721 if (indirect_p) |
8662 { | 8722 { |
8663 t = build_pointer_type (build_pointer_type (type)); | 8723 t = build_pointer_type_for_mode (build_pointer_type (type), |
8724 ptr_mode, true); | |
8664 addr = fold_convert (t, addr); | 8725 addr = fold_convert (t, addr); |
8665 addr = build_va_arg_indirect_ref (addr); | 8726 addr = build_va_arg_indirect_ref (addr); |
8666 } | 8727 } |
8667 else | 8728 else |
8668 { | 8729 { |
8669 t = build_pointer_type (type); | 8730 t = build_pointer_type_for_mode (type, ptr_mode, true); |
8670 addr = fold_convert (t, addr); | 8731 addr = fold_convert (t, addr); |
8671 } | 8732 } |
8672 | 8733 |
8673 return build_va_arg_indirect_ref (addr); | 8734 return build_va_arg_indirect_ref (addr); |
8674 } | 8735 } |
8682 S390_BUILTIN_SET_THREAD_POINTER, | 8743 S390_BUILTIN_SET_THREAD_POINTER, |
8683 | 8744 |
8684 S390_BUILTIN_max | 8745 S390_BUILTIN_max |
8685 }; | 8746 }; |
8686 | 8747 |
8687 static unsigned int const code_for_builtin_64[S390_BUILTIN_max] = { | 8748 static enum insn_code const code_for_builtin_64[S390_BUILTIN_max] = { |
8688 CODE_FOR_get_tp_64, | 8749 CODE_FOR_get_tp_64, |
8689 CODE_FOR_set_tp_64 | 8750 CODE_FOR_set_tp_64 |
8690 }; | 8751 }; |
8691 | 8752 |
8692 static unsigned int const code_for_builtin_31[S390_BUILTIN_max] = { | 8753 static enum insn_code const code_for_builtin_31[S390_BUILTIN_max] = { |
8693 CODE_FOR_get_tp_31, | 8754 CODE_FOR_get_tp_31, |
8694 CODE_FOR_set_tp_31 | 8755 CODE_FOR_set_tp_31 |
8695 }; | 8756 }; |
8696 | 8757 |
8697 static void | 8758 static void |
8721 enum machine_mode mode ATTRIBUTE_UNUSED, | 8782 enum machine_mode mode ATTRIBUTE_UNUSED, |
8722 int ignore ATTRIBUTE_UNUSED) | 8783 int ignore ATTRIBUTE_UNUSED) |
8723 { | 8784 { |
8724 #define MAX_ARGS 2 | 8785 #define MAX_ARGS 2 |
8725 | 8786 |
8726 unsigned int const *code_for_builtin = | 8787 enum insn_code const *code_for_builtin = |
8727 TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31; | 8788 TARGET_64BIT ? code_for_builtin_64 : code_for_builtin_31; |
8728 | 8789 |
8729 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0); | 8790 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0); |
8730 unsigned int fcode = DECL_FUNCTION_CODE (fndecl); | 8791 unsigned int fcode = DECL_FUNCTION_CODE (fndecl); |
8731 enum insn_code icode; | 8792 enum insn_code icode; |
8753 if (arity > MAX_ARGS) | 8814 if (arity > MAX_ARGS) |
8754 return NULL_RTX; | 8815 return NULL_RTX; |
8755 | 8816 |
8756 insn_op = &insn_data[icode].operand[arity + nonvoid]; | 8817 insn_op = &insn_data[icode].operand[arity + nonvoid]; |
8757 | 8818 |
8758 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, 0); | 8819 op[arity] = expand_expr (arg, NULL_RTX, insn_op->mode, EXPAND_NORMAL); |
8759 | 8820 |
8760 if (!(*insn_op->predicate) (op[arity], insn_op->mode)) | 8821 if (!(*insn_op->predicate) (op[arity], insn_op->mode)) |
8761 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]); | 8822 op[arity] = copy_to_mode_reg (insn_op->mode, op[arity]); |
8762 arity++; | 8823 arity++; |
8763 } | 8824 } |
8803 stdio stream FILE. | 8864 stdio stream FILE. |
8804 | 8865 |
8805 On S/390, we use gpr 1 internally in the trampoline code; | 8866 On S/390, we use gpr 1 internally in the trampoline code; |
8806 gpr 0 is used to hold the static chain. */ | 8867 gpr 0 is used to hold the static chain. */ |
8807 | 8868 |
8808 void | 8869 static void |
8809 s390_trampoline_template (FILE *file) | 8870 s390_asm_trampoline_template (FILE *file) |
8810 { | 8871 { |
8811 rtx op[2]; | 8872 rtx op[2]; |
8812 op[0] = gen_rtx_REG (Pmode, 0); | 8873 op[0] = gen_rtx_REG (Pmode, 0); |
8813 op[1] = gen_rtx_REG (Pmode, 1); | 8874 op[1] = gen_rtx_REG (Pmode, 1); |
8814 | 8875 |
8830 | 8891 |
8831 /* Emit RTL insns to initialize the variable parts of a trampoline. | 8892 /* Emit RTL insns to initialize the variable parts of a trampoline. |
8832 FNADDR is an RTX for the address of the function's pure code. | 8893 FNADDR is an RTX for the address of the function's pure code. |
8833 CXT is an RTX for the static chain value for the function. */ | 8894 CXT is an RTX for the static chain value for the function. */ |
8834 | 8895 |
8835 void | 8896 static void |
8836 s390_initialize_trampoline (rtx addr, rtx fnaddr, rtx cxt) | 8897 s390_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt) |
8837 { | 8898 { |
8838 emit_move_insn (gen_rtx_MEM (Pmode, | 8899 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0); |
8839 memory_address (Pmode, | 8900 rtx mem; |
8840 plus_constant (addr, (TARGET_64BIT ? 16 : 8)))), cxt); | 8901 |
8841 emit_move_insn (gen_rtx_MEM (Pmode, | 8902 emit_block_move (m_tramp, assemble_trampoline_template (), |
8842 memory_address (Pmode, | 8903 GEN_INT (2*UNITS_PER_WORD), BLOCK_OP_NORMAL); |
8843 plus_constant (addr, (TARGET_64BIT ? 24 : 12)))), fnaddr); | 8904 |
8905 mem = adjust_address (m_tramp, Pmode, 2*UNITS_PER_WORD); | |
8906 emit_move_insn (mem, cxt); | |
8907 mem = adjust_address (m_tramp, Pmode, 3*UNITS_PER_WORD); | |
8908 emit_move_insn (mem, fnaddr); | |
8844 } | 8909 } |
8845 | 8910 |
8846 /* Output assembler code to FILE to increment profiler label # LABELNO | 8911 /* Output assembler code to FILE to increment profiler label # LABELNO |
8847 for profiling a function entry. */ | 8912 for profiling a function entry. */ |
8848 | 8913 |
9300 static bool | 9365 static bool |
9301 s390_fixed_condition_code_regs (unsigned int *p1, unsigned int *p2) | 9366 s390_fixed_condition_code_regs (unsigned int *p1, unsigned int *p2) |
9302 { | 9367 { |
9303 *p1 = CC_REGNUM; | 9368 *p1 = CC_REGNUM; |
9304 *p2 = INVALID_REGNUM; | 9369 *p2 = INVALID_REGNUM; |
9305 | 9370 |
9306 return true; | 9371 return true; |
9307 } | 9372 } |
9308 | 9373 |
9309 /* This function is used by the call expanders of the machine description. | 9374 /* This function is used by the call expanders of the machine description. |
9310 It emits the call insn itself together with the necessary operations | 9375 It emits the call insn itself together with the necessary operations |
9466 s390_update_frame_layout (); | 9531 s390_update_frame_layout (); |
9467 | 9532 |
9468 /* If all special registers are in fact used, there's nothing we | 9533 /* If all special registers are in fact used, there's nothing we |
9469 can do, so no point in walking the insn list. */ | 9534 can do, so no point in walking the insn list. */ |
9470 | 9535 |
9471 if (cfun_frame_layout.first_save_gpr <= BASE_REGNUM | 9536 if (cfun_frame_layout.first_save_gpr <= BASE_REGNUM |
9472 && cfun_frame_layout.last_save_gpr >= BASE_REGNUM | 9537 && cfun_frame_layout.last_save_gpr >= BASE_REGNUM |
9473 && (TARGET_CPU_ZARCH | 9538 && (TARGET_CPU_ZARCH |
9474 || (cfun_frame_layout.first_save_gpr <= RETURN_REGNUM | 9539 || (cfun_frame_layout.first_save_gpr <= RETURN_REGNUM |
9475 && cfun_frame_layout.last_save_gpr >= RETURN_REGNUM))) | 9540 && cfun_frame_layout.last_save_gpr >= RETURN_REGNUM))) |
9476 return; | 9541 return; |
9477 | 9542 |
9478 /* Search for prologue/epilogue insns and replace them. */ | 9543 /* Search for prologue/epilogue insns and replace them. */ |
9479 | 9544 |
9509 if (first > BASE_REGNUM || last < BASE_REGNUM) | 9574 if (first > BASE_REGNUM || last < BASE_REGNUM) |
9510 continue; | 9575 continue; |
9511 | 9576 |
9512 if (cfun_frame_layout.first_save_gpr != -1) | 9577 if (cfun_frame_layout.first_save_gpr != -1) |
9513 { | 9578 { |
9514 new_insn = save_gprs (base, | 9579 new_insn = save_gprs (base, |
9515 off + (cfun_frame_layout.first_save_gpr | 9580 off + (cfun_frame_layout.first_save_gpr |
9516 - first) * UNITS_PER_WORD, | 9581 - first) * UNITS_PER_WORD, |
9517 cfun_frame_layout.first_save_gpr, | 9582 cfun_frame_layout.first_save_gpr, |
9518 cfun_frame_layout.last_save_gpr); | 9583 cfun_frame_layout.last_save_gpr); |
9519 new_insn = emit_insn_before (new_insn, insn); | 9584 new_insn = emit_insn_before (new_insn, insn); |
9520 INSN_ADDRESSES_NEW (new_insn, -1); | 9585 INSN_ADDRESSES_NEW (new_insn, -1); |
9521 } | 9586 } |
9570 if (first > BASE_REGNUM || last < BASE_REGNUM) | 9635 if (first > BASE_REGNUM || last < BASE_REGNUM) |
9571 continue; | 9636 continue; |
9572 | 9637 |
9573 if (cfun_frame_layout.first_restore_gpr != -1) | 9638 if (cfun_frame_layout.first_restore_gpr != -1) |
9574 { | 9639 { |
9575 new_insn = restore_gprs (base, | 9640 new_insn = restore_gprs (base, |
9576 off + (cfun_frame_layout.first_restore_gpr | 9641 off + (cfun_frame_layout.first_restore_gpr |
9577 - first) * UNITS_PER_WORD, | 9642 - first) * UNITS_PER_WORD, |
9578 cfun_frame_layout.first_restore_gpr, | 9643 cfun_frame_layout.first_restore_gpr, |
9579 cfun_frame_layout.last_restore_gpr); | 9644 cfun_frame_layout.last_restore_gpr); |
9580 new_insn = emit_insn_before (new_insn, insn); | 9645 new_insn = emit_insn_before (new_insn, insn); |
9581 INSN_ADDRESSES_NEW (new_insn, -1); | 9646 INSN_ADDRESSES_NEW (new_insn, -1); |
9582 } | 9647 } |
10121 #define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem | 10186 #define TARGET_CANNOT_FORCE_CONST_MEM s390_cannot_force_const_mem |
10122 | 10187 |
10123 #undef TARGET_DELEGITIMIZE_ADDRESS | 10188 #undef TARGET_DELEGITIMIZE_ADDRESS |
10124 #define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address | 10189 #define TARGET_DELEGITIMIZE_ADDRESS s390_delegitimize_address |
10125 | 10190 |
10191 #undef TARGET_LEGITIMIZE_ADDRESS | |
10192 #define TARGET_LEGITIMIZE_ADDRESS s390_legitimize_address | |
10193 | |
10126 #undef TARGET_RETURN_IN_MEMORY | 10194 #undef TARGET_RETURN_IN_MEMORY |
10127 #define TARGET_RETURN_IN_MEMORY s390_return_in_memory | 10195 #define TARGET_RETURN_IN_MEMORY s390_return_in_memory |
10128 | 10196 |
10129 #undef TARGET_INIT_BUILTINS | 10197 #undef TARGET_INIT_BUILTINS |
10130 #define TARGET_INIT_BUILTINS s390_init_builtins | 10198 #define TARGET_INIT_BUILTINS s390_init_builtins |
10166 #undef TARGET_EXPAND_BUILTIN_VA_START | 10234 #undef TARGET_EXPAND_BUILTIN_VA_START |
10167 #define TARGET_EXPAND_BUILTIN_VA_START s390_va_start | 10235 #define TARGET_EXPAND_BUILTIN_VA_START s390_va_start |
10168 #undef TARGET_GIMPLIFY_VA_ARG_EXPR | 10236 #undef TARGET_GIMPLIFY_VA_ARG_EXPR |
10169 #define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg | 10237 #define TARGET_GIMPLIFY_VA_ARG_EXPR s390_gimplify_va_arg |
10170 | 10238 |
10171 #undef TARGET_PROMOTE_FUNCTION_ARGS | 10239 #undef TARGET_PROMOTE_FUNCTION_MODE |
10172 #define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_const_tree_true | 10240 #define TARGET_PROMOTE_FUNCTION_MODE s390_promote_function_mode |
10173 #undef TARGET_PROMOTE_FUNCTION_RETURN | |
10174 #define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_const_tree_true | |
10175 #undef TARGET_PASS_BY_REFERENCE | 10241 #undef TARGET_PASS_BY_REFERENCE |
10176 #define TARGET_PASS_BY_REFERENCE s390_pass_by_reference | 10242 #define TARGET_PASS_BY_REFERENCE s390_pass_by_reference |
10177 | 10243 |
10178 #undef TARGET_FUNCTION_OK_FOR_SIBCALL | 10244 #undef TARGET_FUNCTION_OK_FOR_SIBCALL |
10179 #define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall | 10245 #define TARGET_FUNCTION_OK_FOR_SIBCALL s390_function_ok_for_sibcall |
10207 #define TARGET_LIBGCC_CMP_RETURN_MODE s390_libgcc_cmp_return_mode | 10273 #define TARGET_LIBGCC_CMP_RETURN_MODE s390_libgcc_cmp_return_mode |
10208 | 10274 |
10209 #undef TARGET_LIBGCC_SHIFT_COUNT_MODE | 10275 #undef TARGET_LIBGCC_SHIFT_COUNT_MODE |
10210 #define TARGET_LIBGCC_SHIFT_COUNT_MODE s390_libgcc_shift_count_mode | 10276 #define TARGET_LIBGCC_SHIFT_COUNT_MODE s390_libgcc_shift_count_mode |
10211 | 10277 |
10278 #undef TARGET_LEGITIMATE_ADDRESS_P | |
10279 #define TARGET_LEGITIMATE_ADDRESS_P s390_legitimate_address_p | |
10280 | |
10281 #undef TARGET_CAN_ELIMINATE | |
10282 #define TARGET_CAN_ELIMINATE s390_can_eliminate | |
10283 | |
10284 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE | |
10285 #define TARGET_ASM_TRAMPOLINE_TEMPLATE s390_asm_trampoline_template | |
10286 #undef TARGET_TRAMPOLINE_INIT | |
10287 #define TARGET_TRAMPOLINE_INIT s390_trampoline_init | |
10288 | |
10212 struct gcc_target targetm = TARGET_INITIALIZER; | 10289 struct gcc_target targetm = TARGET_INITIALIZER; |
10213 | 10290 |
10214 #include "gt-s390.h" | 10291 #include "gt-s390.h" |