comparison gcc/config/rx/rx.c @ 111:04ced10e8804

gcc 7
author kono
date Fri, 27 Oct 2017 22:46:09 +0900
parents f6334be47118
children 84e7813d76e9
comparing 68:561a7518be6b with 111:04ced10e8804
1 /* Subroutines used for code generation on Renesas RX processors. 1 /* Subroutines used for code generation on Renesas RX processors.
2 Copyright (C) 2008, 2009, 2010, 2011 Free Software Foundation, Inc. 2 Copyright (C) 2008-2017 Free Software Foundation, Inc.
3 Contributed by Red Hat. 3 Contributed by Red Hat.
4 4
5 This file is part of GCC. 5 This file is part of GCC.
6 6
7 GCC is free software; you can redistribute it and/or modify 7 GCC is free software; you can redistribute it and/or modify
23 * Re-enable memory-to-memory copies and fix up reload. */ 23 * Re-enable memory-to-memory copies and fix up reload. */
24 24
25 #include "config.h" 25 #include "config.h"
26 #include "system.h" 26 #include "system.h"
27 #include "coretypes.h" 27 #include "coretypes.h"
28 #include "tm.h" 28 #include "backend.h"
29 #include "target.h"
30 #include "rtl.h"
29 #include "tree.h" 31 #include "tree.h"
30 #include "rtl.h" 32 #include "stringpool.h"
33 #include "attribs.h"
34 #include "cfghooks.h"
35 #include "df.h"
36 #include "memmodel.h"
37 #include "tm_p.h"
31 #include "regs.h" 38 #include "regs.h"
32 #include "hard-reg-set.h" 39 #include "emit-rtl.h"
33 #include "insn-config.h" 40 #include "diagnostic-core.h"
34 #include "conditions.h" 41 #include "varasm.h"
42 #include "stor-layout.h"
43 #include "calls.h"
35 #include "output.h" 44 #include "output.h"
36 #include "insn-attr.h"
37 #include "flags.h" 45 #include "flags.h"
38 #include "function.h" 46 #include "explow.h"
39 #include "expr.h" 47 #include "expr.h"
40 #include "optabs.h"
41 #include "libfuncs.h"
42 #include "recog.h"
43 #include "diagnostic-core.h"
44 #include "toplev.h" 48 #include "toplev.h"
45 #include "reload.h" 49 #include "langhooks.h"
46 #include "df.h" 50 #include "opts.h"
47 #include "ggc.h" 51 #include "builtins.h"
48 #include "tm_p.h" 52
49 #include "debug.h" 53 /* This file should be included last. */
50 #include "target.h"
51 #include "target-def.h" 54 #include "target-def.h"
52 #include "langhooks.h" 55
56 static unsigned int rx_gp_base_regnum_val = INVALID_REGNUM;
57 static unsigned int rx_pid_base_regnum_val = INVALID_REGNUM;
58 static unsigned int rx_num_interrupt_regs;
53 59
60 static unsigned int
61 rx_gp_base_regnum (void)
62 {
63 if (rx_gp_base_regnum_val == INVALID_REGNUM)
64 gcc_unreachable ();
65 return rx_gp_base_regnum_val;
66 }
67
68 static unsigned int
69 rx_pid_base_regnum (void)
70 {
71 if (rx_pid_base_regnum_val == INVALID_REGNUM)
72 gcc_unreachable ();
73 return rx_pid_base_regnum_val;
74 }
75
76 /* Find a SYMBOL_REF in a "standard" MEM address and return its decl. */
77
78 static tree
79 rx_decl_for_addr (rtx op)
80 {
81 if (GET_CODE (op) == MEM)
82 op = XEXP (op, 0);
83 if (GET_CODE (op) == CONST)
84 op = XEXP (op, 0);
85 while (GET_CODE (op) == PLUS)
86 op = XEXP (op, 0);
87 if (GET_CODE (op) == SYMBOL_REF)
88 return SYMBOL_REF_DECL (op);
89 return NULL_TREE;
90 }
91
54 static void rx_print_operand (FILE *, rtx, int); 92 static void rx_print_operand (FILE *, rtx, int);
55 93
56 #define CC_FLAG_S (1 << 0) 94 #define CC_FLAG_S (1 << 0)
57 #define CC_FLAG_Z (1 << 1) 95 #define CC_FLAG_Z (1 << 1)
58 #define CC_FLAG_O (1 << 2) 96 #define CC_FLAG_O (1 << 2)
59 #define CC_FLAG_C (1 << 3) 97 #define CC_FLAG_C (1 << 3)
60 #define CC_FLAG_FP (1 << 4) /* fake, to differentiate CC_Fmode */ 98 #define CC_FLAG_FP (1 << 4) /* Fake, to differentiate CC_Fmode. */
61 99
62 static unsigned int flags_from_mode (enum machine_mode mode); 100 static unsigned int flags_from_mode (machine_mode mode);
63 static unsigned int flags_from_code (enum rtx_code code); 101 static unsigned int flags_from_code (enum rtx_code code);
64
65 enum rx_cpu_types rx_cpu_type = RX600;
66 102
103 /* Return true if OP is a reference to an object in a PID data area. */
104
105 enum pid_type
106 {
107 PID_NOT_PID = 0, /* The object is not in the PID data area. */
108 PID_ENCODED, /* The object is in the PID data area. */
109 PID_UNENCODED /* The object will be placed in the PID data area, but it has not been placed there yet. */
110 };
111
112 static enum pid_type
113 rx_pid_data_operand (rtx op)
114 {
115 tree op_decl;
116
117 if (!TARGET_PID)
118 return PID_NOT_PID;
119
120 if (GET_CODE (op) == PLUS
121 && GET_CODE (XEXP (op, 0)) == REG
122 && GET_CODE (XEXP (op, 1)) == CONST
123 && GET_CODE (XEXP (XEXP (op, 1), 0)) == UNSPEC)
124 return PID_ENCODED;
125
126 op_decl = rx_decl_for_addr (op);
127
128 if (op_decl)
129 {
130 if (TREE_READONLY (op_decl))
131 return PID_UNENCODED;
132 }
133 else
134 {
135 /* Sigh, some special cases. */
136 if (GET_CODE (op) == SYMBOL_REF
137 || GET_CODE (op) == LABEL_REF)
138 return PID_UNENCODED;
139 }
140
141 return PID_NOT_PID;
142 }
143
144 static rtx
145 rx_legitimize_address (rtx x,
146 rtx oldx ATTRIBUTE_UNUSED,
147 machine_mode mode ATTRIBUTE_UNUSED)
148 {
149 if (rx_pid_data_operand (x) == PID_UNENCODED)
150 {
151 rtx rv = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), x);
152 return rv;
153 }
154
155 if (GET_CODE (x) == PLUS
156 && GET_CODE (XEXP (x, 0)) == PLUS
157 && REG_P (XEXP (XEXP (x, 0), 0))
158 && REG_P (XEXP (x, 1)))
159 return force_reg (SImode, x);
160
161 return x;
162 }
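The PID (position independent data) support added above turns references to read-only objects into offsets from a dedicated base register. A minimal sketch of source that exercises this path, assuming the backend's -mpid option; the identifiers are illustrative only:

/* Sketch only: with -mpid, rx_pid_data_operand () classifies the
   read-only 'table' as PID_UNENCODED and rx_legitimize_address ()
   rewrites the access via gen_pid_addr ().  */
static const int table[4] = { 1, 2, 3, 5 };

int
lookup (int i)
{
  return table[i];
}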
163
67 /* Return true if OP is a reference to an object in a small data area. */ 164 /* Return true if OP is a reference to an object in a small data area. */
68 165
69 static bool 166 static bool
70 rx_small_data_operand (rtx op) 167 rx_small_data_operand (rtx op)
71 { 168 {
77 174
78 return false; 175 return false;
79 } 176 }
80 177
81 static bool 178 static bool
82 rx_is_legitimate_address (Mmode mode, rtx x, bool strict ATTRIBUTE_UNUSED) 179 rx_is_legitimate_address (machine_mode mode, rtx x,
180 bool strict ATTRIBUTE_UNUSED)
83 { 181 {
84 if (RTX_OK_FOR_BASE (x, strict)) 182 if (RTX_OK_FOR_BASE (x, strict))
85 /* Register Indirect. */ 183 /* Register Indirect. */
86 return true; 184 return true;
87 185
88 if (GET_MODE_SIZE (mode) == 4 186 if ((GET_MODE_SIZE (mode) == 4
187 || GET_MODE_SIZE (mode) == 2
188 || GET_MODE_SIZE (mode) == 1)
89 && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)) 189 && (GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC))
90 /* Pre-decrement Register Indirect or 190 /* Pre-decrement Register Indirect or
91 Post-increment Register Indirect. */ 191 Post-increment Register Indirect. */
92 return RTX_OK_FOR_BASE (XEXP (x, 0), strict); 192 return RTX_OK_FOR_BASE (XEXP (x, 0), strict);
193
194 switch (rx_pid_data_operand (x))
195 {
196 case PID_UNENCODED:
197 return false;
198 case PID_ENCODED:
199 return true;
200 default:
201 break;
202 }
93 203
94 if (GET_CODE (x) == PLUS) 204 if (GET_CODE (x) == PLUS)
95 { 205 {
96 rtx arg1 = XEXP (x, 0); 206 rtx arg1 = XEXP (x, 0);
97 rtx arg2 = XEXP (x, 1); 207 rtx arg2 = XEXP (x, 1);
114 HOST_WIDE_INT val = INTVAL (index); 224 HOST_WIDE_INT val = INTVAL (index);
115 int factor; 225 int factor;
116 226
117 if (val < 0) 227 if (val < 0)
118 return false; 228 return false;
119 229
120 switch (GET_MODE_SIZE (mode)) 230 switch (GET_MODE_SIZE (mode))
121 { 231 {
122 default: 232 default:
123 case 4: factor = 4; break; 233 case 4: factor = 4; break;
124 case 2: factor = 2; break; 234 case 2: factor = 2; break;
157 267
158 /* Small data area accesses turn into register relative offsets. */ 268 /* Small data area accesses turn into register relative offsets. */
159 return rx_small_data_operand (x); 269 return rx_small_data_operand (x);
160 } 270 }
161 271
162 /* Returns TRUE for simple memory addreses, ie ones 272 /* Returns TRUE for simple memory addresses, ie ones
163 that do not involve register indirect addressing 273 that do not involve register indirect addressing
164 or pre/post increment/decrement. */ 274 or pre/post increment/decrement. */
165 275
166 bool 276 bool
167 rx_is_restricted_memory_address (rtx mem, enum machine_mode mode) 277 rx_is_restricted_memory_address (rtx mem, machine_mode mode)
168 { 278 {
169 rtx base, index;
170
171 if (! rx_is_legitimate_address 279 if (! rx_is_legitimate_address
172 (mode, mem, reload_in_progress || reload_completed)) 280 (mode, mem, reload_in_progress || reload_completed))
173 return false; 281 return false;
174 282
175 switch (GET_CODE (mem)) 283 switch (GET_CODE (mem))
181 case PRE_DEC: 289 case PRE_DEC:
182 case POST_INC: 290 case POST_INC:
183 return false; 291 return false;
184 292
185 case PLUS: 293 case PLUS:
186 /* Only allow REG+INT addressing. */ 294 {
187 base = XEXP (mem, 0); 295 rtx base, index;
188 index = XEXP (mem, 1); 296
189 297 /* Only allow REG+INT addressing. */
190 return RX_REG_P (base) && CONST_INT_P (index); 298 base = XEXP (mem, 0);
299 index = XEXP (mem, 1);
300
301 if (! RX_REG_P (base) || ! CONST_INT_P (index))
302 return false;
303
304 return IN_RANGE (INTVAL (index), 0, (0x10000 * GET_MODE_SIZE (mode)) - 1);
305 }
191 306
192 case SYMBOL_REF: 307 case SYMBOL_REF:
193 /* Can happen when small data is being supported. 308 /* Can happen when small data is being supported.
194 Assume that it will be resolved into GP+INT. */ 309 Assume that it will be resolved into GP+INT. */
195 return true; 310 return true;
200 } 315 }
201 316
202 /* Implement TARGET_MODE_DEPENDENT_ADDRESS_P. */ 317 /* Implement TARGET_MODE_DEPENDENT_ADDRESS_P. */
203 318
204 static bool 319 static bool
205 rx_mode_dependent_address_p (const_rtx addr) 320 rx_mode_dependent_address_p (const_rtx addr, addr_space_t as ATTRIBUTE_UNUSED)
206 { 321 {
207 if (GET_CODE (addr) == CONST) 322 if (GET_CODE (addr) == CONST)
208 addr = XEXP (addr, 0); 323 addr = XEXP (addr, 0);
209 324
210 switch (GET_CODE (addr)) 325 switch (GET_CODE (addr))
227 /* REG+REG only works in SImode. */ 342 /* REG+REG only works in SImode. */
228 return true; 343 return true;
229 344
230 case CONST_INT: 345 case CONST_INT:
231 /* REG+INT is only mode independent if INT is a 346 /* REG+INT is only mode independent if INT is a
232 multiple of 4, positive and will fit into 8-bits. */ 347 multiple of 4, positive and will fit into 16-bits. */
233 if (((INTVAL (addr) & 3) == 0) 348 if (((INTVAL (addr) & 3) == 0)
234 && IN_RANGE (INTVAL (addr), 4, 252)) 349 && IN_RANGE (INTVAL (addr), 4, 0xfffc))
235 return false; 350 return false;
236 return true; 351 return true;
237 352
238 case SYMBOL_REF: 353 case SYMBOL_REF:
239 case LABEL_REF: 354 case LABEL_REF:
240 return true; 355 return true;
241 356
242 case MULT: 357 case MULT:
243 gcc_assert (REG_P (XEXP (addr, 0)));
244 gcc_assert (CONST_INT_P (XEXP (addr, 1)));
245 /* REG+REG*SCALE is always mode dependent. */ 358 /* REG+REG*SCALE is always mode dependent. */
246 return true; 359 return true;
247 360
248 default: 361 default:
249 /* Not recognized, so treat as mode dependent. */ 362 /* Not recognized, so treat as mode dependent. */
267 /* A C compound statement to output to stdio stream FILE the 380 /* A C compound statement to output to stdio stream FILE the
268 assembler syntax for an instruction operand that is a memory 381 assembler syntax for an instruction operand that is a memory
269 reference whose address is ADDR. */ 382 reference whose address is ADDR. */
270 383
271 static void 384 static void
272 rx_print_operand_address (FILE * file, rtx addr) 385 rx_print_operand_address (FILE * file, machine_mode /*mode*/, rtx addr)
273 { 386 {
274 switch (GET_CODE (addr)) 387 switch (GET_CODE (addr))
275 { 388 {
276 case REG: 389 case REG:
277 fprintf (file, "["); 390 fprintf (file, "[");
328 case CONST: 441 case CONST:
329 if (GET_CODE (XEXP (addr, 0)) == UNSPEC) 442 if (GET_CODE (XEXP (addr, 0)) == UNSPEC)
330 { 443 {
331 addr = XEXP (addr, 0); 444 addr = XEXP (addr, 0);
332 gcc_assert (XINT (addr, 1) == UNSPEC_CONST); 445 gcc_assert (XINT (addr, 1) == UNSPEC_CONST);
333 446
334 addr = XVECEXP (addr, 0, 0); 447 addr = XVECEXP (addr, 0, 0);
335 gcc_assert (CONST_INT_P (addr)); 448 gcc_assert (CONST_INT_P (addr));
449 fprintf (file, "#");
450 output_addr_const (file, addr);
451 break;
336 } 452 }
453 fprintf (file, "#");
454 output_addr_const (file, XEXP (addr, 0));
455 break;
456
457 case UNSPEC:
458 addr = XVECEXP (addr, 0, 0);
337 /* Fall through. */ 459 /* Fall through. */
338 case LABEL_REF: 460 case LABEL_REF:
339 case SYMBOL_REF: 461 case SYMBOL_REF:
340 fprintf (file, "#"); 462 fprintf (file, "#");
341 463 /* Fall through. */
342 default: 464 default:
343 output_addr_const (file, addr); 465 output_addr_const (file, addr);
344 break; 466 break;
345 } 467 }
346 } 468 }
347 469
348 static void 470 static void
349 rx_print_integer (FILE * file, HOST_WIDE_INT val) 471 rx_print_integer (FILE * file, HOST_WIDE_INT val)
350 { 472 {
351 if (IN_RANGE (val, -64, 64)) 473 if (val < 64)
352 fprintf (file, HOST_WIDE_INT_PRINT_DEC, val); 474 fprintf (file, HOST_WIDE_INT_PRINT_DEC, val);
353 else 475 else
354 fprintf (file, 476 fprintf (file,
355 TARGET_AS100_SYNTAX 477 TARGET_AS100_SYNTAX
356 ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX, 478 ? "0%" HOST_WIDE_INT_PRINT "xH" : HOST_WIDE_INT_PRINT_HEX,
380 502
381 %A Print an operand without a leading # character. 503 %A Print an operand without a leading # character.
382 %B Print an integer comparison name. 504 %B Print an integer comparison name.
383 %C Print a control register name. 505 %C Print a control register name.
384 %F Print a condition code flag name. 506 %F Print a condition code flag name.
507 %G Register used for small-data-area addressing
385 %H Print high part of a DImode register, integer or address. 508 %H Print high part of a DImode register, integer or address.
386 %L Print low part of a DImode register, integer or address. 509 %L Print low part of a DImode register, integer or address.
387 %N Print the negation of the immediate value. 510 %N Print the negation of the immediate value.
511 %P Register used for PID addressing
388 %Q If the operand is a MEM, then correctly generate 512 %Q If the operand is a MEM, then correctly generate
389 register indirect or register relative addressing. */ 513 register indirect or register relative addressing.
514 %R Like %Q but for zero-extending loads. */
390 515
391 static void 516 static void
392 rx_print_operand (FILE * file, rtx op, int letter) 517 rx_print_operand (FILE * file, rtx op, int letter)
393 { 518 {
519 bool unsigned_load = false;
520 bool print_hash = true;
521
522 if (letter == 'A'
523 && ((GET_CODE (op) == CONST
524 && GET_CODE (XEXP (op, 0)) == UNSPEC)
525 || GET_CODE (op) == UNSPEC))
526 {
527 print_hash = false;
528 letter = 0;
529 }
530
394 switch (letter) 531 switch (letter)
395 { 532 {
396 case 'A': 533 case 'A':
397 /* Print an operand without a leading #. */ 534 /* Print an operand without a leading #. */
398 if (MEM_P (op)) 535 if (MEM_P (op))
414 break; 551 break;
415 552
416 case 'B': 553 case 'B':
417 { 554 {
418 enum rtx_code code = GET_CODE (op); 555 enum rtx_code code = GET_CODE (op);
419 enum machine_mode mode = GET_MODE (XEXP (op, 0)); 556 machine_mode mode = GET_MODE (XEXP (op, 0));
420 const char *ret; 557 const char *ret;
421 558
422 if (mode == CC_Fmode) 559 if (mode == CC_Fmode)
423 { 560 {
424 /* C flag is undefined, and O flag carries unordered. None of the 561 /* C flag is undefined, and O flag carries unordered. None of the
448 } 585 }
449 } 586 }
450 else 587 else
451 { 588 {
452 unsigned int flags = flags_from_mode (mode); 589 unsigned int flags = flags_from_mode (mode);
590
453 switch (code) 591 switch (code)
454 { 592 {
455 case LT: 593 case LT:
456 ret = (flags & CC_FLAG_O ? "lt" : "n"); 594 ret = (flags & CC_FLAG_O ? "lt" : "n");
457 break; 595 break;
493 631
494 case 'C': 632 case 'C':
495 gcc_assert (CONST_INT_P (op)); 633 gcc_assert (CONST_INT_P (op));
496 switch (INTVAL (op)) 634 switch (INTVAL (op))
497 { 635 {
498 case 0: fprintf (file, "psw"); break; 636 case CTRLREG_PSW: fprintf (file, "psw"); break;
499 case 2: fprintf (file, "usp"); break; 637 case CTRLREG_USP: fprintf (file, "usp"); break;
500 case 3: fprintf (file, "fpsw"); break; 638 case CTRLREG_FPSW: fprintf (file, "fpsw"); break;
501 case 4: fprintf (file, "cpen"); break; 639 case CTRLREG_CPEN: fprintf (file, "cpen"); break;
502 case 8: fprintf (file, "bpsw"); break; 640 case CTRLREG_BPSW: fprintf (file, "bpsw"); break;
503 case 9: fprintf (file, "bpc"); break; 641 case CTRLREG_BPC: fprintf (file, "bpc"); break;
504 case 0xa: fprintf (file, "isp"); break; 642 case CTRLREG_ISP: fprintf (file, "isp"); break;
505 case 0xb: fprintf (file, "fintv"); break; 643 case CTRLREG_FINTV: fprintf (file, "fintv"); break;
506 case 0xc: fprintf (file, "intb"); break; 644 case CTRLREG_INTB: fprintf (file, "intb"); break;
507 default: 645 default:
508 warning (0, "unreocgnized control register number: %d - using 'psw'", 646 warning (0, "unrecognized control register number: %d - using 'psw'",
509 (int) INTVAL (op)); 647 (int) INTVAL (op));
510 fprintf (file, "psw"); 648 fprintf (file, "psw");
511 break; 649 break;
512 } 650 }
513 break; 651 break;
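The %C print letter maps the control register numbers used by the mvfc/mvtc builtins (registered later in this file) to their assembler names, now via the CTRLREG_* macros rather than bare integers. A hedged usage sketch, assuming the numbering shown above where 0 selects psw:

/* Sketch only: __builtin_rx_mvfc () takes the same control register
   number that the %C handler translates into a name.  */
unsigned int
read_psw (void)
{
  return __builtin_rx_mvfc (0);   /* 0 -> "psw" in the table above */
}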
525 default: 663 default:
526 gcc_unreachable (); 664 gcc_unreachable ();
527 } 665 }
528 break; 666 break;
529 667
668 case 'G':
669 fprintf (file, "%s", reg_names [rx_gp_base_regnum ()]);
670 break;
671
530 case 'H': 672 case 'H':
531 switch (GET_CODE (op)) 673 switch (GET_CODE (op))
532 { 674 {
533 case REG: 675 case REG:
534 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]); 676 fprintf (file, "%s", reg_names [REGNO (op) + (WORDS_BIG_ENDIAN ? 0 : 1)]);
549 rx_print_integer (file, CONST_DOUBLE_HIGH (op)); 691 rx_print_integer (file, CONST_DOUBLE_HIGH (op));
550 break; 692 break;
551 case MEM: 693 case MEM:
552 if (! WORDS_BIG_ENDIAN) 694 if (! WORDS_BIG_ENDIAN)
553 op = adjust_address (op, SImode, 4); 695 op = adjust_address (op, SImode, 4);
554 output_address (XEXP (op, 0)); 696 output_address (GET_MODE (op), XEXP (op, 0));
555 break; 697 break;
556 default: 698 default:
557 gcc_unreachable (); 699 gcc_unreachable ();
558 } 700 }
559 break; 701 break;
573 rx_print_integer (file, CONST_DOUBLE_LOW (op)); 715 rx_print_integer (file, CONST_DOUBLE_LOW (op));
574 break; 716 break;
575 case MEM: 717 case MEM:
576 if (WORDS_BIG_ENDIAN) 718 if (WORDS_BIG_ENDIAN)
577 op = adjust_address (op, SImode, 4); 719 op = adjust_address (op, SImode, 4);
578 output_address (XEXP (op, 0)); 720 output_address (GET_MODE (op), XEXP (op, 0));
579 break; 721 break;
580 default: 722 default:
581 gcc_unreachable (); 723 gcc_unreachable ();
582 } 724 }
583 break; 725 break;
586 gcc_assert (CONST_INT_P (op)); 728 gcc_assert (CONST_INT_P (op));
587 fprintf (file, "#"); 729 fprintf (file, "#");
588 rx_print_integer (file, - INTVAL (op)); 730 rx_print_integer (file, - INTVAL (op));
589 break; 731 break;
590 732
733 case 'P':
734 fprintf (file, "%s", reg_names [rx_pid_base_regnum ()]);
735 break;
736
737 case 'R':
738 gcc_assert (GET_MODE_SIZE (GET_MODE (op)) <= 4);
739 unsigned_load = true;
740 /* Fall through. */
591 case 'Q': 741 case 'Q':
592 if (MEM_P (op)) 742 if (MEM_P (op))
593 { 743 {
594 HOST_WIDE_INT offset; 744 HOST_WIDE_INT offset;
745 rtx mem = op;
595 746
596 op = XEXP (op, 0); 747 op = XEXP (op, 0);
597 748
598 if (REG_P (op)) 749 if (REG_P (op))
599 offset = 0; 750 offset = 0;
624 775
625 fprintf (file, "["); 776 fprintf (file, "[");
626 rx_print_operand (file, op, 0); 777 rx_print_operand (file, op, 0);
627 fprintf (file, "]."); 778 fprintf (file, "].");
628 779
629 switch (GET_MODE_SIZE (GET_MODE (op))) 780 switch (GET_MODE_SIZE (GET_MODE (mem)))
630 { 781 {
631 case 1: 782 case 1:
632 gcc_assert (offset < 65535 * 1); 783 gcc_assert (offset <= 65535 * 1);
633 fprintf (file, "B"); 784 fprintf (file, unsigned_load ? "UB" : "B");
634 break; 785 break;
635 case 2: 786 case 2:
636 gcc_assert (offset % 2 == 0); 787 gcc_assert (offset % 2 == 0);
637 gcc_assert (offset < 65535 * 2); 788 gcc_assert (offset <= 65535 * 2);
638 fprintf (file, "W"); 789 fprintf (file, unsigned_load ? "UW" : "W");
790 break;
791 case 4:
792 gcc_assert (offset % 4 == 0);
793 gcc_assert (offset <= 65535 * 4);
794 fprintf (file, "L");
639 break; 795 break;
640 default: 796 default:
641 gcc_assert (offset % 4 == 0); 797 gcc_unreachable ();
642 gcc_assert (offset < 65535 * 4);
643 fprintf (file, "L");
644 break;
645 } 798 }
646 break; 799 break;
647 } 800 }
648 801
649 /* Fall through. */ 802 /* Fall through. */
650 803
651 default: 804 default:
805 if (GET_CODE (op) == CONST
806 && GET_CODE (XEXP (op, 0)) == UNSPEC)
807 op = XEXP (op, 0);
808 else if (GET_CODE (op) == CONST
809 && GET_CODE (XEXP (op, 0)) == PLUS
810 && GET_CODE (XEXP (XEXP (op, 0), 0)) == UNSPEC
811 && GET_CODE (XEXP (XEXP (op, 0), 1)) == CONST_INT)
812 {
813 if (print_hash)
814 fprintf (file, "#");
815 fprintf (file, "(");
816 rx_print_operand (file, XEXP (XEXP (op, 0), 0), 'A');
817 fprintf (file, " + ");
818 output_addr_const (file, XEXP (XEXP (op, 0), 1));
819 fprintf (file, ")");
820 return;
821 }
822
652 switch (GET_CODE (op)) 823 switch (GET_CODE (op))
653 { 824 {
654 case MULT: 825 case MULT:
655 /* Should be the scaled part of an 826 /* Should be the scaled part of an
656 indexed register indirect address. */ 827 indexed register indirect address. */
676 fprintf (file, "%s", reg_names [REGNO (base)]); 847 fprintf (file, "%s", reg_names [REGNO (base)]);
677 break; 848 break;
678 } 849 }
679 850
680 case MEM: 851 case MEM:
681 output_address (XEXP (op, 0)); 852 output_address (GET_MODE (op), XEXP (op, 0));
682 break; 853 break;
683 854
684 case PLUS: 855 case PLUS:
685 output_address (op); 856 output_address (VOIDmode, op);
686 break; 857 break;
687 858
688 case REG: 859 case REG:
689 gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER); 860 gcc_assert (REGNO (op) < FIRST_PSEUDO_REGISTER);
690 fprintf (file, "%s", reg_names [REGNO (op)]); 861 fprintf (file, "%s", reg_names [REGNO (op)]);
697 868
698 /* This will only be single precision.... */ 869 /* This will only be single precision.... */
699 case CONST_DOUBLE: 870 case CONST_DOUBLE:
700 { 871 {
701 unsigned long val; 872 unsigned long val;
702 REAL_VALUE_TYPE rv; 873
703 874 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (op), val);
704 REAL_VALUE_FROM_CONST_DOUBLE (rv, op); 875 if (print_hash)
705 REAL_VALUE_TO_TARGET_SINGLE (rv, val); 876 fprintf (file, "#");
706 fprintf (file, TARGET_AS100_SYNTAX ? "#0%lxH" : "#0x%lx", val); 877 fprintf (file, TARGET_AS100_SYNTAX ? "0%lxH" : "0x%lx", val);
707 break; 878 break;
708 } 879 }
709 880
710 case CONST_INT: 881 case CONST_INT:
711 fprintf (file, "#"); 882 if (print_hash)
883 fprintf (file, "#");
712 rx_print_integer (file, INTVAL (op)); 884 rx_print_integer (file, INTVAL (op));
713 break; 885 break;
714 886
887 case UNSPEC:
888 switch (XINT (op, 1))
889 {
890 case UNSPEC_PID_ADDR:
891 {
892 rtx sym, add;
893
894 if (print_hash)
895 fprintf (file, "#");
896 sym = XVECEXP (op, 0, 0);
897 add = NULL_RTX;
898 fprintf (file, "(");
899 if (GET_CODE (sym) == PLUS)
900 {
901 add = XEXP (sym, 1);
902 sym = XEXP (sym, 0);
903 }
904 output_addr_const (file, sym);
905 if (add != NULL_RTX)
906 {
907 fprintf (file, "+");
908 output_addr_const (file, add);
909 }
910 fprintf (file, "-__pid_base");
911 fprintf (file, ")");
912 return;
913 }
914 }
915 /* Fall through */
916
917 case CONST:
715 case SYMBOL_REF: 918 case SYMBOL_REF:
716 case CONST:
717 case LABEL_REF: 919 case LABEL_REF:
718 case CODE_LABEL: 920 case CODE_LABEL:
719 case UNSPEC: 921 rx_print_operand_address (file, VOIDmode, op);
720 rx_print_operand_address (file, op);
721 break; 922 break;
722 923
723 default: 924 default:
724 gcc_unreachable (); 925 gcc_unreachable ();
725 } 926 }
726 break; 927 break;
727 } 928 }
929 }
930
931 /* Maybe convert an operand into its PID format. */
932
933 rtx
934 rx_maybe_pidify_operand (rtx op, int copy_to_reg)
935 {
936 if (rx_pid_data_operand (op) == PID_UNENCODED)
937 {
938 if (GET_CODE (op) == MEM)
939 {
940 rtx a = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), XEXP (op, 0));
941 op = replace_equiv_address (op, a);
942 }
943 else
944 {
945 op = gen_pid_addr (gen_rtx_REG (SImode, rx_pid_base_regnum ()), op);
946 }
947
948 if (copy_to_reg)
949 op = copy_to_mode_reg (GET_MODE (op), op);
950 }
951 return op;
728 } 952 }
729 953
730 /* Returns an assembler template for a move instruction. */ 954 /* Returns an assembler template for a move instruction. */
731 955
732 char * 956 char *
740 rtx src = operands[1]; 964 rtx src = operands[1];
741 965
742 /* Decide which extension, if any, should be given to the move instruction. */ 966 /* Decide which extension, if any, should be given to the move instruction. */
743 switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src)) 967 switch (CONST_INT_P (src) ? GET_MODE (dest) : GET_MODE (src))
744 { 968 {
745 case QImode: 969 case E_QImode:
746 /* The .B extension is not valid when 970 /* The .B extension is not valid when
747 loading an immediate into a register. */ 971 loading an immediate into a register. */
748 if (! REG_P (dest) || ! CONST_INT_P (src)) 972 if (! REG_P (dest) || ! CONST_INT_P (src))
749 extension = ".B"; 973 extension = ".B";
750 break; 974 break;
751 case HImode: 975 case E_HImode:
752 if (! REG_P (dest) || ! CONST_INT_P (src)) 976 if (! REG_P (dest) || ! CONST_INT_P (src))
753 /* The .W extension is not valid when 977 /* The .W extension is not valid when
754 loading an immediate into a register. */ 978 loading an immediate into a register. */
755 extension = ".W"; 979 extension = ".W";
756 break; 980 break;
757 case SFmode: 981 case E_DFmode:
758 case SImode: 982 case E_DImode:
983 case E_SFmode:
984 case E_SImode:
759 extension = ".L"; 985 extension = ".L";
760 break; 986 break;
761 case VOIDmode: 987 case E_VOIDmode:
762 /* This mode is used by constants. */ 988 /* This mode is used by constants. */
763 break; 989 break;
764 default: 990 default:
765 debug_rtx (src); 991 debug_rtx (src);
766 gcc_unreachable (); 992 gcc_unreachable ();
767 } 993 }
768 994
769 if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0))) 995 if (MEM_P (src) && rx_pid_data_operand (XEXP (src, 0)) == PID_UNENCODED)
770 src_template = "%%gp(%A1)[r13]"; 996 {
997 gcc_assert (GET_MODE (src) != DImode);
998 gcc_assert (GET_MODE (src) != DFmode);
999
1000 src_template = "(%A1 - __pid_base)[%P1]";
1001 }
1002 else if (MEM_P (src) && rx_small_data_operand (XEXP (src, 0)))
1003 {
1004 gcc_assert (GET_MODE (src) != DImode);
1005 gcc_assert (GET_MODE (src) != DFmode);
1006
1007 src_template = "%%gp(%A1)[%G1]";
1008 }
771 else 1009 else
772 src_template = "%1"; 1010 src_template = "%1";
773 1011
774 if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0))) 1012 if (MEM_P (dest) && rx_small_data_operand (XEXP (dest, 0)))
775 dst_template = "%%gp(%A0)[r13]"; 1013 {
1014 gcc_assert (GET_MODE (dest) != DImode);
1015 gcc_assert (GET_MODE (dest) != DFmode);
1016
1017 dst_template = "%%gp(%A0)[%G0]";
1018 }
776 else 1019 else
777 dst_template = "%0"; 1020 dst_template = "%0";
778 1021
779 sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov", 1022 if (GET_MODE (dest) == DImode || GET_MODE (dest) == DFmode)
780 extension, src_template, dst_template); 1023 {
1024 gcc_assert (! is_movu);
1025
1026 if (REG_P (src) && REG_P (dest) && (REGNO (dest) == REGNO (src) + 1))
1027 sprintf (out_template, "mov.L\t%%H1, %%H0 ! mov.L\t%%1, %%0");
1028 else
1029 sprintf (out_template, "mov.L\t%%1, %%0 ! mov.L\t%%H1, %%H0");
1030 }
1031 else
1032 sprintf (out_template, "%s%s\t%s, %s", is_movu ? "movu" : "mov",
1033 extension, src_template, dst_template);
781 return out_template; 1034 return out_template;
782 } 1035 }
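rx_gen_move_template () picks the .B/.W/.L extension and, for small data or unencoded PID operands, substitutes the %%gp(...)[%G] or (... - __pid_base)[%P] operand forms. A short sketch of source whose load would reach those templates, assuming -msmall-data-limit is in effect; the variable name is illustrative:

/* Sketch only: a writable, initialised short is a small data candidate,
   so the load below is expected to use the "%%gp(%A1)[%G1]" source
   template with a .W extension (sign extension into an int).  */
short small_counter = 0;

int
read_counter (void)
{
  return small_counter;
}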
783 1036
784 /* Return VALUE rounded up to the next ALIGNMENT boundary. */ 1037 /* Return VALUE rounded up to the next ALIGNMENT boundary. */
785 1038
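The body of the round-up helper is not part of this hunk. For reference, the usual idiom for such a helper is shown below; this is an assumption about the shape of the code, not a quote of it:

/* Sketch only: round VALUE up to a power-of-two ALIGNMENT.  */
static unsigned int
round_up_sketch (unsigned int value, unsigned int alignment)
{
  return (value + alignment - 1) & ~(alignment - 1);
}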
792 1045
793 /* Return the number of bytes in the argument registers 1046 /* Return the number of bytes in the argument registers
794 occupied by an argument of type TYPE and mode MODE. */ 1047 occupied by an argument of type TYPE and mode MODE. */
795 1048
796 static unsigned int 1049 static unsigned int
797 rx_function_arg_size (Mmode mode, const_tree type) 1050 rx_function_arg_size (machine_mode mode, const_tree type)
798 { 1051 {
799 unsigned int num_bytes; 1052 unsigned int num_bytes;
800 1053
801 num_bytes = (mode == BLKmode) 1054 num_bytes = (mode == BLKmode)
802 ? int_size_in_bytes (type) : GET_MODE_SIZE (mode); 1055 ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
812 function and NAMED is false if the parameter is part of a variable 1065 function and NAMED is false if the parameter is part of a variable
813 parameter list, or the last named parameter before the start of a 1066 parameter list, or the last named parameter before the start of a
814 variable parameter list. */ 1067 variable parameter list. */
815 1068
816 static rtx 1069 static rtx
817 rx_function_arg (Fargs * cum, Mmode mode, const_tree type, bool named) 1070 rx_function_arg (cumulative_args_t cum, machine_mode mode,
1071 const_tree type, bool named)
818 { 1072 {
819 unsigned int next_reg; 1073 unsigned int next_reg;
820 unsigned int bytes_so_far = *cum; 1074 unsigned int bytes_so_far = *get_cumulative_args (cum);
821 unsigned int size; 1075 unsigned int size;
822 unsigned int rounded_size; 1076 unsigned int rounded_size;
823 1077
824 /* An exploded version of rx_function_arg_size. */ 1078 /* An exploded version of rx_function_arg_size. */
825 size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode); 1079 size = (mode == BLKmode) ? int_size_in_bytes (type) : GET_MODE_SIZE (mode);
849 1103
850 return gen_rtx_REG (mode, next_reg); 1104 return gen_rtx_REG (mode, next_reg);
851 } 1105 }
852 1106
853 static void 1107 static void
854 rx_function_arg_advance (Fargs * cum, Mmode mode, const_tree type, 1108 rx_function_arg_advance (cumulative_args_t cum, machine_mode mode,
855 bool named ATTRIBUTE_UNUSED) 1109 const_tree type, bool named ATTRIBUTE_UNUSED)
856 { 1110 {
857 *cum += rx_function_arg_size (mode, type); 1111 *get_cumulative_args (cum) += rx_function_arg_size (mode, type);
858 } 1112 }
859 1113
860 static unsigned int 1114 static unsigned int
861 rx_function_arg_boundary (Mmode mode ATTRIBUTE_UNUSED, 1115 rx_function_arg_boundary (machine_mode mode ATTRIBUTE_UNUSED,
862 const_tree type ATTRIBUTE_UNUSED) 1116 const_tree type ATTRIBUTE_UNUSED)
863 { 1117 {
864 return 32; 1118 /* Older versions of the RX backend aligned all on-stack arguments
1119 to 32-bits. The RX C ABI however says that they should be
1120 aligned to their natural alignment. (See section 5.2.2 of the ABI). */
1121 if (TARGET_GCC_ABI)
1122 return STACK_BOUNDARY;
1123
1124 if (type)
1125 {
1126 if (DECL_P (type))
1127 return DECL_ALIGN (type);
1128 return TYPE_ALIGN (type);
1129 }
1130
1131 return PARM_BOUNDARY;
865 } 1132 }
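The rewritten rx_function_arg_boundary () follows the RX C ABI, aligning on-stack arguments to their natural alignment instead of the older blanket 32-bit alignment (which is kept under TARGET_GCC_ABI). A sketch of a call whose stack arguments are affected, assuming the first four integer arguments travel in registers:

/* Sketch only: e and f do not fit in argument registers, so their
   stack slots are laid out by rx_function_arg_boundary ().  Under the
   RX ABI they are packed at natural (8-bit) alignment; under the GCC
   ABI each would occupy a 32-bit aligned slot.  */
int
sum6 (int a, int b, int c, int d, char e, char f)
{
  return a + b + c + d + e + f;
}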
866 1133
867 /* Return an RTL describing where a function return value of type RET_TYPE 1134 /* Return an RTL describing where a function return value of type RET_TYPE
868 is held. */ 1135 is held. */
869 1136
870 static rtx 1137 static rtx
871 rx_function_value (const_tree ret_type, 1138 rx_function_value (const_tree ret_type,
872 const_tree fn_decl_or_type ATTRIBUTE_UNUSED, 1139 const_tree fn_decl_or_type ATTRIBUTE_UNUSED,
873 bool outgoing ATTRIBUTE_UNUSED) 1140 bool outgoing ATTRIBUTE_UNUSED)
874 { 1141 {
875 enum machine_mode mode = TYPE_MODE (ret_type); 1142 machine_mode mode = TYPE_MODE (ret_type);
876 1143
877 /* RX ABI specifies that small integer types are 1144 /* RX ABI specifies that small integer types are
878 promoted to int when returned by a function. */ 1145 promoted to int when returned by a function. */
879 if (GET_MODE_SIZE (mode) > 0 1146 if (GET_MODE_SIZE (mode) > 0
880 && GET_MODE_SIZE (mode) < 4 1147 && GET_MODE_SIZE (mode) < 4
881 && ! COMPLEX_MODE_P (mode) 1148 && ! COMPLEX_MODE_P (mode)
1149 && ! VECTOR_TYPE_P (ret_type)
1150 && ! VECTOR_MODE_P (mode)
882 ) 1151 )
883 return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM); 1152 return gen_rtx_REG (SImode, FUNC_RETURN_REGNUM);
884 1153
885 return gen_rtx_REG (mode, FUNC_RETURN_REGNUM); 1154 return gen_rtx_REG (mode, FUNC_RETURN_REGNUM);
886 } 1155 }
887 1156
888 /* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with 1157 /* TARGET_PROMOTE_FUNCTION_MODE must behave in the same way with
889 regard to function returns as does TARGET_FUNCTION_VALUE. */ 1158 regard to function returns as does TARGET_FUNCTION_VALUE. */
890 1159
891 static enum machine_mode 1160 static machine_mode
892 rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED, 1161 rx_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
893 enum machine_mode mode, 1162 machine_mode mode,
894 int * punsignedp ATTRIBUTE_UNUSED, 1163 int * punsignedp ATTRIBUTE_UNUSED,
895 const_tree funtype ATTRIBUTE_UNUSED, 1164 const_tree funtype ATTRIBUTE_UNUSED,
896 int for_return) 1165 int for_return)
897 { 1166 {
898 if (for_return != 1 1167 if (for_return != 1
899 || GET_MODE_SIZE (mode) >= 4 1168 || GET_MODE_SIZE (mode) >= 4
900 || COMPLEX_MODE_P (mode) 1169 || COMPLEX_MODE_P (mode)
1170 || VECTOR_MODE_P (mode)
1171 || VECTOR_TYPE_P (type)
901 || GET_MODE_SIZE (mode) < 1) 1172 || GET_MODE_SIZE (mode) < 1)
902 return mode; 1173 return mode;
903 1174
904 return SImode; 1175 return SImode;
905 } 1176 }
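Per the comment above, small integer return values are widened to SImode so caller and callee agree on the register contents. A sketch:

/* Sketch only: although the declared return type is 'char', the value
   is handed back as a full SImode register because of
   rx_promote_function_mode () / rx_function_value ().  */
char
low_byte (int x)
{
  return (char) x;
}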
946 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE; 1217 return lookup_attribute (func_attr, DECL_ATTRIBUTES (decl)) != NULL_TREE;
947 } 1218 }
948 1219
949 /* Returns true if the provided function has the "fast_interrupt" attribute. */ 1220 /* Returns true if the provided function has the "fast_interrupt" attribute. */
950 1221
951 static inline bool 1222 bool
952 is_fast_interrupt_func (const_tree decl) 1223 is_fast_interrupt_func (const_tree decl)
953 { 1224 {
954 return has_func_attr (decl, "fast_interrupt"); 1225 return has_func_attr (decl, "fast_interrupt");
955 } 1226 }
956 1227
957 /* Returns true if the provided function has the "interrupt" attribute. */ 1228 /* Returns true if the provided function has the "interrupt" attribute. */
958 1229
959 static inline bool 1230 bool
960 is_interrupt_func (const_tree decl) 1231 is_interrupt_func (const_tree decl)
961 { 1232 {
962 return has_func_attr (decl, "interrupt"); 1233 return has_func_attr (decl, "interrupt");
963 } 1234 }
964 1235
975 static void 1246 static void
976 rx_conditional_register_usage (void) 1247 rx_conditional_register_usage (void)
977 { 1248 {
978 static bool using_fixed_regs = false; 1249 static bool using_fixed_regs = false;
979 1250
1251 if (TARGET_PID)
1252 {
1253 rx_pid_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
1254 fixed_regs[rx_pid_base_regnum_val] = call_used_regs [rx_pid_base_regnum_val] = 1;
1255 }
1256
980 if (rx_small_data_limit > 0) 1257 if (rx_small_data_limit > 0)
981 fixed_regs[GP_BASE_REGNUM] = call_used_regs [GP_BASE_REGNUM] = 1; 1258 {
1259 if (TARGET_PID)
1260 rx_gp_base_regnum_val = rx_pid_base_regnum_val - 1;
1261 else
1262 rx_gp_base_regnum_val = GP_BASE_REGNUM - rx_num_interrupt_regs;
1263
1264 fixed_regs[rx_gp_base_regnum_val] = call_used_regs [rx_gp_base_regnum_val] = 1;
1265 }
982 1266
983 if (use_fixed_regs != using_fixed_regs) 1267 if (use_fixed_regs != using_fixed_regs)
984 { 1268 {
985 static char saved_fixed_regs[FIRST_PSEUDO_REGISTER]; 1269 static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
986 static char saved_call_used_regs[FIRST_PSEUDO_REGISTER]; 1270 static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];
1016 1300
1017 using_fixed_regs = use_fixed_regs; 1301 using_fixed_regs = use_fixed_regs;
1018 } 1302 }
1019 } 1303 }
1020 1304
1305 struct decl_chain
1306 {
1307 tree fndecl;
1308 struct decl_chain * next;
1309 };
1310
1311 /* Stack of decls for which we have issued warnings. */
1312 static struct decl_chain * warned_decls = NULL;
1313
1314 static void
1315 add_warned_decl (tree fndecl)
1316 {
1317 struct decl_chain * warned = (struct decl_chain *) xmalloc (sizeof * warned);
1318
1319 warned->fndecl = fndecl;
1320 warned->next = warned_decls;
1321 warned_decls = warned;
1322 }
1323
1324 /* Returns TRUE if FNDECL is on our list of warned about decls. */
1325
1326 static bool
1327 already_warned (tree fndecl)
1328 {
1329 struct decl_chain * warned;
1330
1331 for (warned = warned_decls;
1332 warned != NULL;
1333 warned = warned->next)
1334 if (warned->fndecl == fndecl)
1335 return true;
1336
1337 return false;
1338 }
1339
1021 /* Perform any actions necessary before starting to compile FNDECL. 1340 /* Perform any actions necessary before starting to compile FNDECL.
1022 For the RX we use this to make sure that we have the correct 1341 For the RX we use this to make sure that we have the correct
1023 set of register masks selected. If FNDECL is NULL then we are 1342 set of register masks selected. If FNDECL is NULL then we are
1024 compiling top level things. */ 1343 compiling top level things. */
1025 1344
1046 1365
1047 if (prev_was_fast_interrupt != current_is_fast_interrupt) 1366 if (prev_was_fast_interrupt != current_is_fast_interrupt)
1048 { 1367 {
1049 use_fixed_regs = current_is_fast_interrupt; 1368 use_fixed_regs = current_is_fast_interrupt;
1050 target_reinit (); 1369 target_reinit ();
1370 }
1371
1372 if (current_is_fast_interrupt && rx_warn_multiple_fast_interrupts)
1373 {
1374 /* We do not warn about the first fast interrupt routine that
1375 we see. Instead we just push it onto the stack. */
1376 if (warned_decls == NULL)
1377 add_warned_decl (fndecl);
1378
1379 /* Otherwise if this fast interrupt is one for which we have
1380 not already issued a warning, generate one and then push
1381 it onto the stack as well. */
1382 else if (! already_warned (fndecl))
1383 {
1384 warning (0, "multiple fast interrupt routines seen: %qE and %qE",
1385 fndecl, warned_decls->fndecl);
1386 add_warned_decl (fndecl);
1387 }
1051 } 1388 }
1052 1389
1053 rx_previous_fndecl = fndecl; 1390 rx_previous_fndecl = fndecl;
1054 } 1391 }
1055 1392
1142 /* Always save all call clobbered registers inside non-leaf 1479 /* Always save all call clobbered registers inside non-leaf
1143 interrupt handlers, even if they are not live - they may 1480 interrupt handlers, even if they are not live - they may
1144 be used in (non-interrupt aware) routines called from this one. */ 1481 be used in (non-interrupt aware) routines called from this one. */
1145 || (call_used_regs[reg] 1482 || (call_used_regs[reg]
1146 && is_interrupt_func (NULL_TREE) 1483 && is_interrupt_func (NULL_TREE)
1147 && ! current_function_is_leaf)) 1484 && ! crtl->is_leaf))
1148 && (! call_used_regs[reg] 1485 && (! call_used_regs[reg]
1149 /* Even call clobbered registered must 1486 /* Even call clobbered registered must
1150 be pushed inside interrupt handlers. */ 1487 be pushed inside interrupt handlers. */
1151 || is_interrupt_func (NULL_TREE) 1488 || is_interrupt_func (NULL_TREE)
1152 /* Likewise for fast interrupt handlers, except registers r10 - 1489 /* Likewise for fast interrupt handlers, except registers r10 -
1199 We also use multiple PUSH instructions if there are any fixed registers 1536 We also use multiple PUSH instructions if there are any fixed registers
1200 between LOW and HIGH. The only way that this can happen is if the user 1537 between LOW and HIGH. The only way that this can happen is if the user
1201 has specified --fixed-<reg-name> on the command line and in such 1538 has specified --fixed-<reg-name> on the command line and in such
1202 circumstances we do not want to touch the fixed registers at all. 1539 circumstances we do not want to touch the fixed registers at all.
1203 1540
1541 Note also that the code in the prologue/epilogue handlers will
1542 automatically merge multiple PUSHes of adjacent registers into a single
1543 PUSHM.
1544
1204 FIXME: Is it worth improving this heuristic ? */ 1545 FIXME: Is it worth improving this heuristic ? */
1205 pushed_mask = (-1 << low) & ~(-1 << (high + 1)); 1546 pushed_mask = (HOST_WIDE_INT_M1U << low) & ~(HOST_WIDE_INT_M1U << (high + 1));
1206 unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask; 1547 unneeded_pushes = (pushed_mask & (~ save_mask)) & pushed_mask;
1207 1548
1208 if ((fixed_reg && fixed_reg <= high) 1549 if ((fixed_reg && fixed_reg <= high)
1209 || (optimize_function_for_speed_p (cfun) 1550 || (optimize_function_for_speed_p (cfun)
1210 && bit_count (save_mask) < bit_count (unneeded_pushes))) 1551 && bit_count (save_mask) < bit_count (unneeded_pushes)))
1265 rtx vector; 1606 rtx vector;
1266 1607
1267 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count)); 1608 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1268 1609
1269 XVECEXP (vector, 0, 0) = 1610 XVECEXP (vector, 0, 0) =
1270 gen_rtx_SET (VOIDmode, stack_pointer_rtx, 1611 gen_rtx_SET (stack_pointer_rtx,
1271 gen_rtx_MINUS (SImode, stack_pointer_rtx, 1612 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1272 GEN_INT ((count - 1) * UNITS_PER_WORD))); 1613 GEN_INT ((count - 1) * UNITS_PER_WORD)));
1273 1614
1274 for (i = 0; i < count - 1; i++) 1615 for (i = 0; i < count - 1; i++)
1275 XVECEXP (vector, 0, i + 1) = 1616 XVECEXP (vector, 0, i + 1) =
1276 gen_rtx_SET (VOIDmode, 1617 gen_rtx_SET (gen_rtx_MEM (SImode,
1277 gen_rtx_MEM (SImode,
1278 gen_rtx_MINUS (SImode, stack_pointer_rtx, 1618 gen_rtx_MINUS (SImode, stack_pointer_rtx,
1279 GEN_INT ((i + 1) * UNITS_PER_WORD))), 1619 GEN_INT ((i + 1) * UNITS_PER_WORD))),
1280 gen_rtx_REG (SImode, high - i)); 1620 gen_rtx_REG (SImode, high - i));
1281 return vector; 1621 return vector;
1282 } 1622 }
1307 used as operands, then any value is legitimate. */ 1647 used as operands, then any value is legitimate. */
1308 return true; 1648 return true;
1309 1649
1310 /* rx_max_constant_size specifies the maximum number 1650 /* rx_max_constant_size specifies the maximum number
1311 of bytes that can be used to hold a signed value. */ 1651 of bytes that can be used to hold a signed value. */
1312 return IN_RANGE (val, (-1 << (rx_max_constant_size * 8)), 1652 return IN_RANGE (val, (HOST_WIDE_INT_M1U << (rx_max_constant_size * 8)),
1313 ( 1 << (rx_max_constant_size * 8))); 1653 ( 1 << (rx_max_constant_size * 8)));
1314 } 1654 }
1315 1655
1316 /* Generate an ADD of SRC plus VAL into DEST. 1656 /* Generate an ADD of SRC plus VAL into DEST.
1317 Handles the case where VAL is too big for max_constant_value. 1657 Handles the case where VAL is too big for max_constant_value.
1340 if (is_frame_related) 1680 if (is_frame_related)
1341 /* We have to provide our own frame related note here 1681 /* We have to provide our own frame related note here
1342 as the dwarf2out code cannot be expected to grok 1682 as the dwarf2out code cannot be expected to grok
1343 our unspec. */ 1683 our unspec. */
1344 add_reg_note (insn, REG_FRAME_RELATED_EXPR, 1684 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
1345 gen_rtx_SET (SImode, dest, 1685 gen_rtx_SET (dest, gen_rtx_PLUS (SImode, src, val)));
1346 gen_rtx_PLUS (SImode, src, val)));
1347 return; 1686 return;
1348 } 1687 }
1349 1688
1350 if (is_frame_related) 1689 if (is_frame_related)
1351 RTX_FRAME_RELATED_P (insn) = 1; 1690 RTX_FRAME_RELATED_P (insn) = 1;
1352 return; 1691 }
1692
1693 static void
1694 push_regs (unsigned int high, unsigned int low)
1695 {
1696 rtx insn;
1697
1698 if (low == high)
1699 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
1700 else
1701 insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1) * UNITS_PER_WORD),
1702 gen_rx_store_vector (low, high)));
1703 mark_frame_related (insn);
1353 } 1704 }
1354 1705
1355 void 1706 void
1356 rx_expand_prologue (void) 1707 rx_expand_prologue (void)
1357 { 1708 {
1359 unsigned int frame_size; 1710 unsigned int frame_size;
1360 unsigned int mask; 1711 unsigned int mask;
1361 unsigned int low; 1712 unsigned int low;
1362 unsigned int high; 1713 unsigned int high;
1363 unsigned int reg; 1714 unsigned int reg;
1364 rtx insn;
1365 1715
1366 /* Naked functions use their own, programmer provided prologues. */ 1716 /* Naked functions use their own, programmer provided prologues. */
1367 if (is_naked_func (NULL_TREE)) 1717 if (is_naked_func (NULL_TREE))
1368 return; 1718 return;
1369 1719
1370 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size); 1720 rx_get_stack_layout (& low, & high, & mask, & frame_size, & stack_size);
1721
1722 if (flag_stack_usage_info)
1723 current_function_static_stack_size = frame_size + stack_size;
1371 1724
1372 /* If we use any of the callee-saved registers, save them now. */ 1725 /* If we use any of the callee-saved registers, save them now. */
1373 if (mask) 1726 if (mask)
1374 { 1727 {
1375 /* Push registers in reverse order. */ 1728 /* Push registers in reverse order. */
1376 for (reg = CC_REGNUM; reg --;) 1729 for (reg = CC_REGNUM; reg --;)
1377 if (mask & (1 << reg)) 1730 if (mask & (1 << reg))
1378 { 1731 {
1379 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, reg))); 1732 low = high = reg;
1380 mark_frame_related (insn); 1733
1734 /* Look for a span of registers.
1735 Note - we do not have to worry about -Os and whether
1736 it is better to use a single, longer PUSHM as
1737 rx_get_stack_layout has already done that for us. */
1738 while (reg-- > 0)
1739 if ((mask & (1 << reg)) == 0)
1740 break;
1741 else
1742 --low;
1743
1744 push_regs (high, low);
1745 if (reg == (unsigned) -1)
1746 break;
1381 } 1747 }
1382 } 1748 }
1383 else if (low) 1749 else if (low)
1384 { 1750 push_regs (high, low);
1385 if (high == low)
1386 insn = emit_insn (gen_stack_push (gen_rtx_REG (SImode, low)));
1387 else
1388 insn = emit_insn (gen_stack_pushm (GEN_INT (((high - low) + 1)
1389 * UNITS_PER_WORD),
1390 gen_rx_store_vector (low, high)));
1391 mark_frame_related (insn);
1392 }
1393 1751
1394 if (MUST_SAVE_ACC_REGISTER) 1752 if (MUST_SAVE_ACC_REGISTER)
1395 { 1753 {
1396 unsigned int acc_high, acc_low; 1754 unsigned int acc_high, acc_low;
1397 1755
1467 if (! frame_pointer_needed) 1825 if (! frame_pointer_needed)
1468 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx, 1826 gen_safe_add (stack_pointer_rtx, stack_pointer_rtx,
1469 GEN_INT (- (HOST_WIDE_INT) frame_size), true); 1827 GEN_INT (- (HOST_WIDE_INT) frame_size), true);
1470 else 1828 else
1471 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX, 1829 gen_safe_add (stack_pointer_rtx, frame_pointer_rtx, NULL_RTX,
1472 true); 1830 false /* False because the epilogue will use the FP not the SP. */);
1473 } 1831 }
1474 } 1832 }
1475 1833
1476 static void 1834 static void
1477 rx_output_function_prologue (FILE * file, 1835 add_vector_labels (FILE *file, const char *aname)
1478 HOST_WIDE_INT frame_size ATTRIBUTE_UNUSED) 1836 {
1479 { 1837 tree vec_attr;
1838 tree val_attr;
1839 const char *vname = "vect";
1840 const char *s;
1841 int vnum;
1842
1843 /* This node is for the vector/interrupt tag itself */
1844 vec_attr = lookup_attribute (aname, DECL_ATTRIBUTES (current_function_decl));
1845 if (!vec_attr)
1846 return;
1847
1848 /* Now point it at the first argument */
1849 vec_attr = TREE_VALUE (vec_attr);
1850
1851 /* Iterate through the arguments. */
1852 while (vec_attr)
1853 {
1854 val_attr = TREE_VALUE (vec_attr);
1855 switch (TREE_CODE (val_attr))
1856 {
1857 case STRING_CST:
1858 s = TREE_STRING_POINTER (val_attr);
1859 goto string_id_common;
1860
1861 case IDENTIFIER_NODE:
1862 s = IDENTIFIER_POINTER (val_attr);
1863
1864 string_id_common:
1865 if (strcmp (s, "$default") == 0)
1866 {
1867 fprintf (file, "\t.global\t$tableentry$default$%s\n", vname);
1868 fprintf (file, "$tableentry$default$%s:\n", vname);
1869 }
1870 else
1871 vname = s;
1872 break;
1873
1874 case INTEGER_CST:
1875 vnum = TREE_INT_CST_LOW (val_attr);
1876
1877 fprintf (file, "\t.global\t$tableentry$%d$%s\n", vnum, vname);
1878 fprintf (file, "$tableentry$%d$%s:\n", vnum, vname);
1879 break;
1880
1881 default:
1882 ;
1883 }
1884
1885 vec_attr = TREE_CHAIN (vec_attr);
1886 }
1887
1888 }
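add_vector_labels () walks the arguments of the interrupt/vector attributes and emits $tableentry$ labels for a linker-built vector table. A hedged sketch of a declaration it would act on; the exact attribute spelling is an assumption based on the STRING_CST/INTEGER_CST cases handled above:

/* Sketch only: an integer argument such as 5 makes the prologue emit
   ".global $tableentry$5$vect" and the matching label.  */
void __attribute__ ((interrupt, vector (5)))
timer_isr (void)
{
}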
1889
1890 static void
1891 rx_output_function_prologue (FILE * file)
1892 {
1893 add_vector_labels (file, "interrupt");
1894 add_vector_labels (file, "vector");
1895
1480 if (is_fast_interrupt_func (NULL_TREE)) 1896 if (is_fast_interrupt_func (NULL_TREE))
1481 asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n"); 1897 asm_fprintf (file, "\t; Note: Fast Interrupt Handler\n");
1482 1898
1483 if (is_interrupt_func (NULL_TREE)) 1899 if (is_interrupt_func (NULL_TREE))
1484 asm_fprintf (file, "\t; Note: Interrupt Handler\n"); 1900 asm_fprintf (file, "\t; Note: Interrupt Handler\n");
1536 rtx vector; 1952 rtx vector;
1537 1953
1538 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count)); 1954 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1539 1955
1540 XVECEXP (vector, 0, 0) = 1956 XVECEXP (vector, 0, 0) =
1541 gen_rtx_SET (VOIDmode, stack_pointer_rtx, 1957 gen_rtx_SET (stack_pointer_rtx,
1542 plus_constant (stack_pointer_rtx, adjust)); 1958 plus_constant (Pmode, stack_pointer_rtx, adjust));
1543 1959
1544 for (i = 0; i < count - 2; i++) 1960 for (i = 0; i < count - 2; i++)
1545 XVECEXP (vector, 0, i + 1) = 1961 XVECEXP (vector, 0, i + 1) =
1546 gen_rtx_SET (VOIDmode, 1962 gen_rtx_SET (gen_rtx_REG (SImode, low + i),
1547 gen_rtx_REG (SImode, low + i),
1548 gen_rtx_MEM (SImode, 1963 gen_rtx_MEM (SImode,
1549 i == 0 ? stack_pointer_rtx 1964 i == 0 ? stack_pointer_rtx
1550 : plus_constant (stack_pointer_rtx, 1965 : plus_constant (Pmode, stack_pointer_rtx,
1551 i * UNITS_PER_WORD))); 1966 i * UNITS_PER_WORD)));
1552 1967
1553 XVECEXP (vector, 0, count - 1) = gen_rtx_RETURN (VOIDmode); 1968 XVECEXP (vector, 0, count - 1) = ret_rtx;
1554 1969
1555 return vector; 1970 return vector;
1556 } 1971 }
1557 1972
1558 /* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate. */ 1973 /* Generate a PARALLEL which will satisfy the rx_load_multiple_vector predicate. */
1565 rtx vector; 1980 rtx vector;
1566 1981
1567 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count)); 1982 vector = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
1568 1983
1569 XVECEXP (vector, 0, 0) = 1984 XVECEXP (vector, 0, 0) =
1570 gen_rtx_SET (VOIDmode, stack_pointer_rtx, 1985 gen_rtx_SET (stack_pointer_rtx,
1571 plus_constant (stack_pointer_rtx, 1986 plus_constant (Pmode, stack_pointer_rtx,
1572 (count - 1) * UNITS_PER_WORD)); 1987 (count - 1) * UNITS_PER_WORD));
1573 1988
1574 for (i = 0; i < count - 1; i++) 1989 for (i = 0; i < count - 1; i++)
1575 XVECEXP (vector, 0, i + 1) = 1990 XVECEXP (vector, 0, i + 1) =
1576 gen_rtx_SET (VOIDmode, 1991 gen_rtx_SET (gen_rtx_REG (SImode, low + i),
1577 gen_rtx_REG (SImode, low + i),
1578 gen_rtx_MEM (SImode, 1992 gen_rtx_MEM (SImode,
1579 i == 0 ? stack_pointer_rtx 1993 i == 0 ? stack_pointer_rtx
1580 : plus_constant (stack_pointer_rtx, 1994 : plus_constant (Pmode, stack_pointer_rtx,
1581 i * UNITS_PER_WORD))); 1995 i * UNITS_PER_WORD)));
1582 1996
1583 return vector; 1997 return vector;
1584 } 1998 }
1585 1999
2000 /* Returns true if a simple return insn can be used. */
2001
2002 bool
2003 rx_can_use_simple_return (void)
2004 {
2005 unsigned int low;
2006 unsigned int high;
2007 unsigned int frame_size;
2008 unsigned int stack_size;
2009 unsigned int register_mask;
2010
2011 if (is_naked_func (NULL_TREE)
2012 || is_fast_interrupt_func (NULL_TREE)
2013 || is_interrupt_func (NULL_TREE))
2014 return false;
2015
2016 rx_get_stack_layout (& low, & high, & register_mask,
2017 & frame_size, & stack_size);
2018
2019 return (register_mask == 0
2020 && (frame_size + stack_size) == 0
2021 && low == 0);
2022 }
2023
2024 static void
2025 pop_regs (unsigned int high, unsigned int low)
2026 {
2027 if (high == low)
2028 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
2029 else
2030 emit_insn (gen_stack_popm (GEN_INT (((high - low) + 1) * UNITS_PER_WORD),
2031 gen_rx_popm_vector (low, high)));
2032 }
2033
1586 void 2034 void
1587 rx_expand_epilogue (bool is_sibcall) 2035 rx_expand_epilogue (bool is_sibcall)
1588 { 2036 {
1589 unsigned int low; 2037 unsigned int low;
1590 unsigned int high; 2038 unsigned int high;
1693 2141
1694 if (register_mask) 2142 if (register_mask)
1695 { 2143 {
1696 for (reg = 0; reg < CC_REGNUM; reg ++) 2144 for (reg = 0; reg < CC_REGNUM; reg ++)
1697 if (register_mask & (1 << reg)) 2145 if (register_mask & (1 << reg))
1698 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, reg))); 2146 {
2147 low = high = reg;
2148 while (register_mask & (1 << high))
2149 high ++;
2150 pop_regs (high - 1, low);
2151 reg = high;
2152 }
1699 } 2153 }
1700 else if (low) 2154 else if (low)
1701 { 2155 pop_regs (high, low);
1702 if (high == low)
1703 emit_insn (gen_stack_pop (gen_rtx_REG (SImode, low)));
1704 else
1705 emit_insn (gen_stack_popm (GEN_INT (regs_size),
1706 gen_rx_popm_vector (low, high)));
1707 }
1708 2156
1709 if (is_fast_interrupt_func (NULL_TREE)) 2157 if (is_fast_interrupt_func (NULL_TREE))
1710 { 2158 {
1711 gcc_assert (! is_sibcall); 2159 gcc_assert (! is_sibcall);
1712 emit_jump_insn (gen_fast_interrupt_return ()); 2160 emit_jump_insn (gen_fast_interrupt_return ());
1797 2245
1798 static bool 2246 static bool
1799 rx_in_small_data (const_tree decl) 2247 rx_in_small_data (const_tree decl)
1800 { 2248 {
1801 int size; 2249 int size;
1802 const_tree section; 2250 const char * section;
1803 2251
1804 if (rx_small_data_limit == 0) 2252 if (rx_small_data_limit == 0)
1805 return false; 2253 return false;
1806 2254
1807 if (TREE_CODE (decl) != VAR_DECL) 2255 if (TREE_CODE (decl) != VAR_DECL)
1816 if (TREE_READONLY (decl) || DECL_COMMON (decl)) 2264 if (TREE_READONLY (decl) || DECL_COMMON (decl))
1817 return false; 2265 return false;
1818 2266
1819 section = DECL_SECTION_NAME (decl); 2267 section = DECL_SECTION_NAME (decl);
1820 if (section) 2268 if (section)
1821 { 2269 return (strcmp (section, "D_2") == 0) || (strcmp (section, "B_2") == 0);
1822 const char * const name = TREE_STRING_POINTER (section);
1823
1824 return (strcmp (name, "D_2") == 0) || (strcmp (name, "B_2") == 0);
1825 }
1826 2270
1827 size = int_size_in_bytes (TREE_TYPE (decl)); 2271 size = int_size_in_bytes (TREE_TYPE (decl));
1828 2272
1829 return (size > 0) && (size <= rx_small_data_limit); 2273 return (size > 0) && (size <= rx_small_data_limit);
1830 } 2274 }
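rx_in_small_data () accepts small, writable, non-common variables, or anything explicitly placed in the D_2/B_2 sections. A sketch of a qualifying definition, assuming -msmall-data-limit is set to at least 4:

/* Sketch only: 4 bytes, writable, explicitly initialised so it is not
   DECL_COMMON; accessed relative to the GP base register chosen in
   rx_conditional_register_usage ().  */
int tick_count = 0;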
1831 2275
1832 /* Return a section for X. 2276 /* Return a section for X.
1833 The only special thing we do here is to honor small data. */ 2277 The only special thing we do here is to honor small data. */
1834 2278
1835 static section * 2279 static section *
1836 rx_select_rtx_section (enum machine_mode mode, 2280 rx_select_rtx_section (machine_mode mode,
1837 rtx x, 2281 rtx x,
1838 unsigned HOST_WIDE_INT align) 2282 unsigned HOST_WIDE_INT align)
1839 { 2283 {
1840 if (rx_small_data_limit > 0 2284 if (rx_small_data_limit > 0
1841 && GET_MODE_SIZE (mode) <= rx_small_data_limit 2285 && GET_MODE_SIZE (mode) <= rx_small_data_limit
1909 RX_BUILTIN_SETPSW, 2353 RX_BUILTIN_SETPSW,
1910 RX_BUILTIN_WAIT, 2354 RX_BUILTIN_WAIT,
1911 RX_BUILTIN_max 2355 RX_BUILTIN_max
1912 }; 2356 };
1913 2357
2358 static GTY(()) tree rx_builtins[(int) RX_BUILTIN_max];
2359
1914 static void 2360 static void
1915 rx_init_builtins (void) 2361 rx_init_builtins (void)
1916 { 2362 {
2363 #define ADD_RX_BUILTIN0(UC_NAME, LC_NAME, RET_TYPE) \
2364 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2365 add_builtin_function ("__builtin_rx_" LC_NAME, \
2366 build_function_type_list (RET_TYPE##_type_node, \
2367 NULL_TREE), \
2368 RX_BUILTIN_##UC_NAME, \
2369 BUILT_IN_MD, NULL, NULL_TREE)
2370
1917 #define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE) \ 2371 #define ADD_RX_BUILTIN1(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE) \
1918 add_builtin_function ("__builtin_rx_" LC_NAME, \ 2372 rx_builtins[RX_BUILTIN_##UC_NAME] = \
2373 add_builtin_function ("__builtin_rx_" LC_NAME, \
1919 build_function_type_list (RET_TYPE##_type_node, \ 2374 build_function_type_list (RET_TYPE##_type_node, \
1920 ARG_TYPE##_type_node, \ 2375 ARG_TYPE##_type_node, \
1921 NULL_TREE), \ 2376 NULL_TREE), \
1922 RX_BUILTIN_##UC_NAME, \ 2377 RX_BUILTIN_##UC_NAME, \
1923 BUILT_IN_MD, NULL, NULL_TREE) 2378 BUILT_IN_MD, NULL, NULL_TREE)
1924 2379
1925 #define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \ 2380 #define ADD_RX_BUILTIN2(UC_NAME, LC_NAME, RET_TYPE, ARG_TYPE1, ARG_TYPE2) \
2381 rx_builtins[RX_BUILTIN_##UC_NAME] = \
1926 add_builtin_function ("__builtin_rx_" LC_NAME, \ 2382 add_builtin_function ("__builtin_rx_" LC_NAME, \
1927 build_function_type_list (RET_TYPE##_type_node, \ 2383 build_function_type_list (RET_TYPE##_type_node, \
1928 ARG_TYPE1##_type_node,\ 2384 ARG_TYPE1##_type_node,\
1929 ARG_TYPE2##_type_node,\ 2385 ARG_TYPE2##_type_node,\
1930 NULL_TREE), \ 2386 NULL_TREE), \
1931 RX_BUILTIN_##UC_NAME, \ 2387 RX_BUILTIN_##UC_NAME, \
1932 BUILT_IN_MD, NULL, NULL_TREE) 2388 BUILT_IN_MD, NULL, NULL_TREE)
1933 2389
1934 #define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \ 2390 #define ADD_RX_BUILTIN3(UC_NAME,LC_NAME,RET_TYPE,ARG_TYPE1,ARG_TYPE2,ARG_TYPE3) \
2391 rx_builtins[RX_BUILTIN_##UC_NAME] = \
1935 add_builtin_function ("__builtin_rx_" LC_NAME, \ 2392 add_builtin_function ("__builtin_rx_" LC_NAME, \
1936 build_function_type_list (RET_TYPE##_type_node, \ 2393 build_function_type_list (RET_TYPE##_type_node, \
1937 ARG_TYPE1##_type_node,\ 2394 ARG_TYPE1##_type_node,\
1938 ARG_TYPE2##_type_node,\ 2395 ARG_TYPE2##_type_node,\
1939 ARG_TYPE3##_type_node,\ 2396 ARG_TYPE3##_type_node,\
1940 NULL_TREE), \ 2397 NULL_TREE), \
1941 RX_BUILTIN_##UC_NAME, \ 2398 RX_BUILTIN_##UC_NAME, \
1942 BUILT_IN_MD, NULL, NULL_TREE) 2399 BUILT_IN_MD, NULL, NULL_TREE)
1943 2400
1944 ADD_RX_BUILTIN1 (BRK, "brk", void, void); 2401 ADD_RX_BUILTIN0 (BRK, "brk", void);
1945 ADD_RX_BUILTIN1 (CLRPSW, "clrpsw", void, integer); 2402 ADD_RX_BUILTIN1 (CLRPSW, "clrpsw", void, integer);
1946 ADD_RX_BUILTIN1 (SETPSW, "setpsw", void, integer); 2403 ADD_RX_BUILTIN1 (SETPSW, "setpsw", void, integer);
1947 ADD_RX_BUILTIN1 (INT, "int", void, integer); 2404 ADD_RX_BUILTIN1 (INT, "int", void, integer);
1948 ADD_RX_BUILTIN2 (MACHI, "machi", void, intSI, intSI); 2405 ADD_RX_BUILTIN2 (MACHI, "machi", void, intSI, intSI);
1949 ADD_RX_BUILTIN2 (MACLO, "maclo", void, intSI, intSI); 2406 ADD_RX_BUILTIN2 (MACLO, "maclo", void, intSI, intSI);
1950 ADD_RX_BUILTIN2 (MULHI, "mulhi", void, intSI, intSI); 2407 ADD_RX_BUILTIN2 (MULHI, "mulhi", void, intSI, intSI);
1951 ADD_RX_BUILTIN2 (MULLO, "mullo", void, intSI, intSI); 2408 ADD_RX_BUILTIN2 (MULLO, "mullo", void, intSI, intSI);
1952 ADD_RX_BUILTIN1 (MVFACHI, "mvfachi", intSI, void); 2409 ADD_RX_BUILTIN0 (MVFACHI, "mvfachi", intSI);
1953 ADD_RX_BUILTIN1 (MVFACMI, "mvfacmi", intSI, void); 2410 ADD_RX_BUILTIN0 (MVFACMI, "mvfacmi", intSI);
1954 ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void, intSI); 2411 ADD_RX_BUILTIN1 (MVTACHI, "mvtachi", void, intSI);
1955 ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void, intSI); 2412 ADD_RX_BUILTIN1 (MVTACLO, "mvtaclo", void, intSI);
1956 ADD_RX_BUILTIN1 (RMPA, "rmpa", void, void); 2413 ADD_RX_BUILTIN0 (RMPA, "rmpa", void);
1957 ADD_RX_BUILTIN1 (MVFC, "mvfc", intSI, integer); 2414 ADD_RX_BUILTIN1 (MVFC, "mvfc", intSI, integer);
1958 ADD_RX_BUILTIN2 (MVTC, "mvtc", void, integer, integer); 2415 ADD_RX_BUILTIN2 (MVTC, "mvtc", void, integer, integer);
1959 ADD_RX_BUILTIN1 (MVTIPL, "mvtipl", void, integer); 2416 ADD_RX_BUILTIN1 (MVTIPL, "mvtipl", void, integer);
1960 ADD_RX_BUILTIN1 (RACW, "racw", void, integer); 2417 ADD_RX_BUILTIN1 (RACW, "racw", void, integer);
1961 ADD_RX_BUILTIN1 (ROUND, "round", intSI, float); 2418 ADD_RX_BUILTIN1 (ROUND, "round", intSI, float);
1962 ADD_RX_BUILTIN1 (REVW, "revw", intSI, intSI); 2419 ADD_RX_BUILTIN1 (REVW, "revw", intSI, intSI);
1963 ADD_RX_BUILTIN1 (WAIT, "wait", void, void); 2420 ADD_RX_BUILTIN0 (WAIT, "wait", void);
2421 }
2422
2423 /* Return the RX builtin for CODE. */
2424
2425 static tree
2426 rx_builtin_decl (unsigned code, bool initialize_p ATTRIBUTE_UNUSED)
2427 {
2428 if (code >= RX_BUILTIN_max)
2429 return error_mark_node;
2430
2431 return rx_builtins[code];
1964 } 2432 }
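
As a hypothetical illustration (not part of this file), the builtins registered above are invoked from RX application code using the "__builtin_rx_" prefix built by the ADD_RX_BUILTIN macros; the surrounding function and argument values below are invented for the example:

    void
    example (void)
    {
      int hi = __builtin_rx_mvfachi ();  /* read the accumulator's high word */
      hi = __builtin_rx_revw (hi);       /* apply the RX REVW instruction */
      __builtin_rx_mvtaclo (hi);         /* write the accumulator's low word */
      __builtin_rx_brk ();               /* raise a break exception */
    }
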
1965 2433
1966 static rtx 2434 static rtx
1967 rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg) 2435 rx_expand_void_builtin_1_arg (rtx arg, rtx (* gen_func)(rtx), bool reg)
1968 { 2436 {
2113 2581
2114 static rtx 2582 static rtx
2115 rx_expand_builtin (tree exp, 2583 rx_expand_builtin (tree exp,
2116 rtx target, 2584 rtx target,
2117 rtx subtarget ATTRIBUTE_UNUSED, 2585 rtx subtarget ATTRIBUTE_UNUSED,
2118 enum machine_mode mode ATTRIBUTE_UNUSED, 2586 machine_mode mode ATTRIBUTE_UNUSED,
2119 int ignore ATTRIBUTE_UNUSED) 2587 int ignore ATTRIBUTE_UNUSED)
2120 { 2588 {
2121 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0); 2589 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
2122 tree arg = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE; 2590 tree arg = call_expr_nargs (exp) >= 1 ? CALL_EXPR_ARG (exp, 0) : NULL_TREE;
2123 rtx op = arg ? expand_normal (arg) : NULL_RTX; 2591 rtx op = arg ? expand_normal (arg) : NULL_RTX;
2146 (target, gen_mvfacmi); 2614 (target, gen_mvfacmi);
2147 case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg 2615 case RX_BUILTIN_MVTACHI: return rx_expand_void_builtin_1_arg
2148 (op, gen_mvtachi, true); 2616 (op, gen_mvtachi, true);
2149 case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg 2617 case RX_BUILTIN_MVTACLO: return rx_expand_void_builtin_1_arg
2150 (op, gen_mvtaclo, true); 2618 (op, gen_mvtaclo, true);
2151 case RX_BUILTIN_RMPA: emit_insn (gen_rmpa ()); return NULL_RTX; 2619 case RX_BUILTIN_RMPA:
2620 if (rx_allow_string_insns)
2621 emit_insn (gen_rmpa ());
2622 else
2623 error ("-mno-allow-string-insns forbids the generation of the RMPA instruction");
2624 return NULL_RTX;
2152 case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target); 2625 case RX_BUILTIN_MVFC: return rx_expand_builtin_mvfc (arg, target);
2153 case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp); 2626 case RX_BUILTIN_MVTC: return rx_expand_builtin_mvtc (exp);
2154 case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op); 2627 case RX_BUILTIN_MVTIPL: return rx_expand_builtin_mvtipl (op);
2155 case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg 2628 case RX_BUILTIN_RACW: return rx_expand_void_builtin_1_arg
2156 (op, gen_racw, false); 2629 (op, gen_racw, false);
2211 /* Check "fast_interrupt", "interrupt" and "naked" attributes. */ 2684 /* Check "fast_interrupt", "interrupt" and "naked" attributes. */
2212 2685
2213 static tree 2686 static tree
2214 rx_handle_func_attribute (tree * node, 2687 rx_handle_func_attribute (tree * node,
2215 tree name, 2688 tree name,
2216 tree args, 2689 tree args ATTRIBUTE_UNUSED,
2217 int flags ATTRIBUTE_UNUSED, 2690 int flags ATTRIBUTE_UNUSED,
2218 bool * no_add_attrs) 2691 bool * no_add_attrs)
2219 { 2692 {
2220 gcc_assert (DECL_P (* node)); 2693 gcc_assert (DECL_P (* node));
2221 gcc_assert (args == NULL_TREE);
2222 2694
2223 if (TREE_CODE (* node) != FUNCTION_DECL) 2695 if (TREE_CODE (* node) != FUNCTION_DECL)
2224 { 2696 {
2225 warning (OPT_Wattributes, "%qE attribute only applies to functions", 2697 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2226 name); 2698 name);
2232 /* FIXME: We ought to check that the interrupt and exception 2704 /* FIXME: We ought to check that the interrupt and exception
2233 handler attributes have been applied to void functions. */ 2705 handler attributes have been applied to void functions. */
2234 return NULL_TREE; 2706 return NULL_TREE;
2235 } 2707 }
2236 2708
2709 /* Check "vector" attribute. */
2710
2711 static tree
2712 rx_handle_vector_attribute (tree * node,
2713 tree name,
2714 tree args,
2715 int flags ATTRIBUTE_UNUSED,
2716 bool * no_add_attrs)
2717 {
2718 gcc_assert (DECL_P (* node));
2719 gcc_assert (args != NULL_TREE);
2720
2721 if (TREE_CODE (* node) != FUNCTION_DECL)
2722 {
2723 warning (OPT_Wattributes, "%qE attribute only applies to functions",
2724 name);
2725 * no_add_attrs = true;
2726 }
2727
2728 return NULL_TREE;
2729 }
2730
2237 /* Table of RX specific attributes. */ 2731 /* Table of RX specific attributes. */
2238 const struct attribute_spec rx_attribute_table[] = 2732 const struct attribute_spec rx_attribute_table[] =
2239 { 2733 {
2240 /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler. */ 2734 /* Name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
2241 { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute }, 2735 affects_type_identity. */
2242 { "interrupt", 0, 0, true, false, false, rx_handle_func_attribute }, 2736 { "fast_interrupt", 0, 0, true, false, false, rx_handle_func_attribute,
2243 { "naked", 0, 0, true, false, false, rx_handle_func_attribute }, 2737 false },
2244 { NULL, 0, 0, false, false, false, NULL } 2738 { "interrupt", 0, -1, true, false, false, rx_handle_func_attribute,
2739 false },
2740 { "naked", 0, 0, true, false, false, rx_handle_func_attribute,
2741 false },
2742 { "vector", 1, -1, true, false, false, rx_handle_vector_attribute,
2743 false },
2744 { NULL, 0, 0, false, false, false, NULL, false }
2245 }; 2745 };
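
As a hypothetical illustration of this table (not taken from this file), the attributes are attached to RX function declarations as shown below; the vector number 28 is invented for the example:

    void timer_isr (void) __attribute__ ((interrupt, vector (28)));
    void fast_isr  (void) __attribute__ ((fast_interrupt));
    void boot_stub (void) __attribute__ ((naked));
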
2246
2247 /* Extra processing for target specific command line options. */
2248
2249 static bool
2250 rx_handle_option (size_t code, const char * arg ATTRIBUTE_UNUSED, int value)
2251 {
2252 switch (code)
2253 {
2254 case OPT_mint_register_:
2255 switch (value)
2256 {
2257 case 4:
2258 fixed_regs[10] = call_used_regs [10] = 1;
2259 /* Fall through. */
2260 case 3:
2261 fixed_regs[11] = call_used_regs [11] = 1;
2262 /* Fall through. */
2263 case 2:
2264 fixed_regs[12] = call_used_regs [12] = 1;
2265 /* Fall through. */
2266 case 1:
2267 fixed_regs[13] = call_used_regs [13] = 1;
2268 /* Fall through. */
2269 case 0:
2270 return true;
2271 default:
2272 return false;
2273 }
2274 break;
2275
2276 case OPT_mmax_constant_size_:
2277 /* Make sure that the -mmax-constant-size option is in range. */
2278 return value >= 0 && value <= 4;
2279
2280 case OPT_mcpu_:
2281 if (strcasecmp (arg, "RX610") == 0)
2282 rx_cpu_type = RX610;
2283 else if (strcasecmp (arg, "RX200") == 0)
2284 {
2285 target_flags |= MASK_NO_USE_FPU;
2286 rx_cpu_type = RX200;
2287 }
2288 else if (strcasecmp (arg, "RX600") != 0)
2289 warning (0, "unrecognized argument '%s' to -mcpu= option", arg);
2290 break;
2291
2292 case OPT_fpu:
2293 if (rx_cpu_type == RX200)
2294 error ("the RX200 cpu does not have FPU hardware");
2295 break;
2296
2297 default:
2298 break;
2299 }
2300
2301 return true;
2302 }
2303 2746
2304 /* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE. */ 2747 /* Implement TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE. */
2305 2748
2306 static void 2749 static void
2307 rx_override_options_after_change (void) 2750 rx_override_options_after_change (void)
2329 } 2772 }
2330 2773
2331 static void 2774 static void
2332 rx_option_override (void) 2775 rx_option_override (void)
2333 { 2776 {
2777 unsigned int i;
2778 cl_deferred_option *opt;
2779 vec<cl_deferred_option> *v = (vec<cl_deferred_option> *) rx_deferred_options;
2780
2781 if (v)
2782 FOR_EACH_VEC_ELT (*v, i, opt)
2783 {
2784 switch (opt->opt_index)
2785 {
2786 case OPT_mint_register_:
2787 switch (opt->value)
2788 {
2789 case 4:
2790 fixed_regs[10] = call_used_regs [10] = 1;
2791 /* Fall through. */
2792 case 3:
2793 fixed_regs[11] = call_used_regs [11] = 1;
2794 /* Fall through. */
2795 case 2:
2796 fixed_regs[12] = call_used_regs [12] = 1;
2797 /* Fall through. */
2798 case 1:
2799 fixed_regs[13] = call_used_regs [13] = 1;
2800 /* Fall through. */
2801 case 0:
2802 rx_num_interrupt_regs = opt->value;
2803 break;
2804 default:
2805 rx_num_interrupt_regs = 0;
2806 /* Error message already given because rx_handle_option
2807 returned false. */
2808 break;
2809 }
2810 break;
2811
2812 default:
2813 gcc_unreachable ();
2814 }
2815 }
2816
2334 /* This target defaults to strict volatile bitfields. */ 2817 /* This target defaults to strict volatile bitfields. */
2335 if (flag_strict_volatile_bitfields < 0) 2818 if (flag_strict_volatile_bitfields < 0 && abi_version_at_least(2))
2336 flag_strict_volatile_bitfields = 1; 2819 flag_strict_volatile_bitfields = 1;
2337 2820
2338 rx_override_options_after_change (); 2821 rx_override_options_after_change ();
2339 } 2822
2340 2823 /* These values are bytes, not log. */
2341 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */ 2824 if (align_jumps == 0 && ! optimize_size)
2342 static const struct default_options rx_option_optimization_table[] = 2825 align_jumps = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
2343 { 2826 if (align_loops == 0 && ! optimize_size)
2344 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 }, 2827 align_loops = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
2345 { OPT_LEVELS_NONE, 0, NULL, 0 } 2828 if (align_labels == 0 && ! optimize_size)
2346 }; 2829 align_labels = ((rx_cpu_type == RX100 || rx_cpu_type == RX200) ? 4 : 8);
2830 }
2347 2831
2348 2832
2349 static bool 2833 static bool
2350 rx_allocate_stack_slots_for_args (void) 2834 rx_allocate_stack_slots_for_args (void)
2351 { 2835 {
2359 return ! is_fast_interrupt_func (decl) 2843 return ! is_fast_interrupt_func (decl)
2360 && ! is_interrupt_func (decl) 2844 && ! is_interrupt_func (decl)
2361 && ! is_naked_func (decl); 2845 && ! is_naked_func (decl);
2362 } 2846 }
2363 2847
2848 static bool
2849 rx_warn_func_return (tree decl)
2850 {
2851 /* Naked functions are implemented entirely in assembly, including the
2852 return sequence, so suppress warnings about this. */
2853 return !is_naked_func (decl);
2854 }
2855
2364 /* Return nonzero if it is ok to make a tail-call to DECL, 2856 /* Return nonzero if it is ok to make a tail-call to DECL,
2365 a function_decl or NULL if this is an indirect call, using EXP */ 2857 a function_decl or NULL if this is an indirect call, using EXP */
2366 2858
2367 static bool 2859 static bool
2368 rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED) 2860 rx_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
2369 { 2861 {
2862 if (TARGET_JSR)
2863 return false;
2864
2370 /* Do not allow indirect tailcalls. The 2865 /* Do not allow indirect tailcalls. The
2371 sibcall patterns do not support them. */ 2866 sibcall patterns do not support them. */
2372 if (decl == NULL) 2867 if (decl == NULL)
2373 return false; 2868 return false;
2374 2869
2389 } 2884 }
2390 2885
2391 static bool 2886 static bool
2392 rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED) 2887 rx_is_ms_bitfield_layout (const_tree record_type ATTRIBUTE_UNUSED)
2393 { 2888 {
2394 /* The packed attribute overrides the MS behaviour. */ 2889 /* The packed attribute overrides the MS behavior. */
2395 return ! TYPE_PACKED (record_type); 2890 return ! TYPE_PACKED (record_type);
2396 } 2891 }
2397 2892
2398 /* Returns true if X is a legitimate constant for an immediate 2893 /* Returns true if X is a legitimate constant for an immediate
2399 operand on the RX. X is already known to satisfy CONSTANT_P. */ 2894 operand on the RX. X is already known to satisfy CONSTANT_P. */
2400 2895
2401 bool 2896 bool
2402 rx_is_legitimate_constant (rtx x) 2897 rx_is_legitimate_constant (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2403 { 2898 {
2404 switch (GET_CODE (x)) 2899 switch (GET_CODE (x))
2405 { 2900 {
2406 case CONST: 2901 case CONST:
2407 x = XEXP (x, 0); 2902 x = XEXP (x, 0);
2422 case LABEL_REF: 2917 case LABEL_REF:
2423 case SYMBOL_REF: 2918 case SYMBOL_REF:
2424 return true; 2919 return true;
2425 2920
2426 case UNSPEC: 2921 case UNSPEC:
2427 return XINT (x, 1) == UNSPEC_CONST; 2922 return XINT (x, 1) == UNSPEC_CONST || XINT (x, 1) == UNSPEC_PID_ADDR;
2428 2923
2429 default: 2924 default:
2430 /* FIXME: Can this ever happen ? */ 2925 /* FIXME: Can this ever happen ? */
2431 abort (); 2926 gcc_unreachable ();
2432 return false;
2433 } 2927 }
2434 break; 2928 break;
2435 2929
2436 case LABEL_REF: 2930 case LABEL_REF:
2437 case SYMBOL_REF: 2931 case SYMBOL_REF:
2447 2941
2448 return ok_for_max_constant (INTVAL (x)); 2942 return ok_for_max_constant (INTVAL (x));
2449 } 2943 }
2450 2944
2451 static int 2945 static int
2452 rx_address_cost (rtx addr, bool speed) 2946 rx_address_cost (rtx addr, machine_mode mode ATTRIBUTE_UNUSED,
2947 addr_space_t as ATTRIBUTE_UNUSED, bool speed)
2453 { 2948 {
2454 rtx a, b; 2949 rtx a, b;
2455 2950
2456 if (GET_CODE (addr) != PLUS) 2951 if (GET_CODE (addr) != PLUS)
2457 return COSTS_N_INSNS (1); 2952 return COSTS_N_INSNS (1);
2568 emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr); 3063 emit_move_insn (adjust_address (tramp, SImode, 6 + 2), fnaddr);
2569 } 3064 }
2570 } 3065 }
2571 3066
2572 static int 3067 static int
2573 rx_memory_move_cost (enum machine_mode mode, reg_class_t regclass, bool in) 3068 rx_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
2574 { 3069 reg_class_t regclass ATTRIBUTE_UNUSED,
2575 return 2 + memory_move_secondary_cost (mode, regclass, in); 3070 bool in)
3071 {
3072 return (in ? 2 : 0) + REGISTER_MOVE_COST (mode, regclass, regclass);
2576 } 3073 }
2577 3074
2578 /* Convert a CC_MODE to the set of flags that it represents. */ 3075 /* Convert a CC_MODE to the set of flags that it represents. */
2579 3076
2580 static unsigned int 3077 static unsigned int
2581 flags_from_mode (enum machine_mode mode) 3078 flags_from_mode (machine_mode mode)
2582 { 3079 {
2583 switch (mode) 3080 switch (mode)
2584 { 3081 {
2585 case CC_ZSmode: 3082 case E_CC_ZSmode:
2586 return CC_FLAG_S | CC_FLAG_Z; 3083 return CC_FLAG_S | CC_FLAG_Z;
2587 case CC_ZSOmode: 3084 case E_CC_ZSOmode:
2588 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O; 3085 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O;
2589 case CC_ZSCmode: 3086 case E_CC_ZSCmode:
2590 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C; 3087 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_C;
2591 case CCmode: 3088 case E_CCmode:
2592 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C; 3089 return CC_FLAG_S | CC_FLAG_Z | CC_FLAG_O | CC_FLAG_C;
2593 case CC_Fmode: 3090 case E_CC_Fmode:
2594 return CC_FLAG_FP; 3091 return CC_FLAG_FP;
2595 default: 3092 default:
2596 gcc_unreachable (); 3093 gcc_unreachable ();
2597 } 3094 }
2598 } 3095 }
2599 3096
2600 /* Convert a set of flags to a CC_MODE that can implement it. */ 3097 /* Convert a set of flags to a CC_MODE that can implement it. */
2601 3098
2602 static enum machine_mode 3099 static machine_mode
2603 mode_from_flags (unsigned int f) 3100 mode_from_flags (unsigned int f)
2604 { 3101 {
2605 if (f & CC_FLAG_FP) 3102 if (f & CC_FLAG_FP)
2606 return CC_Fmode; 3103 return CC_Fmode;
2607 if (f & CC_FLAG_O) 3104 if (f & CC_FLAG_O)
2645 } 3142 }
2646 } 3143 }
2647 3144
2648 /* Return a CC_MODE of which both M1 and M2 are subsets. */ 3145 /* Return a CC_MODE of which both M1 and M2 are subsets. */
2649 3146
2650 static enum machine_mode 3147 static machine_mode
2651 rx_cc_modes_compatible (enum machine_mode m1, enum machine_mode m2) 3148 rx_cc_modes_compatible (machine_mode m1, machine_mode m2)
2652 { 3149 {
2653 unsigned f; 3150 unsigned f;
2654 3151
2655 /* Early out for identical modes. */ 3152 /* Early out for identical modes. */
2656 if (m1 == m2) 3153 if (m1 == m2)
2665 return mode_from_flags (f); 3162 return mode_from_flags (f);
2666 } 3163 }
2667 3164
2668 /* Return the minimal CC mode needed to implement (CMP_CODE X Y). */ 3165 /* Return the minimal CC mode needed to implement (CMP_CODE X Y). */
2669 3166
2670 enum machine_mode 3167 machine_mode
2671 rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y) 3168 rx_select_cc_mode (enum rtx_code cmp_code, rtx x, rtx y)
2672 { 3169 {
2673 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT) 3170 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2674 return CC_Fmode; 3171 return CC_Fmode;
2675 3172
2681 3178
2682 /* Split the conditional branch. Emit (COMPARE C1 C2) into CC_REG with 3179 /* Split the conditional branch. Emit (COMPARE C1 C2) into CC_REG with
2683 CC_MODE, and use that in branches based on that compare. */ 3180 CC_MODE, and use that in branches based on that compare. */
2684 3181
2685 void 3182 void
2686 rx_split_cbranch (enum machine_mode cc_mode, enum rtx_code cmp1, 3183 rx_split_cbranch (machine_mode cc_mode, enum rtx_code cmp1,
2687 rtx c1, rtx c2, rtx label) 3184 rtx c1, rtx c2, rtx label)
2688 { 3185 {
2689 rtx flags, x; 3186 rtx flags, x;
2690 3187
2691 flags = gen_rtx_REG (cc_mode, CC_REG); 3188 flags = gen_rtx_REG (cc_mode, CC_REG);
2692 x = gen_rtx_COMPARE (cc_mode, c1, c2); 3189 x = gen_rtx_COMPARE (cc_mode, c1, c2);
2693 x = gen_rtx_SET (VOIDmode, flags, x); 3190 x = gen_rtx_SET (flags, x);
2694 emit_insn (x); 3191 emit_insn (x);
2695 3192
2696 x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx); 3193 x = gen_rtx_fmt_ee (cmp1, VOIDmode, flags, const0_rtx);
2697 x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx); 3194 x = gen_rtx_IF_THEN_ELSE (VOIDmode, x, label, pc_rtx);
2698 x = gen_rtx_SET (VOIDmode, pc_rtx, x); 3195 x = gen_rtx_SET (pc_rtx, x);
2699 emit_jump_insn (x); 3196 emit_jump_insn (x);
2700 } 3197 }
2701 3198
2702 /* A helper function for matching parallels that set the flags. */ 3199 /* A helper function for matching parallels that set the flags. */
2703 3200
2704 bool 3201 bool
2705 rx_match_ccmode (rtx insn, enum machine_mode cc_mode) 3202 rx_match_ccmode (rtx insn, machine_mode cc_mode)
2706 { 3203 {
2707 rtx op1, flags; 3204 rtx op1, flags;
2708 enum machine_mode flags_mode; 3205 machine_mode flags_mode;
2709 3206
2710 gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2); 3207 gcc_checking_assert (XVECLEN (PATTERN (insn), 0) == 2);
2711 3208
2712 op1 = XVECEXP (PATTERN (insn), 0, 1); 3209 op1 = XVECEXP (PATTERN (insn), 0, 0);
2713 gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE); 3210 gcc_checking_assert (GET_CODE (SET_SRC (op1)) == COMPARE);
2714 3211
2715 flags = SET_DEST (op1); 3212 flags = SET_DEST (op1);
2716 flags_mode = GET_MODE (flags); 3213 flags_mode = GET_MODE (flags);
2717 3214
2724 if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode)) 3221 if (flags_from_mode (flags_mode) & ~flags_from_mode (cc_mode))
2725 return false; 3222 return false;
2726 3223
2727 return true; 3224 return true;
2728 } 3225 }
2729
2730 3226
3227 int
3228 rx_align_for_label (rtx lab, int uses_threshold)
3229 {
3230 /* This is a simple heuristic to guess when an alignment would not be useful
3231 because the delay due to the inserted NOPs would be greater than the delay
3232 due to the misaligned branch. If uses_threshold is zero then the alignment
3233 is always useful. */
3234 if (LABEL_P (lab) && LABEL_NUSES (lab) < uses_threshold)
3235 return 0;
3236
3237 if (optimize_size)
3238 return 0;
3239 /* These values are log, not bytes. */
3240 if (rx_cpu_type == RX100 || rx_cpu_type == RX200)
3241 return 2; /* 4 bytes */
3242 return 3; /* 8 bytes */
3243 }
3244
3245 static int
3246 rx_max_skip_for_label (rtx_insn *lab)
3247 {
3248 int opsize;
3249 rtx_insn *op;
3250
3251 if (optimize_size)
3252 return 0;
3253
3254 if (lab == NULL)
3255 return 0;
3256
3257 op = lab;
3258 do
3259 {
3260 op = next_nonnote_nondebug_insn (op);
3261 }
3262 while (op && (LABEL_P (op)
3263 || (INSN_P (op) && GET_CODE (PATTERN (op)) == USE)));
3264 if (!op)
3265 return 0;
3266
3267 opsize = get_attr_length (op);
3268 if (opsize >= 0 && opsize < 8)
3269 return opsize - 1;
3270 return 0;
3271 }
3272
3273 /* Compute the real length of the extending load-and-op instructions. */
3274
3275 int
3276 rx_adjust_insn_length (rtx_insn *insn, int current_length)
3277 {
3278 rtx extend, mem, offset;
3279 bool zero;
3280 int factor;
3281
3282 if (!INSN_P (insn))
3283 return current_length;
3284
3285 switch (INSN_CODE (insn))
3286 {
3287 default:
3288 return current_length;
3289
3290 case CODE_FOR_plussi3_zero_extendhi:
3291 case CODE_FOR_andsi3_zero_extendhi:
3292 case CODE_FOR_iorsi3_zero_extendhi:
3293 case CODE_FOR_xorsi3_zero_extendhi:
3294 case CODE_FOR_divsi3_zero_extendhi:
3295 case CODE_FOR_udivsi3_zero_extendhi:
3296 case CODE_FOR_minussi3_zero_extendhi:
3297 case CODE_FOR_smaxsi3_zero_extendhi:
3298 case CODE_FOR_sminsi3_zero_extendhi:
3299 case CODE_FOR_multsi3_zero_extendhi:
3300 case CODE_FOR_comparesi3_zero_extendhi:
3301 zero = true;
3302 factor = 2;
3303 break;
3304
3305 case CODE_FOR_plussi3_sign_extendhi:
3306 case CODE_FOR_andsi3_sign_extendhi:
3307 case CODE_FOR_iorsi3_sign_extendhi:
3308 case CODE_FOR_xorsi3_sign_extendhi:
3309 case CODE_FOR_divsi3_sign_extendhi:
3310 case CODE_FOR_udivsi3_sign_extendhi:
3311 case CODE_FOR_minussi3_sign_extendhi:
3312 case CODE_FOR_smaxsi3_sign_extendhi:
3313 case CODE_FOR_sminsi3_sign_extendhi:
3314 case CODE_FOR_multsi3_sign_extendhi:
3315 case CODE_FOR_comparesi3_sign_extendhi:
3316 zero = false;
3317 factor = 2;
3318 break;
3319
3320 case CODE_FOR_plussi3_zero_extendqi:
3321 case CODE_FOR_andsi3_zero_extendqi:
3322 case CODE_FOR_iorsi3_zero_extendqi:
3323 case CODE_FOR_xorsi3_zero_extendqi:
3324 case CODE_FOR_divsi3_zero_extendqi:
3325 case CODE_FOR_udivsi3_zero_extendqi:
3326 case CODE_FOR_minussi3_zero_extendqi:
3327 case CODE_FOR_smaxsi3_zero_extendqi:
3328 case CODE_FOR_sminsi3_zero_extendqi:
3329 case CODE_FOR_multsi3_zero_extendqi:
3330 case CODE_FOR_comparesi3_zero_extendqi:
3331 zero = true;
3332 factor = 1;
3333 break;
3334
3335 case CODE_FOR_plussi3_sign_extendqi:
3336 case CODE_FOR_andsi3_sign_extendqi:
3337 case CODE_FOR_iorsi3_sign_extendqi:
3338 case CODE_FOR_xorsi3_sign_extendqi:
3339 case CODE_FOR_divsi3_sign_extendqi:
3340 case CODE_FOR_udivsi3_sign_extendqi:
3341 case CODE_FOR_minussi3_sign_extendqi:
3342 case CODE_FOR_smaxsi3_sign_extendqi:
3343 case CODE_FOR_sminsi3_sign_extendqi:
3344 case CODE_FOR_multsi3_sign_extendqi:
3345 case CODE_FOR_comparesi3_sign_extendqi:
3346 zero = false;
3347 factor = 1;
3348 break;
3349 }
3350
3351 /* We are expecting: (SET (REG) (<OP> (REG) (<EXTEND> (MEM)))). */
3352 extend = single_set (insn);
3353 gcc_assert (extend != NULL_RTX);
3354
3355 extend = SET_SRC (extend);
3356 if (GET_CODE (XEXP (extend, 0)) == ZERO_EXTEND
3357 || GET_CODE (XEXP (extend, 0)) == SIGN_EXTEND)
3358 extend = XEXP (extend, 0);
3359 else
3360 extend = XEXP (extend, 1);
3361
3362 gcc_assert ((zero && (GET_CODE (extend) == ZERO_EXTEND))
3363 || (! zero && (GET_CODE (extend) == SIGN_EXTEND)));
3364
3365 mem = XEXP (extend, 0);
3366 gcc_checking_assert (MEM_P (mem));
3367 if (REG_P (XEXP (mem, 0)))
3368 return (zero && factor == 1) ? 2 : 3;
3369
3370 /* We are expecting: (MEM (PLUS (REG) (CONST_INT))). */
3371 gcc_checking_assert (GET_CODE (XEXP (mem, 0)) == PLUS);
3372 gcc_checking_assert (REG_P (XEXP (XEXP (mem, 0), 0)));
3373
3374 offset = XEXP (XEXP (mem, 0), 1);
3375 gcc_checking_assert (GET_CODE (offset) == CONST_INT);
3376
3377 if (IN_RANGE (INTVAL (offset), 0, 255 * factor))
3378 return (zero && factor == 1) ? 3 : 4;
3379
3380 return (zero && factor == 1) ? 4 : 5;
3381 }
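
As a worked example of the length computation above (the RTL is assumed, not taken from this file): for CODE_FOR_andsi3_zero_extendqi, zero is true and factor is 1; if the extended operand is (mem (plus (reg) (const_int 100))), the offset 100 lies in 0..255, so the function returns 3, while a plain (mem (reg)) address would return 2 and an offset outside 0..255 would return 4.
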
3382
3383 static bool
3384 rx_narrow_volatile_bitfield (void)
3385 {
3386 return true;
3387 }
3388
3389 static bool
3390 rx_ok_to_inline (tree caller, tree callee)
3391 {
3392 /* Do not inline functions with local variables
3393 into a naked CALLER - naked functions have no stack frame and
3394 locals need a frame in order to have somewhere to live.
3395
3396 Unfortunately we have no way to determine the presence of
3397 local variables in CALLEE, so we have to be cautious and
3398 assume that there might be some there.
3399
3400 We do allow inlining when CALLEE has the "inline" type
3401 modifier or the "always_inline" or "gnu_inline" attributes. */
3402 return lookup_attribute ("naked", DECL_ATTRIBUTES (caller)) == NULL_TREE
3403 || DECL_DECLARED_INLINE_P (callee)
3404 || lookup_attribute ("always_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE
3405 || lookup_attribute ("gnu_inline", DECL_ATTRIBUTES (callee)) != NULL_TREE;
3406 }
3407
3408 static bool
3409 rx_enable_lra (void)
3410 {
3411 return TARGET_ENABLE_LRA;
3412 }
3413
3414 rx_atomic_sequence::rx_atomic_sequence (const_tree fun_decl)
3415 {
3416 if (is_fast_interrupt_func (fun_decl) || is_interrupt_func (fun_decl))
3417 {
3418 /* If we are inside an interrupt handler, assume that interrupts are
3419 off -- which is the default hardware behavior. In this case, there
3420 is no need to disable the interrupts. */
3421 m_prev_psw_reg = NULL;
3422 }
3423 else
3424 {
3425 m_prev_psw_reg = gen_reg_rtx (SImode);
3426 emit_insn (gen_mvfc (m_prev_psw_reg, GEN_INT (CTRLREG_PSW)));
3427 emit_insn (gen_clrpsw (GEN_INT ('I')));
3428 }
3429 }
3430
3431 rx_atomic_sequence::~rx_atomic_sequence (void)
3432 {
3433 if (m_prev_psw_reg != NULL)
3434 emit_insn (gen_mvtc (GEN_INT (CTRLREG_PSW), m_prev_psw_reg));
3435 }
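
A hypothetical sketch (the caller below is assumed, not part of this file) of how this RAII helper is meant to be used when emitting an uninterruptible sequence:

    {
      rx_atomic_sequence seq (current_function_decl); /* outside a handler: saves the PSW and clears the I bit */
      /* ... emit the insns that must not be interrupted ... */
    } /* destructor writes the saved PSW back, re-enabling interrupts if they were on */
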
3436
3437 /* Implement TARGET_HARD_REGNO_NREGS. */
3438
3439 static unsigned int
3440 rx_hard_regno_nregs (unsigned int, machine_mode mode)
3441 {
3442 return CLASS_MAX_NREGS (0, mode);
3443 }
3444
3445 /* Implement TARGET_HARD_REGNO_MODE_OK. */
3446
3447 static bool
3448 rx_hard_regno_mode_ok (unsigned int regno, machine_mode)
3449 {
3450 return REGNO_REG_CLASS (regno) == GR_REGS;
3451 }
3452
3453 /* Implement TARGET_MODES_TIEABLE_P. */
3454
3455 static bool
3456 rx_modes_tieable_p (machine_mode mode1, machine_mode mode2)
3457 {
3458 return ((GET_MODE_CLASS (mode1) == MODE_FLOAT
3459 || GET_MODE_CLASS (mode1) == MODE_COMPLEX_FLOAT)
3460 == (GET_MODE_CLASS (mode2) == MODE_FLOAT
3461 || GET_MODE_CLASS (mode2) == MODE_COMPLEX_FLOAT));
3462 }
3463
3464 #undef TARGET_NARROW_VOLATILE_BITFIELD
3465 #define TARGET_NARROW_VOLATILE_BITFIELD rx_narrow_volatile_bitfield
3466
3467 #undef TARGET_CAN_INLINE_P
3468 #define TARGET_CAN_INLINE_P rx_ok_to_inline
3469
3470 #undef TARGET_ASM_JUMP_ALIGN_MAX_SKIP
3471 #define TARGET_ASM_JUMP_ALIGN_MAX_SKIP rx_max_skip_for_label
3472 #undef TARGET_ASM_LOOP_ALIGN_MAX_SKIP
3473 #define TARGET_ASM_LOOP_ALIGN_MAX_SKIP rx_max_skip_for_label
3474 #undef TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP
3475 #define TARGET_LABEL_ALIGN_AFTER_BARRIER_MAX_SKIP rx_max_skip_for_label
3476 #undef TARGET_ASM_LABEL_ALIGN_MAX_SKIP
3477 #define TARGET_ASM_LABEL_ALIGN_MAX_SKIP rx_max_skip_for_label
3478
2731 #undef TARGET_FUNCTION_VALUE 3479 #undef TARGET_FUNCTION_VALUE
2732 #define TARGET_FUNCTION_VALUE rx_function_value 3480 #define TARGET_FUNCTION_VALUE rx_function_value
2733 3481
2734 #undef TARGET_RETURN_IN_MSB 3482 #undef TARGET_RETURN_IN_MSB
2735 #define TARGET_RETURN_IN_MSB rx_return_in_msb 3483 #define TARGET_RETURN_IN_MSB rx_return_in_msb
2750 #define TARGET_ASM_SELECT_SECTION rx_select_section 3498 #define TARGET_ASM_SELECT_SECTION rx_select_section
2751 3499
2752 #undef TARGET_INIT_BUILTINS 3500 #undef TARGET_INIT_BUILTINS
2753 #define TARGET_INIT_BUILTINS rx_init_builtins 3501 #define TARGET_INIT_BUILTINS rx_init_builtins
2754 3502
3503 #undef TARGET_BUILTIN_DECL
3504 #define TARGET_BUILTIN_DECL rx_builtin_decl
3505
2755 #undef TARGET_EXPAND_BUILTIN 3506 #undef TARGET_EXPAND_BUILTIN
2756 #define TARGET_EXPAND_BUILTIN rx_expand_builtin 3507 #define TARGET_EXPAND_BUILTIN rx_expand_builtin
2757 3508
2758 #undef TARGET_ASM_CONSTRUCTOR 3509 #undef TARGET_ASM_CONSTRUCTOR
2759 #define TARGET_ASM_CONSTRUCTOR rx_elf_asm_constructor 3510 #define TARGET_ASM_CONSTRUCTOR rx_elf_asm_constructor
2801 #define TARGET_FUNCTION_ARG_BOUNDARY rx_function_arg_boundary 3552 #define TARGET_FUNCTION_ARG_BOUNDARY rx_function_arg_boundary
2802 3553
2803 #undef TARGET_SET_CURRENT_FUNCTION 3554 #undef TARGET_SET_CURRENT_FUNCTION
2804 #define TARGET_SET_CURRENT_FUNCTION rx_set_current_function 3555 #define TARGET_SET_CURRENT_FUNCTION rx_set_current_function
2805 3556
2806 #undef TARGET_HANDLE_OPTION
2807 #define TARGET_HANDLE_OPTION rx_handle_option
2808
2809 #undef TARGET_ASM_INTEGER 3557 #undef TARGET_ASM_INTEGER
2810 #define TARGET_ASM_INTEGER rx_assemble_integer 3558 #define TARGET_ASM_INTEGER rx_assemble_integer
2811 3559
2812 #undef TARGET_USE_BLOCKS_FOR_CONSTANT_P 3560 #undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
2813 #define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true 3561 #define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true
2843 #define TARGET_MEMORY_MOVE_COST rx_memory_move_cost 3591 #define TARGET_MEMORY_MOVE_COST rx_memory_move_cost
2844 3592
2845 #undef TARGET_OPTION_OVERRIDE 3593 #undef TARGET_OPTION_OVERRIDE
2846 #define TARGET_OPTION_OVERRIDE rx_option_override 3594 #define TARGET_OPTION_OVERRIDE rx_option_override
2847 3595
2848 #undef TARGET_OPTION_OPTIMIZATION_TABLE
2849 #define TARGET_OPTION_OPTIMIZATION_TABLE rx_option_optimization_table
2850
2851 #undef TARGET_PROMOTE_FUNCTION_MODE 3596 #undef TARGET_PROMOTE_FUNCTION_MODE
2852 #define TARGET_PROMOTE_FUNCTION_MODE rx_promote_function_mode 3597 #define TARGET_PROMOTE_FUNCTION_MODE rx_promote_function_mode
2853 3598
2854 #undef TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE 3599 #undef TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
2855 #define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE rx_override_options_after_change 3600 #define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE rx_override_options_after_change
2856 3601
2857 #undef TARGET_EXCEPT_UNWIND_INFO
2858 #define TARGET_EXCEPT_UNWIND_INFO sjlj_except_unwind_info
2859
2860 #undef TARGET_FLAGS_REGNUM 3602 #undef TARGET_FLAGS_REGNUM
2861 #define TARGET_FLAGS_REGNUM CC_REG 3603 #define TARGET_FLAGS_REGNUM CC_REG
2862 3604
3605 #undef TARGET_LEGITIMATE_CONSTANT_P
3606 #define TARGET_LEGITIMATE_CONSTANT_P rx_is_legitimate_constant
3607
3608 #undef TARGET_LEGITIMIZE_ADDRESS
3609 #define TARGET_LEGITIMIZE_ADDRESS rx_legitimize_address
3610
3611 #undef TARGET_WARN_FUNC_RETURN
3612 #define TARGET_WARN_FUNC_RETURN rx_warn_func_return
3613
3614 #undef TARGET_LRA_P
3615 #define TARGET_LRA_P rx_enable_lra
3616
3617 #undef TARGET_HARD_REGNO_NREGS
3618 #define TARGET_HARD_REGNO_NREGS rx_hard_regno_nregs
3619 #undef TARGET_HARD_REGNO_MODE_OK
3620 #define TARGET_HARD_REGNO_MODE_OK rx_hard_regno_mode_ok
3621
3622 #undef TARGET_MODES_TIEABLE_P
3623 #define TARGET_MODES_TIEABLE_P rx_modes_tieable_p
3624
2863 struct gcc_target targetm = TARGET_INITIALIZER; 3625 struct gcc_target targetm = TARGET_INITIALIZER;
2864 3626
2865 /* #include "gt-rx.h" */ 3627 #include "gt-rx.h"