comparison gcc/config/v850/v850.c @ 0:a06113de4d67

first commit
author kent <kent@cr.ie.u-ryukyu.ac.jp>
date Fri, 17 Jul 2009 14:47:48 +0900
parents
children 77e2b8dfacca
1 /* Subroutines for insn-output.c for NEC V850 series
2 Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
3 2006, 2007, 2008 Free Software Foundation, Inc.
4 Contributed by Jeff Law (law@cygnus.com).
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it
9 under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT
14 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "tree.h"
27 #include "rtl.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "conditions.h"
33 #include "output.h"
34 #include "insn-attr.h"
35 #include "flags.h"
36 #include "recog.h"
37 #include "expr.h"
38 #include "function.h"
39 #include "toplev.h"
40 #include "ggc.h"
41 #include "integrate.h"
42 #include "tm_p.h"
43 #include "target.h"
44 #include "target-def.h"
45 #include "df.h"
46
47 #ifndef streq
48 #define streq(a,b) (strcmp (a, b) == 0)
49 #endif
50
51 /* Function prototypes for stupid compilers: */
52 static bool v850_handle_option (size_t, const char *, int);
53 static void const_double_split (rtx, HOST_WIDE_INT *, HOST_WIDE_INT *);
54 static int const_costs_int (HOST_WIDE_INT, int);
55 static int const_costs (rtx, enum rtx_code);
56 static bool v850_rtx_costs (rtx, int, int, int *, bool);
57 static void substitute_ep_register (rtx, rtx, int, int, rtx *, rtx *);
58 static void v850_reorg (void);
59 static int ep_memory_offset (enum machine_mode, int);
60 static void v850_set_data_area (tree, v850_data_area);
61 const struct attribute_spec v850_attribute_table[];
62 static tree v850_handle_interrupt_attribute (tree *, tree, tree, int, bool *);
63 static tree v850_handle_data_area_attribute (tree *, tree, tree, int, bool *);
64 static void v850_insert_attributes (tree, tree *);
65 static void v850_asm_init_sections (void);
66 static section *v850_select_section (tree, int, unsigned HOST_WIDE_INT);
67 static void v850_encode_data_area (tree, rtx);
68 static void v850_encode_section_info (tree, rtx, int);
69 static bool v850_return_in_memory (const_tree, const_tree);
70 static void v850_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode,
71 tree, int *, int);
72 static bool v850_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
73 const_tree, bool);
74 static int v850_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
75 tree, bool);
76
77 /* Information about the various small memory areas. */
78 struct small_memory_info small_memory[ (int)SMALL_MEMORY_max ] =
79 {
80 /* name max physical max */
81 { "tda", 0, 256 },
82 { "sda", 0, 65536 },
83 { "zda", 0, 32768 },
84 };
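/* Illustrative note (not part of the original source): the "max" field is
   initialized to 0 above and is only raised by the -mtda=N / -msda=N /
   -mzda=N options (see v850_handle_memory_option below), so by default no
   object is placed in a small data area.  A hypothetical command line such
   as "-msda=256 -mzda=1024" would let objects of up to 256 bytes go to
   .sdata/.sbss and up to 1024 bytes to .zdata/.zbss, while leaving the
   TDA limit at zero.  */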
85
86 /* Names of the various data areas used on the v850. */
87 tree GHS_default_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
88 tree GHS_current_section_names [(int) COUNT_OF_GHS_SECTION_KINDS];
89
90 /* Track the current data area set by the data area pragma (which
91 can be nested). Tested by check_default_data_area. */
92 data_area_stack_element * data_area_stack = NULL;
93
94 /* True if we don't need to check any more if the current
95 function is an interrupt handler. */
96 static int v850_interrupt_cache_p = FALSE;
97
98 /* Whether current function is an interrupt handler. */
99 static int v850_interrupt_p = FALSE;
100
101 static GTY(()) section *rosdata_section;
102 static GTY(()) section *rozdata_section;
103 static GTY(()) section *tdata_section;
104 static GTY(()) section *zdata_section;
105 static GTY(()) section *zbss_section;
106
107 /* Initialize the GCC target structure. */
108 #undef TARGET_ASM_ALIGNED_HI_OP
109 #define TARGET_ASM_ALIGNED_HI_OP "\t.hword\t"
110
111 #undef TARGET_ATTRIBUTE_TABLE
112 #define TARGET_ATTRIBUTE_TABLE v850_attribute_table
113
114 #undef TARGET_INSERT_ATTRIBUTES
115 #define TARGET_INSERT_ATTRIBUTES v850_insert_attributes
116
117 #undef TARGET_ASM_SELECT_SECTION
118 #define TARGET_ASM_SELECT_SECTION v850_select_section
119
120 /* The assembler supports switchable .bss sections, but
121 v850_select_section doesn't yet make use of them. */
122 #undef TARGET_HAVE_SWITCHABLE_BSS_SECTIONS
123 #define TARGET_HAVE_SWITCHABLE_BSS_SECTIONS false
124
125 #undef TARGET_ENCODE_SECTION_INFO
126 #define TARGET_ENCODE_SECTION_INFO v850_encode_section_info
127
128 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
129 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
130
131 #undef TARGET_DEFAULT_TARGET_FLAGS
132 #define TARGET_DEFAULT_TARGET_FLAGS (MASK_DEFAULT | MASK_APP_REGS)
133 #undef TARGET_HANDLE_OPTION
134 #define TARGET_HANDLE_OPTION v850_handle_option
135
136 #undef TARGET_RTX_COSTS
137 #define TARGET_RTX_COSTS v850_rtx_costs
138
139 #undef TARGET_ADDRESS_COST
140 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0
141
142 #undef TARGET_MACHINE_DEPENDENT_REORG
143 #define TARGET_MACHINE_DEPENDENT_REORG v850_reorg
144
145 #undef TARGET_PROMOTE_PROTOTYPES
146 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
147
148 #undef TARGET_RETURN_IN_MEMORY
149 #define TARGET_RETURN_IN_MEMORY v850_return_in_memory
150
151 #undef TARGET_PASS_BY_REFERENCE
152 #define TARGET_PASS_BY_REFERENCE v850_pass_by_reference
153
154 #undef TARGET_CALLEE_COPIES
155 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
156
157 #undef TARGET_SETUP_INCOMING_VARARGS
158 #define TARGET_SETUP_INCOMING_VARARGS v850_setup_incoming_varargs
159
160 #undef TARGET_ARG_PARTIAL_BYTES
161 #define TARGET_ARG_PARTIAL_BYTES v850_arg_partial_bytes
162
163 struct gcc_target targetm = TARGET_INITIALIZER;
164
165 /* Set the maximum size of small memory area TYPE to the value given
166 by VALUE. Return true if VALUE was syntactically correct. VALUE
167 starts with the argument separator: either "-" or "=". */
168
169 static bool
170 v850_handle_memory_option (enum small_memory_type type, const char *value)
171 {
172 int i, size;
173
174 if (*value != '-' && *value != '=')
175 return false;
176
177 value++;
178 for (i = 0; value[i]; i++)
179 if (!ISDIGIT (value[i]))
180 return false;
181
182 size = atoi (value);
183 if (size > small_memory[type].physical_max)
184 error ("value passed to %<-m%s%> is too large", small_memory[type].name);
185 else
186 small_memory[type].max = size;
187 return true;
188 }
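/* Worked example (illustrative only): for -msda=256 the option machinery
   hands this function VALUE == "=256".  The leading '=' (or '-') is
   skipped, the remaining characters are checked to be digits, and atoi
   yields 256, which is below the sda physical_max of 65536, so
   small_memory[SMALL_MEMORY_SDA].max becomes 256.  A request such as
   -msda=100000 would instead produce the "value passed to -msda is too
   large" error and leave the limit untouched.  */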
189
190 /* Implement TARGET_HANDLE_OPTION. */
191
192 static bool
193 v850_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED)
194 {
195 switch (code)
196 {
197 case OPT_mspace:
198 target_flags |= MASK_EP | MASK_PROLOG_FUNCTION;
199 return true;
200
201 case OPT_mv850:
202 target_flags &= ~(MASK_CPU ^ MASK_V850);
203 return true;
204
205 case OPT_mv850e:
206 case OPT_mv850e1:
207 target_flags &= ~(MASK_CPU ^ MASK_V850E);
208 return true;
209
210 case OPT_mtda:
211 return v850_handle_memory_option (SMALL_MEMORY_TDA, arg);
212
213 case OPT_msda:
214 return v850_handle_memory_option (SMALL_MEMORY_SDA, arg);
215
216 case OPT_mzda:
217 return v850_handle_memory_option (SMALL_MEMORY_ZDA, arg);
218
219 default:
220 return true;
221 }
222 }
223
224 static bool
225 v850_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
226 enum machine_mode mode, const_tree type,
227 bool named ATTRIBUTE_UNUSED)
228 {
229 unsigned HOST_WIDE_INT size;
230
231 if (type)
232 size = int_size_in_bytes (type);
233 else
234 size = GET_MODE_SIZE (mode);
235
236 return size > 8;
237 }
238
239 /* Return an RTX to represent where an argument of mode MODE will be passed
240 to a function. If the result is 0, the argument is pushed on the stack. */
241
242 rtx
243 function_arg (CUMULATIVE_ARGS * cum,
244 enum machine_mode mode,
245 tree type,
246 int named)
247 {
248 rtx result = 0;
249 int size, align;
250
251 if (TARGET_GHS && !named)
252 return NULL_RTX;
253
254 if (mode == BLKmode)
255 size = int_size_in_bytes (type);
256 else
257 size = GET_MODE_SIZE (mode);
258
259 if (size < 1)
260 return 0;
261
262 if (type)
263 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
264 else
265 align = size;
266
267 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
268
269 if (cum->nbytes > 4 * UNITS_PER_WORD)
270 return 0;
271
272 if (type == NULL_TREE
273 && cum->nbytes + size > 4 * UNITS_PER_WORD)
274 return 0;
275
276 switch (cum->nbytes / UNITS_PER_WORD)
277 {
278 case 0:
279 result = gen_rtx_REG (mode, 6);
280 break;
281 case 1:
282 result = gen_rtx_REG (mode, 7);
283 break;
284 case 2:
285 result = gen_rtx_REG (mode, 8);
286 break;
287 case 3:
288 result = gen_rtx_REG (mode, 9);
289 break;
290 default:
291 result = 0;
292 }
293
294 return result;
295 }
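/* Worked example (illustrative, assuming the usual 4-byte UNITS_PER_WORD):
   for a call such as  f (int a, int b, long long c)  the first argument is
   passed in r6 (cum->nbytes == 0), the second in r7 (nbytes == 4), and the
   8-byte third argument starts at nbytes == 8 and is therefore returned as
   a DImode reference to r8 (occupying r8/r9).  Any argument that would
   start at or beyond 4 * UNITS_PER_WORD (16 bytes) falls out of the switch
   above and is passed on the stack instead.  */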
296
297
298 /* Return the number of bytes which must be put into registers for
299 arguments that are passed partly in registers and partly in memory. */
300
301 static int
302 v850_arg_partial_bytes (CUMULATIVE_ARGS * cum, enum machine_mode mode,
303 tree type, bool named)
304 {
305 int size, align;
306
307 if (TARGET_GHS && !named)
308 return 0;
309
310 if (mode == BLKmode)
311 size = int_size_in_bytes (type);
312 else
313 size = GET_MODE_SIZE (mode);
314
315 if (type)
316 align = TYPE_ALIGN (type) / BITS_PER_UNIT;
317 else
318 align = size;
319
320 cum->nbytes = (cum->nbytes + align - 1) &~(align - 1);
321
322 if (cum->nbytes > 4 * UNITS_PER_WORD)
323 return 0;
324
325 if (cum->nbytes + size <= 4 * UNITS_PER_WORD)
326 return 0;
327
328 if (type == NULL_TREE
329 && cum->nbytes + size > 4 * UNITS_PER_WORD)
330 return 0;
331
332 return 4 * UNITS_PER_WORD - cum->nbytes;
333 }
334
335
336 /* Return the high and low words of a CONST_DOUBLE. */
337
338 static void
339 const_double_split (rtx x, HOST_WIDE_INT * p_high, HOST_WIDE_INT * p_low)
340 {
341 if (GET_CODE (x) == CONST_DOUBLE)
342 {
343 long t[2];
344 REAL_VALUE_TYPE rv;
345
346 switch (GET_MODE (x))
347 {
348 case DFmode:
349 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
350 REAL_VALUE_TO_TARGET_DOUBLE (rv, t);
351 *p_high = t[1]; /* since v850 is little endian */
352 *p_low = t[0]; /* high is second word */
353 return;
354
355 case SFmode:
356 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
357 REAL_VALUE_TO_TARGET_SINGLE (rv, *p_high);
358 *p_low = 0;
359 return;
360
361 case VOIDmode:
362 case DImode:
363 *p_high = CONST_DOUBLE_HIGH (x);
364 *p_low = CONST_DOUBLE_LOW (x);
365 return;
366
367 default:
368 break;
369 }
370 }
371
372 fatal_insn ("const_double_split got a bad insn:", x);
373 }
374
375
376 /* Return the cost of the rtx R with code CODE. */
377
378 static int
379 const_costs_int (HOST_WIDE_INT value, int zero_cost)
380 {
381 if (CONST_OK_FOR_I (value))
382 return zero_cost;
383 else if (CONST_OK_FOR_J (value))
384 return 1;
385 else if (CONST_OK_FOR_K (value))
386 return 2;
387 else
388 return 4;
389 }
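/* Illustrative cost sketch (the exact constraint ranges are defined in
   v850.h and are assumptions here): CONST_OK_FOR_I is the "free" class
   returned at ZERO_COST, CONST_OK_FOR_J roughly covers the signed 5-bit
   immediates usable directly by mov/add (cost 1), CONST_OK_FOR_K the
   signed 16-bit immediates usable by movea (cost 2), and anything else
   needs a multi-insn sequence such as movhi/movea, hence cost 4.  */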
390
391 static int
392 const_costs (rtx r, enum rtx_code c)
393 {
394 HOST_WIDE_INT high, low;
395
396 switch (c)
397 {
398 case CONST_INT:
399 return const_costs_int (INTVAL (r), 0);
400
401 case CONST_DOUBLE:
402 const_double_split (r, &high, &low);
403 if (GET_MODE (r) == SFmode)
404 return const_costs_int (high, 1);
405 else
406 return const_costs_int (high, 1) + const_costs_int (low, 1);
407
408 case SYMBOL_REF:
409 case LABEL_REF:
410 case CONST:
411 return 2;
412
413 case HIGH:
414 return 1;
415
416 default:
417 return 4;
418 }
419 }
420
421 static bool
422 v850_rtx_costs (rtx x,
423 int code,
424 int outer_code ATTRIBUTE_UNUSED,
425 int * total, bool speed)
426 {
427 switch (code)
428 {
429 case CONST_INT:
430 case CONST_DOUBLE:
431 case CONST:
432 case SYMBOL_REF:
433 case LABEL_REF:
434 *total = COSTS_N_INSNS (const_costs (x, code));
435 return true;
436
437 case MOD:
438 case DIV:
439 case UMOD:
440 case UDIV:
441 if (TARGET_V850E && !speed)
442 *total = 6;
443 else
444 *total = 60;
445 return true;
446
447 case MULT:
448 if (TARGET_V850E
449 && ( GET_MODE (x) == SImode
450 || GET_MODE (x) == HImode
451 || GET_MODE (x) == QImode))
452 {
453 if (GET_CODE (XEXP (x, 1)) == REG)
454 *total = 4;
455 else if (GET_CODE (XEXP (x, 1)) == CONST_INT)
456 {
457 if (CONST_OK_FOR_O (INTVAL (XEXP (x, 1))))
458 *total = 6;
459 else if (CONST_OK_FOR_K (INTVAL (XEXP (x, 1))))
460 *total = 10;
461 }
462 }
463 else
464 *total = 20;
465 return true;
466
467 default:
468 return false;
469 }
470 }
471
472 /* Print operand X using operand code CODE to assembly language output file
473 FILE. */
474
475 void
476 print_operand (FILE * file, rtx x, int code)
477 {
478 HOST_WIDE_INT high, low;
479
480 switch (code)
481 {
482 case 'c':
483 /* We use 'c' operands with symbols for .vtinherit */
484 if (GET_CODE (x) == SYMBOL_REF)
485 {
486 output_addr_const(file, x);
487 break;
488 }
489 /* fall through */
490 case 'b':
491 case 'B':
492 case 'C':
493 switch ((code == 'B' || code == 'C')
494 ? reverse_condition (GET_CODE (x)) : GET_CODE (x))
495 {
496 case NE:
497 if (code == 'c' || code == 'C')
498 fprintf (file, "nz");
499 else
500 fprintf (file, "ne");
501 break;
502 case EQ:
503 if (code == 'c' || code == 'C')
504 fprintf (file, "z");
505 else
506 fprintf (file, "e");
507 break;
508 case GE:
509 fprintf (file, "ge");
510 break;
511 case GT:
512 fprintf (file, "gt");
513 break;
514 case LE:
515 fprintf (file, "le");
516 break;
517 case LT:
518 fprintf (file, "lt");
519 break;
520 case GEU:
521 fprintf (file, "nl");
522 break;
523 case GTU:
524 fprintf (file, "h");
525 break;
526 case LEU:
527 fprintf (file, "nh");
528 break;
529 case LTU:
530 fprintf (file, "l");
531 break;
532 default:
533 gcc_unreachable ();
534 }
535 break;
536 case 'F': /* high word of CONST_DOUBLE */
537 switch (GET_CODE (x))
538 {
539 case CONST_INT:
540 fprintf (file, "%d", (INTVAL (x) >= 0) ? 0 : -1);
541 break;
542
543 case CONST_DOUBLE:
544 const_double_split (x, &high, &low);
545 fprintf (file, "%ld", (long) high);
546 break;
547
548 default:
549 gcc_unreachable ();
550 }
551 break;
552 case 'G': /* low word of CONST_DOUBLE */
553 switch (GET_CODE (x))
554 {
555 case CONST_INT:
556 fprintf (file, "%ld", (long) INTVAL (x));
557 break;
558
559 case CONST_DOUBLE:
560 const_double_split (x, &high, &low);
561 fprintf (file, "%ld", (long) low);
562 break;
563
564 default:
565 gcc_unreachable ();
566 }
567 break;
568 case 'L':
569 fprintf (file, "%d\n", (int)(INTVAL (x) & 0xffff));
570 break;
571 case 'M':
572 fprintf (file, "%d", exact_log2 (INTVAL (x)));
573 break;
574 case 'O':
575 gcc_assert (special_symbolref_operand (x, VOIDmode));
576
577 if (GET_CODE (x) == CONST)
578 x = XEXP (XEXP (x, 0), 0);
579 else
580 gcc_assert (GET_CODE (x) == SYMBOL_REF);
581
582 if (SYMBOL_REF_ZDA_P (x))
583 fprintf (file, "zdaoff");
584 else if (SYMBOL_REF_SDA_P (x))
585 fprintf (file, "sdaoff");
586 else if (SYMBOL_REF_TDA_P (x))
587 fprintf (file, "tdaoff");
588 else
589 gcc_unreachable ();
590 break;
591 case 'P':
592 gcc_assert (special_symbolref_operand (x, VOIDmode));
593 output_addr_const (file, x);
594 break;
595 case 'Q':
596 gcc_assert (special_symbolref_operand (x, VOIDmode));
597
598 if (GET_CODE (x) == CONST)
599 x = XEXP (XEXP (x, 0), 0);
600 else
601 gcc_assert (GET_CODE (x) == SYMBOL_REF);
602
603 if (SYMBOL_REF_ZDA_P (x))
604 fprintf (file, "r0");
605 else if (SYMBOL_REF_SDA_P (x))
606 fprintf (file, "gp");
607 else if (SYMBOL_REF_TDA_P (x))
608 fprintf (file, "ep");
609 else
610 gcc_unreachable ();
611 break;
612 case 'R': /* 2nd word of a double. */
613 switch (GET_CODE (x))
614 {
615 case REG:
616 fprintf (file, reg_names[REGNO (x) + 1]);
617 break;
618 case MEM:
619 x = XEXP (adjust_address (x, SImode, 4), 0);
620 print_operand_address (file, x);
621 if (GET_CODE (x) == CONST_INT)
622 fprintf (file, "[r0]");
623 break;
624
625 default:
626 break;
627 }
628 break;
629 case 'S':
630 {
631 /* If it's a reference to a TDA variable, use sst/sld vs. st/ld. */
632 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), FALSE))
633 fputs ("s", file);
634
635 break;
636 }
637 case 'T':
638 {
639 /* Like an 'S' operand above, but for unsigned loads only. */
640 if (GET_CODE (x) == MEM && ep_memory_operand (x, GET_MODE (x), TRUE))
641 fputs ("s", file);
642
643 break;
644 }
645 case 'W': /* print the instruction suffix */
646 switch (GET_MODE (x))
647 {
648 default:
649 gcc_unreachable ();
650
651 case QImode: fputs (".b", file); break;
652 case HImode: fputs (".h", file); break;
653 case SImode: fputs (".w", file); break;
654 case SFmode: fputs (".w", file); break;
655 }
656 break;
657 case '.': /* register r0 */
658 fputs (reg_names[0], file);
659 break;
660 case 'z': /* reg or zero */
661 if (GET_CODE (x) == REG)
662 fputs (reg_names[REGNO (x)], file);
663 else
664 {
665 gcc_assert (x == const0_rtx);
666 fputs (reg_names[0], file);
667 }
668 break;
669 default:
670 switch (GET_CODE (x))
671 {
672 case MEM:
673 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
674 output_address (gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 0),
675 XEXP (x, 0)));
676 else
677 output_address (XEXP (x, 0));
678 break;
679
680 case REG:
681 fputs (reg_names[REGNO (x)], file);
682 break;
683 case SUBREG:
684 fputs (reg_names[subreg_regno (x)], file);
685 break;
686 case CONST_INT:
687 case SYMBOL_REF:
688 case CONST:
689 case LABEL_REF:
690 case CODE_LABEL:
691 print_operand_address (file, x);
692 break;
693 default:
694 gcc_unreachable ();
695 }
696 break;
697
698 }
699 }
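/* Illustrative examples of the operand codes above (not exhaustive):
   "%W1" on an SImode memory operand prints ".w", so a template such as
   "%S1ld%W1 %1,%0" expands to "ld.w ..." in the general case and to
   "sld.w ..." when the operand is a short ep/TDA reference (the "%S1"
   prints the extra "s").  "%z0" prints "r0" when operand 0 is const0_rtx,
   and "%B"/"%C" print the reversed condition name ("nz", "z", "ge", ...)
   used by the conditional branch patterns.  */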
700
701
702 /* Output assembly language for the address ADDR to FILE. */
703
704 void
705 print_operand_address (FILE * file, rtx addr)
706 {
707 switch (GET_CODE (addr))
708 {
709 case REG:
710 fprintf (file, "0[");
711 print_operand (file, addr, 0);
712 fprintf (file, "]");
713 break;
714 case LO_SUM:
715 if (GET_CODE (XEXP (addr, 0)) == REG)
716 {
717 /* reg,foo */
718 fprintf (file, "lo(");
719 print_operand (file, XEXP (addr, 1), 0);
720 fprintf (file, ")[");
721 print_operand (file, XEXP (addr, 0), 0);
722 fprintf (file, "]");
723 }
724 break;
725 case PLUS:
726 if (GET_CODE (XEXP (addr, 0)) == REG
727 || GET_CODE (XEXP (addr, 0)) == SUBREG)
728 {
729 /* reg,foo */
730 print_operand (file, XEXP (addr, 1), 0);
731 fprintf (file, "[");
732 print_operand (file, XEXP (addr, 0), 0);
733 fprintf (file, "]");
734 }
735 else
736 {
737 print_operand (file, XEXP (addr, 0), 0);
738 fprintf (file, "+");
739 print_operand (file, XEXP (addr, 1), 0);
740 }
741 break;
742 case SYMBOL_REF:
743 {
744 const char *off_name = NULL;
745 const char *reg_name = NULL;
746
747 if (SYMBOL_REF_ZDA_P (addr))
748 {
749 off_name = "zdaoff";
750 reg_name = "r0";
751 }
752 else if (SYMBOL_REF_SDA_P (addr))
753 {
754 off_name = "sdaoff";
755 reg_name = "gp";
756 }
757 else if (SYMBOL_REF_TDA_P (addr))
758 {
759 off_name = "tdaoff";
760 reg_name = "ep";
761 }
762
763 if (off_name)
764 fprintf (file, "%s(", off_name);
765 output_addr_const (file, addr);
766 if (reg_name)
767 fprintf (file, ")[%s]", reg_name);
768 }
769 break;
770 case CONST:
771 if (special_symbolref_operand (addr, VOIDmode))
772 {
773 rtx x = XEXP (XEXP (addr, 0), 0);
774 const char *off_name;
775 const char *reg_name;
776
777 if (SYMBOL_REF_ZDA_P (x))
778 {
779 off_name = "zdaoff";
780 reg_name = "r0";
781 }
782 else if (SYMBOL_REF_SDA_P (x))
783 {
784 off_name = "sdaoff";
785 reg_name = "gp";
786 }
787 else if (SYMBOL_REF_TDA_P (x))
788 {
789 off_name = "tdaoff";
790 reg_name = "ep";
791 }
792 else
793 gcc_unreachable ();
794
795 fprintf (file, "%s(", off_name);
796 output_addr_const (file, addr);
797 fprintf (file, ")[%s]", reg_name);
798 }
799 else
800 output_addr_const (file, addr);
801 break;
802 default:
803 output_addr_const (file, addr);
804 break;
805 }
806 }
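/* Examples of the emitted address syntax (illustrative): a plain register
   address prints as "0[r10]", a reg-plus-offset PLUS prints as "8[r29]",
   and a symbol with SYMBOL_REF_SDA_P set prints as "sdaoff(sym)[gp]",
   matching the assembler's offset[base] notation; TDA and ZDA symbols use
   "tdaoff(...)[ep]" and "zdaoff(...)[r0]" respectively.  */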
807
808 /* When assemble_integer is used to emit the offsets for a switch
809 table it can encounter (TRUNCATE:HI (MINUS:SI (LABEL_REF:SI) (LABEL_REF:SI))).
810 output_addr_const will normally barf at this, but it is OK to omit
811 the truncate and just emit the difference of the two labels. The
812 .hword directive will automatically handle the truncation for us.
813
814 Returns 1 if rtx was handled, 0 otherwise. */
815
816 int
817 v850_output_addr_const_extra (FILE * file, rtx x)
818 {
819 if (GET_CODE (x) != TRUNCATE)
820 return 0;
821
822 x = XEXP (x, 0);
823
824 /* We must also handle the case where the switch table was passed a
825 constant value and so has been collapsed. In this case the first
826 label will have been deleted. In such a case it is OK to emit
827 nothing, since the table will not be used.
828 (cf gcc.c-torture/compile/990801-1.c). */
829 if (GET_CODE (x) == MINUS
830 && GET_CODE (XEXP (x, 0)) == LABEL_REF
831 && GET_CODE (XEXP (XEXP (x, 0), 0)) == CODE_LABEL
832 && INSN_DELETED_P (XEXP (XEXP (x, 0), 0)))
833 return 1;
834
835 output_addr_const (file, x);
836 return 1;
837 }
838
839 /* Return the appropriate code to load up a 1, 2, or 4 byte integer or
840 floating point value. */
841
842 const char *
843 output_move_single (rtx * operands)
844 {
845 rtx dst = operands[0];
846 rtx src = operands[1];
847
848 if (REG_P (dst))
849 {
850 if (REG_P (src))
851 return "mov %1,%0";
852
853 else if (GET_CODE (src) == CONST_INT)
854 {
855 HOST_WIDE_INT value = INTVAL (src);
856
857 if (CONST_OK_FOR_J (value)) /* Signed 5-bit immediate. */
858 return "mov %1,%0";
859
860 else if (CONST_OK_FOR_K (value)) /* Signed 16-bit immediate. */
861 return "movea lo(%1),%.,%0";
862
863 else if (CONST_OK_FOR_L (value)) /* Upper 16 bits were set. */
864 return "movhi hi(%1),%.,%0";
865
866 /* A random constant. */
867 else if (TARGET_V850E)
868 return "mov %1,%0";
869 else
870 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
871 }
872
873 else if (GET_CODE (src) == CONST_DOUBLE && GET_MODE (src) == SFmode)
874 {
875 HOST_WIDE_INT high, low;
876
877 const_double_split (src, &high, &low);
878
879 if (CONST_OK_FOR_J (high)) /* Signed 5-bit immediate. */
880 return "mov %F1,%0";
881
882 else if (CONST_OK_FOR_K (high)) /* Signed 16-bit immediate. */
883 return "movea lo(%F1),%.,%0";
884
885 else if (CONST_OK_FOR_L (high)) /* Upper 16 bits were set. */
886 return "movhi hi(%F1),%.,%0";
887
888 /* A random constant. */
889 else if (TARGET_V850E)
890 return "mov %F1,%0";
891
892 else
893 return "movhi hi(%F1),%.,%0\n\tmovea lo(%F1),%0,%0";
894 }
895
896 else if (GET_CODE (src) == MEM)
897 return "%S1ld%W1 %1,%0";
898
899 else if (special_symbolref_operand (src, VOIDmode))
900 return "movea %O1(%P1),%Q1,%0";
901
902 else if (GET_CODE (src) == LABEL_REF
903 || GET_CODE (src) == SYMBOL_REF
904 || GET_CODE (src) == CONST)
905 {
906 if (TARGET_V850E)
907 return "mov hilo(%1),%0";
908 else
909 return "movhi hi(%1),%.,%0\n\tmovea lo(%1),%0,%0";
910 }
911
912 else if (GET_CODE (src) == HIGH)
913 return "movhi hi(%1),%.,%0";
914
915 else if (GET_CODE (src) == LO_SUM)
916 {
917 operands[2] = XEXP (src, 0);
918 operands[3] = XEXP (src, 1);
919 return "movea lo(%3),%2,%0";
920 }
921 }
922
923 else if (GET_CODE (dst) == MEM)
924 {
925 if (REG_P (src))
926 return "%S0st%W0 %1,%0";
927
928 else if (GET_CODE (src) == CONST_INT && INTVAL (src) == 0)
929 return "%S0st%W0 %.,%0";
930
931 else if (GET_CODE (src) == CONST_DOUBLE
932 && CONST0_RTX (GET_MODE (dst)) == src)
933 return "%S0st%W0 %.,%0";
934 }
935
936 fatal_insn ("output_move_single:", gen_rtx_SET (VOIDmode, dst, src));
937 return "";
938 }
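/* Illustrative template selection (constraint semantics assumed from the
   comments above): a constant such as 5 fits the signed 5-bit range and
   uses the single "mov" form; one that fits the signed 16-bit range
   (CONST_OK_FOR_K) uses "movea lo(...)"; one whose low 16 bits are zero
   (CONST_OK_FOR_L) uses "movhi hi(...)"; any other constant needs the
   two-insn movhi/movea sequence on the plain V850, or the single long
   "mov" available on the V850E.  */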
939
940
941 /* Return maximum offset supported for a short EP memory reference of mode
942 MODE and signedness UNSIGNEDP. */
943
944 static int
945 ep_memory_offset (enum machine_mode mode, int unsignedp ATTRIBUTE_UNUSED)
946 {
947 int max_offset = 0;
948
949 switch (mode)
950 {
951 case QImode:
952 if (TARGET_SMALL_SLD)
953 max_offset = (1 << 4);
954 else if (TARGET_V850E
955 && ( ( unsignedp && ! TARGET_US_BIT_SET)
956 || (! unsignedp && TARGET_US_BIT_SET)))
957 max_offset = (1 << 4);
958 else
959 max_offset = (1 << 7);
960 break;
961
962 case HImode:
963 if (TARGET_SMALL_SLD)
964 max_offset = (1 << 5);
965 else if (TARGET_V850E
966 && ( ( unsignedp && ! TARGET_US_BIT_SET)
967 || (! unsignedp && TARGET_US_BIT_SET)))
968 max_offset = (1 << 5);
969 else
970 max_offset = (1 << 8);
971 break;
972
973 case SImode:
974 case SFmode:
975 max_offset = (1 << 8);
976 break;
977
978 default:
979 break;
980 }
981
982 return max_offset;
983 }
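/* Worked example (illustrative): with neither TARGET_SMALL_SLD nor the
   V850E us-bit special case in effect, byte accesses through ep may use
   offsets below 1 << 7 (128) and halfword/word accesses offsets below
   1 << 8 (256); TARGET_SMALL_SLD shrinks the byte and halfword limits to
   1 << 4 and 1 << 5.  ep_memory_operand below additionally requires the
   offset to be non-negative and naturally aligned for the access size.  */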
984
985 /* Return true if OP is a valid short EP memory reference */
986
987 int
988 ep_memory_operand (rtx op, enum machine_mode mode, int unsigned_load)
989 {
990 rtx addr, op0, op1;
991 int max_offset;
992 int mask;
993
994 /* If we are not using the EP register on a per-function basis
995 then do not allow this optimization at all. This is to
996 prevent the use of the SLD/SST instructions which cannot be
997 guaranteed to work properly due to a hardware bug. */
998 if (!TARGET_EP)
999 return FALSE;
1000
1001 if (GET_CODE (op) != MEM)
1002 return FALSE;
1003
1004 max_offset = ep_memory_offset (mode, unsigned_load);
1005
1006 mask = GET_MODE_SIZE (mode) - 1;
1007
1008 addr = XEXP (op, 0);
1009 if (GET_CODE (addr) == CONST)
1010 addr = XEXP (addr, 0);
1011
1012 switch (GET_CODE (addr))
1013 {
1014 default:
1015 break;
1016
1017 case SYMBOL_REF:
1018 return SYMBOL_REF_TDA_P (addr);
1019
1020 case REG:
1021 return REGNO (addr) == EP_REGNUM;
1022
1023 case PLUS:
1024 op0 = XEXP (addr, 0);
1025 op1 = XEXP (addr, 1);
1026 if (GET_CODE (op1) == CONST_INT
1027 && INTVAL (op1) < max_offset
1028 && INTVAL (op1) >= 0
1029 && (INTVAL (op1) & mask) == 0)
1030 {
1031 if (GET_CODE (op0) == REG && REGNO (op0) == EP_REGNUM)
1032 return TRUE;
1033
1034 if (GET_CODE (op0) == SYMBOL_REF && SYMBOL_REF_TDA_P (op0))
1035 return TRUE;
1036 }
1037 break;
1038 }
1039
1040 return FALSE;
1041 }
1042
1043 /* Substitute memory references involving a pointer so that they use the ep
1044 register, taking care to save and restore ep around the substituted run. */
1045
1046 static void
1047 substitute_ep_register (rtx first_insn,
1048 rtx last_insn,
1049 int uses,
1050 int regno,
1051 rtx * p_r1,
1052 rtx * p_ep)
1053 {
1054 rtx reg = gen_rtx_REG (Pmode, regno);
1055 rtx insn;
1056
1057 if (!*p_r1)
1058 {
1059 df_set_regs_ever_live (1, true);
1060 *p_r1 = gen_rtx_REG (Pmode, 1);
1061 *p_ep = gen_rtx_REG (Pmode, 30);
1062 }
1063
1064 if (TARGET_DEBUG)
1065 fprintf (stderr, "\
1066 Saved %d bytes (%d uses of register %s) in function %s, starting as insn %d, ending at %d\n",
1067 2 * (uses - 3), uses, reg_names[regno],
1068 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)),
1069 INSN_UID (first_insn), INSN_UID (last_insn));
1070
1071 if (GET_CODE (first_insn) == NOTE)
1072 first_insn = next_nonnote_insn (first_insn);
1073
1074 last_insn = next_nonnote_insn (last_insn);
1075 for (insn = first_insn; insn && insn != last_insn; insn = NEXT_INSN (insn))
1076 {
1077 if (GET_CODE (insn) == INSN)
1078 {
1079 rtx pattern = single_set (insn);
1080
1081 /* Replace the memory references. */
1082 if (pattern)
1083 {
1084 rtx *p_mem;
1085 /* Memory operands are signed by default. */
1086 int unsignedp = FALSE;
1087
1088 if (GET_CODE (SET_DEST (pattern)) == MEM
1089 && GET_CODE (SET_SRC (pattern)) == MEM)
1090 p_mem = (rtx *)0;
1091
1092 else if (GET_CODE (SET_DEST (pattern)) == MEM)
1093 p_mem = &SET_DEST (pattern);
1094
1095 else if (GET_CODE (SET_SRC (pattern)) == MEM)
1096 p_mem = &SET_SRC (pattern);
1097
1098 else if (GET_CODE (SET_SRC (pattern)) == SIGN_EXTEND
1099 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1100 p_mem = &XEXP (SET_SRC (pattern), 0);
1101
1102 else if (GET_CODE (SET_SRC (pattern)) == ZERO_EXTEND
1103 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == MEM)
1104 {
1105 p_mem = &XEXP (SET_SRC (pattern), 0);
1106 unsignedp = TRUE;
1107 }
1108 else
1109 p_mem = (rtx *)0;
1110
1111 if (p_mem)
1112 {
1113 rtx addr = XEXP (*p_mem, 0);
1114
1115 if (GET_CODE (addr) == REG && REGNO (addr) == (unsigned) regno)
1116 *p_mem = change_address (*p_mem, VOIDmode, *p_ep);
1117
1118 else if (GET_CODE (addr) == PLUS
1119 && GET_CODE (XEXP (addr, 0)) == REG
1120 && REGNO (XEXP (addr, 0)) == (unsigned) regno
1121 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1122 && ((INTVAL (XEXP (addr, 1)))
1123 < ep_memory_offset (GET_MODE (*p_mem),
1124 unsignedp))
1125 && ((INTVAL (XEXP (addr, 1))) >= 0))
1126 *p_mem = change_address (*p_mem, VOIDmode,
1127 gen_rtx_PLUS (Pmode,
1128 *p_ep,
1129 XEXP (addr, 1)));
1130 }
1131 }
1132 }
1133 }
1134
1135 /* Optimize back to back cases of ep <- r1 & r1 <- ep. */
1136 insn = prev_nonnote_insn (first_insn);
1137 if (insn && GET_CODE (insn) == INSN
1138 && GET_CODE (PATTERN (insn)) == SET
1139 && SET_DEST (PATTERN (insn)) == *p_ep
1140 && SET_SRC (PATTERN (insn)) == *p_r1)
1141 delete_insn (insn);
1142 else
1143 emit_insn_before (gen_rtx_SET (Pmode, *p_r1, *p_ep), first_insn);
1144
1145 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, reg), first_insn);
1146 emit_insn_before (gen_rtx_SET (Pmode, *p_ep, *p_r1), last_insn);
1147 }
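/* Sketch of the transformation performed above (register numbers and
   assembly spelling hypothetical): a run of word loads through r12,

       ld.w 0[r12],r10   ...   ld.w 8[r12],r11

   is rewritten to

       mov ep,r1         -- save ep in the r1 temporary
       mov r12,ep        -- point ep at the heavily used base register
       sld.w 0[ep],r10   ...   sld.w 8[ep],r11
       mov r1,ep         -- restore ep after the last rewritten use

   which pays off once there are more than three such uses; that is the
   2 * (uses - 3) byte figure printed by the TARGET_DEBUG message.  */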
1148
1149
1150 /* TARGET_MACHINE_DEPENDENT_REORG. On the V850, we use it to implement
1151 the -mep option: pointers that are used heavily within a basic block are
1152 copied into ep so that the short, implicit ep-relative addressing can be used. */
1153
1154 static void
1155 v850_reorg (void)
1156 {
1157 struct
1158 {
1159 int uses;
1160 rtx first_insn;
1161 rtx last_insn;
1162 }
1163 regs[FIRST_PSEUDO_REGISTER];
1164
1165 int i;
1166 int use_ep = FALSE;
1167 rtx r1 = NULL_RTX;
1168 rtx ep = NULL_RTX;
1169 rtx insn;
1170 rtx pattern;
1171
1172 /* If not ep mode, just return now. */
1173 if (!TARGET_EP)
1174 return;
1175
1176 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1177 {
1178 regs[i].uses = 0;
1179 regs[i].first_insn = NULL_RTX;
1180 regs[i].last_insn = NULL_RTX;
1181 }
1182
1183 for (insn = get_insns (); insn != NULL_RTX; insn = NEXT_INSN (insn))
1184 {
1185 switch (GET_CODE (insn))
1186 {
1187 /* End of basic block */
1188 default:
1189 if (!use_ep)
1190 {
1191 int max_uses = -1;
1192 int max_regno = -1;
1193
1194 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1195 {
1196 if (max_uses < regs[i].uses)
1197 {
1198 max_uses = regs[i].uses;
1199 max_regno = i;
1200 }
1201 }
1202
1203 if (max_uses > 3)
1204 substitute_ep_register (regs[max_regno].first_insn,
1205 regs[max_regno].last_insn,
1206 max_uses, max_regno, &r1, &ep);
1207 }
1208
1209 use_ep = FALSE;
1210 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1211 {
1212 regs[i].uses = 0;
1213 regs[i].first_insn = NULL_RTX;
1214 regs[i].last_insn = NULL_RTX;
1215 }
1216 break;
1217
1218 case NOTE:
1219 break;
1220
1221 case INSN:
1222 pattern = single_set (insn);
1223
1224 /* See if there are any memory references we can shorten */
1225 if (pattern)
1226 {
1227 rtx src = SET_SRC (pattern);
1228 rtx dest = SET_DEST (pattern);
1229 rtx mem;
1230 /* Memory operands are signed by default. */
1231 int unsignedp = FALSE;
1232
1233 /* We might have (SUBREG (MEM)) here, so just get rid of the
1234 subregs to make this code simpler. */
1235 if (GET_CODE (dest) == SUBREG
1236 && (GET_CODE (SUBREG_REG (dest)) == MEM
1237 || GET_CODE (SUBREG_REG (dest)) == REG))
1238 alter_subreg (&dest);
1239 if (GET_CODE (src) == SUBREG
1240 && (GET_CODE (SUBREG_REG (src)) == MEM
1241 || GET_CODE (SUBREG_REG (src)) == REG))
1242 alter_subreg (&src);
1243
1244 if (GET_CODE (dest) == MEM && GET_CODE (src) == MEM)
1245 mem = NULL_RTX;
1246
1247 else if (GET_CODE (dest) == MEM)
1248 mem = dest;
1249
1250 else if (GET_CODE (src) == MEM)
1251 mem = src;
1252
1253 else if (GET_CODE (src) == SIGN_EXTEND
1254 && GET_CODE (XEXP (src, 0)) == MEM)
1255 mem = XEXP (src, 0);
1256
1257 else if (GET_CODE (src) == ZERO_EXTEND
1258 && GET_CODE (XEXP (src, 0)) == MEM)
1259 {
1260 mem = XEXP (src, 0);
1261 unsignedp = TRUE;
1262 }
1263 else
1264 mem = NULL_RTX;
1265
1266 if (mem && ep_memory_operand (mem, GET_MODE (mem), unsignedp))
1267 use_ep = TRUE;
1268
1269 else if (!use_ep && mem
1270 && GET_MODE_SIZE (GET_MODE (mem)) <= UNITS_PER_WORD)
1271 {
1272 rtx addr = XEXP (mem, 0);
1273 int regno = -1;
1274 int short_p;
1275
1276 if (GET_CODE (addr) == REG)
1277 {
1278 short_p = TRUE;
1279 regno = REGNO (addr);
1280 }
1281
1282 else if (GET_CODE (addr) == PLUS
1283 && GET_CODE (XEXP (addr, 0)) == REG
1284 && GET_CODE (XEXP (addr, 1)) == CONST_INT
1285 && ((INTVAL (XEXP (addr, 1)))
1286 < ep_memory_offset (GET_MODE (mem), unsignedp))
1287 && ((INTVAL (XEXP (addr, 1))) >= 0))
1288 {
1289 short_p = TRUE;
1290 regno = REGNO (XEXP (addr, 0));
1291 }
1292
1293 else
1294 short_p = FALSE;
1295
1296 if (short_p)
1297 {
1298 regs[regno].uses++;
1299 regs[regno].last_insn = insn;
1300 if (!regs[regno].first_insn)
1301 regs[regno].first_insn = insn;
1302 }
1303 }
1304
1305 /* Loading up a register in the basic block zaps any savings
1306 for the register */
1307 if (GET_CODE (dest) == REG)
1308 {
1309 enum machine_mode mode = GET_MODE (dest);
1310 int regno;
1311 int endregno;
1312
1313 regno = REGNO (dest);
1314 endregno = regno + HARD_REGNO_NREGS (regno, mode);
1315
1316 if (!use_ep)
1317 {
1318 /* See if we can use the pointer before this
1319 modification. */
1320 int max_uses = -1;
1321 int max_regno = -1;
1322
1323 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1324 {
1325 if (max_uses < regs[i].uses)
1326 {
1327 max_uses = regs[i].uses;
1328 max_regno = i;
1329 }
1330 }
1331
1332 if (max_uses > 3
1333 && max_regno >= regno
1334 && max_regno < endregno)
1335 {
1336 substitute_ep_register (regs[max_regno].first_insn,
1337 regs[max_regno].last_insn,
1338 max_uses, max_regno, &r1,
1339 &ep);
1340
1341 /* Since we made a substitution, zap all remembered
1342 registers. */
1343 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1344 {
1345 regs[i].uses = 0;
1346 regs[i].first_insn = NULL_RTX;
1347 regs[i].last_insn = NULL_RTX;
1348 }
1349 }
1350 }
1351
1352 for (i = regno; i < endregno; i++)
1353 {
1354 regs[i].uses = 0;
1355 regs[i].first_insn = NULL_RTX;
1356 regs[i].last_insn = NULL_RTX;
1357 }
1358 }
1359 }
1360 }
1361 }
1362 }
1363
1364
1365 /* # of registers saved by the interrupt handler. */
1366 #define INTERRUPT_FIXED_NUM 4
1367
1368 /* # of bytes for registers saved by the interrupt handler. */
1369 #define INTERRUPT_FIXED_SAVE_SIZE (4 * INTERRUPT_FIXED_NUM)
1370
1371 /* # of registers saved in register parameter area. */
1372 #define INTERRUPT_REGPARM_NUM 4
1373 /* # of words saved for other registers. */
1374 #define INTERRUPT_ALL_SAVE_NUM \
1375 (30 - INTERRUPT_FIXED_NUM + INTERRUPT_REGPARM_NUM)
1376
1377 #define INTERRUPT_ALL_SAVE_SIZE (4 * INTERRUPT_ALL_SAVE_NUM)
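/* Worked arithmetic for the macros above (illustrative): the 4 fixed
   registers need 4 * 4 = 16 bytes (INTERRUPT_FIXED_SAVE_SIZE), and
   INTERRUPT_ALL_SAVE_NUM is 30 - 4 + 4 = 30 words, so an interrupt
   handler that saves everything around a call reserves a further
   INTERRUPT_ALL_SAVE_SIZE of 120 bytes of frame.  */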
1378
1379 int
1380 compute_register_save_size (long * p_reg_saved)
1381 {
1382 int size = 0;
1383 int i;
1384 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1385 int call_p = df_regs_ever_live_p (LINK_POINTER_REGNUM);
1386 long reg_saved = 0;
1387
1388 /* Count the return pointer if we need to save it. */
1389 if (crtl->profile && !call_p)
1390 {
1391 df_set_regs_ever_live (LINK_POINTER_REGNUM, true);
1392 call_p = 1;
1393 }
1394
1395 /* Count space for the register saves. */
1396 if (interrupt_handler)
1397 {
1398 for (i = 0; i <= 31; i++)
1399 switch (i)
1400 {
1401 default:
1402 if (df_regs_ever_live_p (i) || call_p)
1403 {
1404 size += 4;
1405 reg_saved |= 1L << i;
1406 }
1407 break;
1408
1409 /* We don't save/restore r0 or the stack pointer */
1410 case 0:
1411 case STACK_POINTER_REGNUM:
1412 break;
1413
1414 /* For registers with fixed use, we save them, set them to the
1415 appropriate value, and then restore them.
1416 These registers are handled specially, so don't list them
1417 on the list of registers to save in the prologue. */
1418 case 1: /* temp used to hold ep */
1419 case 4: /* gp */
1420 case 10: /* temp used to call interrupt save/restore */
1421 case EP_REGNUM: /* ep */
1422 size += 4;
1423 break;
1424 }
1425 }
1426 else
1427 {
1428 /* Find the first register that needs to be saved. */
1429 for (i = 0; i <= 31; i++)
1430 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1431 || i == LINK_POINTER_REGNUM))
1432 break;
1433
1434 /* If it is possible that an out-of-line helper function might be
1435 used to generate the prologue for the current function, then we
1436 need to cover the possibility that such a helper function will
1437 be used, despite the fact that there might be gaps in the list of
1438 registers that need to be saved. To detect this we note that the
1439 helper functions always push at least register r29 (provided
1440 that the function is not an interrupt handler). */
1441
1442 if (TARGET_PROLOG_FUNCTION
1443 && (i == 2 || ((i >= 20) && (i < 30))))
1444 {
1445 if (i == 2)
1446 {
1447 size += 4;
1448 reg_saved |= 1L << i;
1449
1450 i = 20;
1451 }
1452
1453 /* Helper functions save all registers between the starting
1454 register and the last register, regardless of whether they
1455 are actually used by the function or not. */
1456 for (; i <= 29; i++)
1457 {
1458 size += 4;
1459 reg_saved |= 1L << i;
1460 }
1461
1462 if (df_regs_ever_live_p (LINK_POINTER_REGNUM))
1463 {
1464 size += 4;
1465 reg_saved |= 1L << LINK_POINTER_REGNUM;
1466 }
1467 }
1468 else
1469 {
1470 for (; i <= 31; i++)
1471 if (df_regs_ever_live_p (i) && ((! call_used_regs[i])
1472 || i == LINK_POINTER_REGNUM))
1473 {
1474 size += 4;
1475 reg_saved |= 1L << i;
1476 }
1477 }
1478 }
1479
1480 if (p_reg_saved)
1481 *p_reg_saved = reg_saved;
1482
1483 return size;
1484 }
1485
1486 int
1487 compute_frame_size (int size, long * p_reg_saved)
1488 {
1489 return (size
1490 + compute_register_save_size (p_reg_saved)
1491 + crtl->outgoing_args_size);
1492 }
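/* Illustrative frame-size arithmetic: a function with 24 bytes of locals,
   three call-saved registers live and 8 bytes of outgoing arguments gets
   24 + (3 * 4) + 8 = 44 bytes from compute_frame_size; expand_prologue
   below then decides whether that fits a single CONST_OK_FOR_K stack
   adjustment or has to be allocated in pieces.  */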
1493
1494
1495 void
1496 expand_prologue (void)
1497 {
1498 unsigned int i;
1499 int offset;
1500 unsigned int size = get_frame_size ();
1501 unsigned int actual_fsize;
1502 unsigned int init_stack_alloc = 0;
1503 rtx save_regs[32];
1504 rtx save_all;
1505 unsigned int num_save;
1506 unsigned int default_stack;
1507 int code;
1508 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1509 long reg_saved = 0;
1510
1511 actual_fsize = compute_frame_size (size, &reg_saved);
1512
1513 /* Save/setup global registers for interrupt functions right now. */
1514 if (interrupt_handler)
1515 {
1516 if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
1517 emit_insn (gen_callt_save_interrupt ());
1518 else
1519 emit_insn (gen_save_interrupt ());
1520
1521 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1522
1523 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1524 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1525 }
1526
1527 /* Save arg registers to the stack if necessary. */
1528 else if (crtl->args.info.anonymous_args)
1529 {
1530 if (TARGET_PROLOG_FUNCTION && TARGET_V850E && !TARGET_DISABLE_CALLT)
1531 emit_insn (gen_save_r6_r9_v850e ());
1532 else if (TARGET_PROLOG_FUNCTION && ! TARGET_LONG_CALLS)
1533 emit_insn (gen_save_r6_r9 ());
1534 else
1535 {
1536 offset = 0;
1537 for (i = 6; i < 10; i++)
1538 {
1539 emit_move_insn (gen_rtx_MEM (SImode,
1540 plus_constant (stack_pointer_rtx,
1541 offset)),
1542 gen_rtx_REG (SImode, i));
1543 offset += 4;
1544 }
1545 }
1546 }
1547
1548 /* Identify all of the saved registers. */
1549 num_save = 0;
1550 default_stack = 0;
1551 for (i = 1; i < 31; i++)
1552 {
1553 if (((1L << i) & reg_saved) != 0)
1554 save_regs[num_save++] = gen_rtx_REG (Pmode, i);
1555 }
1556
1557 /* If the return pointer is saved, the helper functions also allocate
1558 16 bytes of stack for arguments to be saved in. */
1559 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1560 {
1561 save_regs[num_save++] = gen_rtx_REG (Pmode, LINK_POINTER_REGNUM);
1562 default_stack = 16;
1563 }
1564
1565 /* See if we have an insn that allocates stack space and saves the particular
1566 registers we want to. */
1567 save_all = NULL_RTX;
1568 if (TARGET_PROLOG_FUNCTION && num_save > 0 && actual_fsize >= default_stack)
1569 {
1570 int alloc_stack = (4 * num_save) + default_stack;
1571 int unalloc_stack = actual_fsize - alloc_stack;
1572 int save_func_len = 4;
1573 int save_normal_len;
1574
1575 if (unalloc_stack)
1576 save_func_len += CONST_OK_FOR_J (unalloc_stack) ? 2 : 4;
1577
1578 /* See if we would have used ep to save the registers. */
1579 if (TARGET_EP && num_save > 3 && (unsigned)actual_fsize < 255)
1580 save_normal_len = (3 * 2) + (2 * num_save);
1581 else
1582 save_normal_len = 4 * num_save;
1583
1584 save_normal_len += CONST_OK_FOR_J (actual_fsize) ? 2 : 4;
1585
1586 /* Don't bother checking if we don't actually save any space.
1587 This happens for instance if one register is saved and additional
1588 stack space is allocated. */
1589 if (save_func_len < save_normal_len)
1590 {
1591 save_all = gen_rtx_PARALLEL
1592 (VOIDmode,
1593 rtvec_alloc (num_save + 1
1594 + (TARGET_V850 ? (TARGET_LONG_CALLS ? 2 : 1) : 0)));
1595
1596 XVECEXP (save_all, 0, 0)
1597 = gen_rtx_SET (VOIDmode,
1598 stack_pointer_rtx,
1599 plus_constant (stack_pointer_rtx, -alloc_stack));
1600
1601 offset = - default_stack;
1602 for (i = 0; i < num_save; i++)
1603 {
1604 XVECEXP (save_all, 0, i+1)
1605 = gen_rtx_SET (VOIDmode,
1606 gen_rtx_MEM (Pmode,
1607 plus_constant (stack_pointer_rtx,
1608 offset)),
1609 save_regs[i]);
1610 offset -= 4;
1611 }
1612
1613 if (TARGET_V850)
1614 {
1615 XVECEXP (save_all, 0, num_save + 1)
1616 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 10));
1617
1618 if (TARGET_LONG_CALLS)
1619 XVECEXP (save_all, 0, num_save + 2)
1620 = gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (Pmode, 11));
1621 }
1622
1623 code = recog (save_all, NULL_RTX, NULL);
1624 if (code >= 0)
1625 {
1626 rtx insn = emit_insn (save_all);
1627 INSN_CODE (insn) = code;
1628 actual_fsize -= alloc_stack;
1629
1630 if (TARGET_DEBUG)
1631 fprintf (stderr, "\
1632 Saved %d bytes via prologue function (%d vs. %d) for function %s\n",
1633 save_normal_len - save_func_len,
1634 save_normal_len, save_func_len,
1635 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)));
1636 }
1637 else
1638 save_all = NULL_RTX;
1639 }
1640 }
1641
1642 /* If no prologue save function is available, store the registers the
1643 old-fashioned way (one by one). */
1644 if (!save_all)
1645 {
1646 /* Special case interrupt functions that save all registers for a call. */
1647 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1648 {
1649 if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
1650 emit_insn (gen_callt_save_all_interrupt ());
1651 else
1652 emit_insn (gen_save_all_interrupt ());
1653 }
1654 else
1655 {
1656 /* If the stack is too big, allocate it in chunks so we can do the
1657 register saves. We use the register save size as the initial
1658 allocation so that the saves stay within reach of the ep register. */
1659 if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1660 init_stack_alloc = compute_register_save_size (NULL);
1661 else
1662 init_stack_alloc = actual_fsize;
1663
1664 /* Save registers at the beginning of the stack frame. */
1665 offset = init_stack_alloc - 4;
1666
1667 if (init_stack_alloc)
1668 emit_insn (gen_addsi3 (stack_pointer_rtx,
1669 stack_pointer_rtx,
1670 GEN_INT (- (signed) init_stack_alloc)));
1671
1672 /* Save the return pointer first. */
1673 if (num_save > 0 && REGNO (save_regs[num_save-1]) == LINK_POINTER_REGNUM)
1674 {
1675 emit_move_insn (gen_rtx_MEM (SImode,
1676 plus_constant (stack_pointer_rtx,
1677 offset)),
1678 save_regs[--num_save]);
1679 offset -= 4;
1680 }
1681
1682 for (i = 0; i < num_save; i++)
1683 {
1684 emit_move_insn (gen_rtx_MEM (SImode,
1685 plus_constant (stack_pointer_rtx,
1686 offset)),
1687 save_regs[i]);
1688 offset -= 4;
1689 }
1690 }
1691 }
1692
1693 /* Allocate the rest of the stack that was not allocated above (either it is
1694 > 32K or we just called a function to save the registers and needed more
1695 stack). */
1696 if (actual_fsize > init_stack_alloc)
1697 {
1698 int diff = actual_fsize - init_stack_alloc;
1699 if (CONST_OK_FOR_K (diff))
1700 emit_insn (gen_addsi3 (stack_pointer_rtx,
1701 stack_pointer_rtx,
1702 GEN_INT (-diff)));
1703 else
1704 {
1705 rtx reg = gen_rtx_REG (Pmode, 12);
1706 emit_move_insn (reg, GEN_INT (-diff));
1707 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx, reg));
1708 }
1709 }
1710
1711 /* If we need a frame pointer, set it up now. */
1712 if (frame_pointer_needed)
1713 emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
1714 }
1715
1716
1717 void
1718 expand_epilogue (void)
1719 {
1720 unsigned int i;
1721 int offset;
1722 unsigned int size = get_frame_size ();
1723 long reg_saved = 0;
1724 int actual_fsize = compute_frame_size (size, &reg_saved);
1725 unsigned int init_stack_free = 0;
1726 rtx restore_regs[32];
1727 rtx restore_all;
1728 unsigned int num_restore;
1729 unsigned int default_stack;
1730 int code;
1731 int interrupt_handler = v850_interrupt_function_p (current_function_decl);
1732
1733 /* Discount the stack space already set aside by the interrupt entry code. */
1734 if (interrupt_handler)
1735 {
1736 actual_fsize -= INTERRUPT_FIXED_SAVE_SIZE;
1737 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1738 actual_fsize -= INTERRUPT_ALL_SAVE_SIZE;
1739 }
1740
1741 /* Cut off any dynamic stack created. */
1742 if (frame_pointer_needed)
1743 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
1744
1745 /* Identify all of the saved registers. */
1746 num_restore = 0;
1747 default_stack = 0;
1748 for (i = 1; i < 31; i++)
1749 {
1750 if (((1L << i) & reg_saved) != 0)
1751 restore_regs[num_restore++] = gen_rtx_REG (Pmode, i);
1752 }
1753
1754 /* If the return pointer is saved, the helper functions also allocate
1755 16 bytes of stack for arguments to be saved in. */
1756 if (((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1757 {
1758 restore_regs[num_restore++] = gen_rtx_REG (Pmode, LINK_POINTER_REGNUM);
1759 default_stack = 16;
1760 }
1761
1762 /* See if we have an insn that restores the particular registers we
1763 want to. */
1764 restore_all = NULL_RTX;
1765
1766 if (TARGET_PROLOG_FUNCTION
1767 && num_restore > 0
1768 && actual_fsize >= (signed) default_stack
1769 && !interrupt_handler)
1770 {
1771 int alloc_stack = (4 * num_restore) + default_stack;
1772 int unalloc_stack = actual_fsize - alloc_stack;
1773 int restore_func_len = 4;
1774 int restore_normal_len;
1775
1776 if (unalloc_stack)
1777 restore_func_len += CONST_OK_FOR_J (unalloc_stack) ? 2 : 4;
1778
1779 /* See if we would have used ep to restore the registers. */
1780 if (TARGET_EP && num_restore > 3 && (unsigned)actual_fsize < 255)
1781 restore_normal_len = (3 * 2) + (2 * num_restore);
1782 else
1783 restore_normal_len = 4 * num_restore;
1784
1785 restore_normal_len += (CONST_OK_FOR_J (actual_fsize) ? 2 : 4) + 2;
1786
1787 /* Don't bother checking if we don't actually save any space. */
1788 if (restore_func_len < restore_normal_len)
1789 {
1790 restore_all = gen_rtx_PARALLEL (VOIDmode,
1791 rtvec_alloc (num_restore + 2));
1792 XVECEXP (restore_all, 0, 0) = gen_rtx_RETURN (VOIDmode);
1793 XVECEXP (restore_all, 0, 1)
1794 = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
1795 gen_rtx_PLUS (Pmode,
1796 stack_pointer_rtx,
1797 GEN_INT (alloc_stack)));
1798
1799 offset = alloc_stack - 4;
1800 for (i = 0; i < num_restore; i++)
1801 {
1802 XVECEXP (restore_all, 0, i+2)
1803 = gen_rtx_SET (VOIDmode,
1804 restore_regs[i],
1805 gen_rtx_MEM (Pmode,
1806 plus_constant (stack_pointer_rtx,
1807 offset)));
1808 offset -= 4;
1809 }
1810
1811 code = recog (restore_all, NULL_RTX, NULL);
1812
1813 if (code >= 0)
1814 {
1815 rtx insn;
1816
1817 actual_fsize -= alloc_stack;
1818 if (actual_fsize)
1819 {
1820 if (CONST_OK_FOR_K (actual_fsize))
1821 emit_insn (gen_addsi3 (stack_pointer_rtx,
1822 stack_pointer_rtx,
1823 GEN_INT (actual_fsize)));
1824 else
1825 {
1826 rtx reg = gen_rtx_REG (Pmode, 12);
1827 emit_move_insn (reg, GEN_INT (actual_fsize));
1828 emit_insn (gen_addsi3 (stack_pointer_rtx,
1829 stack_pointer_rtx,
1830 reg));
1831 }
1832 }
1833
1834 insn = emit_jump_insn (restore_all);
1835 INSN_CODE (insn) = code;
1836
1837 if (TARGET_DEBUG)
1838 fprintf (stderr, "\
1839 Saved %d bytes via epilogue function (%d vs. %d) in function %s\n",
1840 restore_normal_len - restore_func_len,
1841 restore_normal_len, restore_func_len,
1842 IDENTIFIER_POINTER (DECL_NAME (current_function_decl)));
1843 }
1844 else
1845 restore_all = NULL_RTX;
1846 }
1847 }
1848
1849 /* If no epilogue save function is available, restore the registers the
1850 old fashioned way (one by one). */
1851 if (!restore_all)
1852 {
1853 /* If the stack is large, we need to cut it down in 2 pieces. */
1854 if (actual_fsize && !CONST_OK_FOR_K (-actual_fsize))
1855 init_stack_free = 4 * num_restore;
1856 else
1857 init_stack_free = (signed) actual_fsize;
1858
1859 /* Deallocate the rest of the stack if it is > 32K. */
1860 if (actual_fsize > init_stack_free)
1861 {
1862 int diff;
1863
1864 diff = actual_fsize - ((interrupt_handler) ? 0 : init_stack_free);
1865
1866 if (CONST_OK_FOR_K (diff))
1867 emit_insn (gen_addsi3 (stack_pointer_rtx,
1868 stack_pointer_rtx,
1869 GEN_INT (diff)));
1870 else
1871 {
1872 rtx reg = gen_rtx_REG (Pmode, 12);
1873 emit_move_insn (reg, GEN_INT (diff));
1874 emit_insn (gen_addsi3 (stack_pointer_rtx,
1875 stack_pointer_rtx,
1876 reg));
1877 }
1878 }
1879
1880 /* Special case interrupt functions that save all registers
1881 for a call. */
1882 if (interrupt_handler && ((1L << LINK_POINTER_REGNUM) & reg_saved) != 0)
1883 {
1884 if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
1885 emit_insn (gen_callt_restore_all_interrupt ());
1886 else
1887 emit_insn (gen_restore_all_interrupt ());
1888 }
1889 else
1890 {
1891 /* Restore registers from the beginning of the stack frame. */
1892 offset = init_stack_free - 4;
1893
1894 /* Restore the return pointer first. */
1895 if (num_restore > 0
1896 && REGNO (restore_regs [num_restore - 1]) == LINK_POINTER_REGNUM)
1897 {
1898 emit_move_insn (restore_regs[--num_restore],
1899 gen_rtx_MEM (SImode,
1900 plus_constant (stack_pointer_rtx,
1901 offset)));
1902 offset -= 4;
1903 }
1904
1905 for (i = 0; i < num_restore; i++)
1906 {
1907 emit_move_insn (restore_regs[i],
1908 gen_rtx_MEM (SImode,
1909 plus_constant (stack_pointer_rtx,
1910 offset)));
1911
1912 emit_use (restore_regs[i]);
1913 offset -= 4;
1914 }
1915
1916 /* Cut back the remainder of the stack. */
1917 if (init_stack_free)
1918 emit_insn (gen_addsi3 (stack_pointer_rtx,
1919 stack_pointer_rtx,
1920 GEN_INT (init_stack_free)));
1921 }
1922
1923 /* And return or use reti for interrupt handlers. */
1924 if (interrupt_handler)
1925 {
1926 if (TARGET_V850E && ! TARGET_DISABLE_CALLT)
1927 emit_insn (gen_callt_return_interrupt ());
1928 else
1929 emit_jump_insn (gen_return_interrupt ());
1930 }
1931 else if (actual_fsize)
1932 emit_jump_insn (gen_return_internal ());
1933 else
1934 emit_jump_insn (gen_return ());
1935 }
1936
1937 v850_interrupt_cache_p = FALSE;
1938 v850_interrupt_p = FALSE;
1939 }
1940
1941
1942 /* Update the condition code from the insn. */
1943
1944 void
1945 notice_update_cc (rtx body, rtx insn)
1946 {
1947 switch (get_attr_cc (insn))
1948 {
1949 case CC_NONE:
1950 /* Insn does not affect CC at all. */
1951 break;
1952
1953 case CC_NONE_0HIT:
1954 /* Insn does not change CC, but the 0'th operand has been changed. */
1955 if (cc_status.value1 != 0
1956 && reg_overlap_mentioned_p (recog_data.operand[0], cc_status.value1))
1957 cc_status.value1 = 0;
1958 break;
1959
1960 case CC_SET_ZN:
1961 /* Insn sets the Z,N flags of CC to recog_data.operand[0].
1962 V,C is in an unusable state. */
1963 CC_STATUS_INIT;
1964 cc_status.flags |= CC_OVERFLOW_UNUSABLE | CC_NO_CARRY;
1965 cc_status.value1 = recog_data.operand[0];
1966 break;
1967
1968 case CC_SET_ZNV:
1969 /* Insn sets the Z,N,V flags of CC to recog_data.operand[0].
1970 C is in an unusable state. */
1971 CC_STATUS_INIT;
1972 cc_status.flags |= CC_NO_CARRY;
1973 cc_status.value1 = recog_data.operand[0];
1974 break;
1975
1976 case CC_COMPARE:
1977 /* The insn is a compare instruction. */
1978 CC_STATUS_INIT;
1979 cc_status.value1 = SET_SRC (body);
1980 break;
1981
1982 case CC_CLOBBER:
1983 /* Insn doesn't leave CC in a usable state. */
1984 CC_STATUS_INIT;
1985 break;
1986 }
1987 }
1988
1989 /* Retrieve the data area that has been chosen for the given decl. */
1990
1991 v850_data_area
1992 v850_get_data_area (tree decl)
1993 {
1994 if (lookup_attribute ("sda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
1995 return DATA_AREA_SDA;
1996
1997 if (lookup_attribute ("tda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
1998 return DATA_AREA_TDA;
1999
2000 if (lookup_attribute ("zda", DECL_ATTRIBUTES (decl)) != NULL_TREE)
2001 return DATA_AREA_ZDA;
2002
2003 return DATA_AREA_NORMAL;
2004 }
2005
2006 /* Store the indicated data area in the decl's attributes. */
2007
2008 static void
2009 v850_set_data_area (tree decl, v850_data_area data_area)
2010 {
2011 tree name;
2012
2013 switch (data_area)
2014 {
2015 case DATA_AREA_SDA: name = get_identifier ("sda"); break;
2016 case DATA_AREA_TDA: name = get_identifier ("tda"); break;
2017 case DATA_AREA_ZDA: name = get_identifier ("zda"); break;
2018 default:
2019 return;
2020 }
2021
2022 DECL_ATTRIBUTES (decl) = tree_cons
2023 (name, NULL, DECL_ATTRIBUTES (decl));
2024 }
2025
2026 const struct attribute_spec v850_attribute_table[] =
2027 {
2028 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
2029 { "interrupt_handler", 0, 0, true, false, false, v850_handle_interrupt_attribute },
2030 { "interrupt", 0, 0, true, false, false, v850_handle_interrupt_attribute },
2031 { "sda", 0, 0, true, false, false, v850_handle_data_area_attribute },
2032 { "tda", 0, 0, true, false, false, v850_handle_data_area_attribute },
2033 { "zda", 0, 0, true, false, false, v850_handle_data_area_attribute },
2034 { NULL, 0, 0, false, false, false, NULL }
2035 };
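/* Example of how these attributes appear in user code (declarations are
   hypothetical):

       void timer_isr (void) __attribute__ ((interrupt_handler));
       int ring_buf[16] __attribute__ ((sda));

   The handlers below reject the data-area attributes on local variables
   and diagnose a data area that conflicts with a previous declaration.  */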
2036
2037 /* Handle an "interrupt" attribute; arguments as in
2038 struct attribute_spec.handler. */
2039 static tree
2040 v850_handle_interrupt_attribute (tree * node,
2041 tree name,
2042 tree args ATTRIBUTE_UNUSED,
2043 int flags ATTRIBUTE_UNUSED,
2044 bool * no_add_attrs)
2045 {
2046 if (TREE_CODE (*node) != FUNCTION_DECL)
2047 {
2048 warning (OPT_Wattributes, "%qs attribute only applies to functions",
2049 IDENTIFIER_POINTER (name));
2050 *no_add_attrs = true;
2051 }
2052
2053 return NULL_TREE;
2054 }
2055
2056 /* Handle a "sda", "tda" or "zda" attribute; arguments as in
2057 struct attribute_spec.handler. */
2058 static tree
2059 v850_handle_data_area_attribute (tree* node,
2060 tree name,
2061 tree args ATTRIBUTE_UNUSED,
2062 int flags ATTRIBUTE_UNUSED,
2063 bool * no_add_attrs)
2064 {
2065 v850_data_area data_area;
2066 v850_data_area area;
2067 tree decl = *node;
2068
2069 /* Implement data area attribute. */
2070 if (is_attribute_p ("sda", name))
2071 data_area = DATA_AREA_SDA;
2072 else if (is_attribute_p ("tda", name))
2073 data_area = DATA_AREA_TDA;
2074 else if (is_attribute_p ("zda", name))
2075 data_area = DATA_AREA_ZDA;
2076 else
2077 gcc_unreachable ();
2078
2079 switch (TREE_CODE (decl))
2080 {
2081 case VAR_DECL:
2082 if (current_function_decl != NULL_TREE)
2083 {
2084 error ("%Jdata area attributes cannot be specified for "
2085 "local variables", decl);
2086 *no_add_attrs = true;
2087 }
2088
2089 /* Drop through. */
2090
2091 case FUNCTION_DECL:
2092 area = v850_get_data_area (decl);
2093 if (area != DATA_AREA_NORMAL && data_area != area)
2094 {
2095 error ("data area of %q+D conflicts with previous declaration",
2096 decl);
2097 *no_add_attrs = true;
2098 }
2099 break;
2100
2101 default:
2102 break;
2103 }
2104
2105 return NULL_TREE;
2106 }
2107
2108
2109 /* Return nonzero if FUNC is an interrupt function as specified
2110 by the "interrupt" attribute. */
2111
2112 int
2113 v850_interrupt_function_p (tree func)
2114 {
2115 tree a;
2116 int ret = 0;
2117
2118 if (v850_interrupt_cache_p)
2119 return v850_interrupt_p;
2120
2121 if (TREE_CODE (func) != FUNCTION_DECL)
2122 return 0;
2123
2124 a = lookup_attribute ("interrupt_handler", DECL_ATTRIBUTES (func));
2125 if (a != NULL_TREE)
2126 ret = 1;
2127
2128 else
2129 {
2130 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
2131 ret = a != NULL_TREE;
2132 }
2133
2134 /* It's not safe to trust global variables until after function inlining has
2135 been done. */
2136 if (reload_completed | reload_in_progress)
2137 v850_interrupt_p = ret;
2138
2139 return ret;
2140 }
2141
2142
2143 static void
2144 v850_encode_data_area (tree decl, rtx symbol)
2145 {
2146 int flags;
2147
2148 /* Map explicit sections into the appropriate attribute */
2149 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2150 {
2151 if (DECL_SECTION_NAME (decl))
2152 {
2153 const char *name = TREE_STRING_POINTER (DECL_SECTION_NAME (decl));
2154
2155 if (streq (name, ".zdata") || streq (name, ".zbss"))
2156 v850_set_data_area (decl, DATA_AREA_ZDA);
2157
2158 else if (streq (name, ".sdata") || streq (name, ".sbss"))
2159 v850_set_data_area (decl, DATA_AREA_SDA);
2160
2161 else if (streq (name, ".tdata"))
2162 v850_set_data_area (decl, DATA_AREA_TDA);
2163 }
2164
2165 /* If no attribute, support -m{zda,sda,tda}=n */
2166 else
2167 {
2168 int size = int_size_in_bytes (TREE_TYPE (decl));
2169 if (size <= 0)
2170 ;
2171
2172 else if (size <= small_memory [(int) SMALL_MEMORY_TDA].max)
2173 v850_set_data_area (decl, DATA_AREA_TDA);
2174
2175 else if (size <= small_memory [(int) SMALL_MEMORY_SDA].max)
2176 v850_set_data_area (decl, DATA_AREA_SDA);
2177
2178 else if (size <= small_memory [(int) SMALL_MEMORY_ZDA].max)
2179 v850_set_data_area (decl, DATA_AREA_ZDA);
2180 }
2181
2182 if (v850_get_data_area (decl) == DATA_AREA_NORMAL)
2183 return;
2184 }
2185
2186 flags = SYMBOL_REF_FLAGS (symbol);
2187 switch (v850_get_data_area (decl))
2188 {
2189 case DATA_AREA_ZDA: flags |= SYMBOL_FLAG_ZDA; break;
2190 case DATA_AREA_TDA: flags |= SYMBOL_FLAG_TDA; break;
2191 case DATA_AREA_SDA: flags |= SYMBOL_FLAG_SDA; break;
2192 default: gcc_unreachable ();
2193 }
2194 SYMBOL_REF_FLAGS (symbol) = flags;
2195 }
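
/* As an illustration, a global that was given a ".zdata" section
   attribute is marked with SYMBOL_FLAG_ZDA here; likewise, assuming a
   command line option such as -mtda=4 has raised the TDA size limit, a
   one byte global with no explicit section or attribute would be
   routed to the tiny data area and flagged with SYMBOL_FLAG_TDA.  */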
2196
2197 static void
2198 v850_encode_section_info (tree decl, rtx rtl, int first)
2199 {
2200 default_encode_section_info (decl, rtl, first);
2201
2202 if (TREE_CODE (decl) == VAR_DECL
2203 && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)))
2204 v850_encode_data_area (decl, XEXP (rtl, 0));
2205 }
2206
2207 /* Construct a JR instruction to a routine that will perform the equivalent of
2208 the RTL passed in as an argument. This RTL is a function epilogue that
2209 pops registers off the stack and possibly releases some extra stack space
2210 as well. The code has already verified that the RTL matches these
2211 requirements. */
2212 char *
2213 construct_restore_jr (rtx op)
2214 {
2215 int count = XVECLEN (op, 0);
2216 int stack_bytes;
2217 unsigned long int mask;
2218 unsigned long int first;
2219 unsigned long int last;
2220 int i;
2221 static char buff [100]; /* XXX */
2222
2223 if (count <= 2)
2224 {
2225 error ("bogus JR construction: %d", count);
2226 return NULL;
2227 }
2228
2229 /* Work out how many bytes to pop off the stack before retrieving
2230 registers. */
2231 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2232 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2233 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2234
2235 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2236
2237 /* Each pop will remove 4 bytes from the stack.... */
2238 stack_bytes -= (count - 2) * 4;
2239
2240 /* Make sure that the amount we are popping is either 0 or 16 bytes. */
2241 if (stack_bytes != 0 && stack_bytes != 16)
2242 {
2243 error ("bad amount of stack space removal: %d", stack_bytes);
2244 return NULL;
2245 }
2246
2247 /* Now compute the bit mask of registers to push. */
2248 mask = 0;
2249 for (i = 2; i < count; i++)
2250 {
2251 rtx vector_element = XVECEXP (op, 0, i);
2252
2253 gcc_assert (GET_CODE (vector_element) == SET);
2254 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2255 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2256 SImode));
2257
2258 mask |= 1 << REGNO (SET_DEST (vector_element));
2259 }
2260
2261 /* Scan for the first register to pop. */
2262 for (first = 0; first < 32; first++)
2263 {
2264 if (mask & (1 << first))
2265 break;
2266 }
2267
2268 gcc_assert (first < 32);
2269
2270 /* Discover the last register to pop. */
2271 if (mask & (1 << LINK_POINTER_REGNUM))
2272 {
2273 gcc_assert (stack_bytes == 16);
2274
2275 last = LINK_POINTER_REGNUM;
2276 }
2277 else
2278 {
2279 gcc_assert (!stack_bytes);
2280 gcc_assert (mask & (1 << 29));
2281
2282 last = 29;
2283 }
2284
2285 /* Note, it is possible to have gaps in the register mask.
2286 We ignore this here, and generate a JR anyway. We will
2287 be popping more registers than is strictly necessary, but
2288 it does save code space. */
2289
2290 if (TARGET_LONG_CALLS)
2291 {
2292 char name[40];
2293
2294 if (first == last)
2295 sprintf (name, "__return_%s", reg_names [first]);
2296 else
2297 sprintf (name, "__return_%s_%s", reg_names [first], reg_names [last]);
2298
2299 sprintf (buff, "movhi hi(%s), r0, r6\n\tmovea lo(%s), r6, r6\n\tjmp r6",
2300 name, name);
2301 }
2302 else
2303 {
2304 if (first == last)
2305 sprintf (buff, "jr __return_%s", reg_names [first]);
2306 else
2307 sprintf (buff, "jr __return_%s_%s", reg_names [first], reg_names [last]);
2308 }
2309
2310 return buff;
2311 }
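
/* As an example, given an epilogue that pops r20 through r29 with no
   extra stack adjustment, the code above returns "jr __return_r20_r29"
   when -mlong-calls is off, and reaches the same helper label through
   the movhi/movea/jmp sequence when it is on.  */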
2312
2313
2314 /* Construct a JARL instruction to a routine that will perform the equivalent
2315 of the RTL passed as a parameter. This RTL is a function prologue that
2316 saves some of the registers r20 - r31 onto the stack, and possibly acquires
2317 some stack space as well. The code has already verified that the RTL
2318 matches these requirements. */
2319 char *
2320 construct_save_jarl (rtx op)
2321 {
2322 int count = XVECLEN (op, 0);
2323 int stack_bytes;
2324 unsigned long int mask;
2325 unsigned long int first;
2326 unsigned long int last;
2327 int i;
2328 static char buff [100]; /* XXX */
2329
2330 if (count <= 2)
2331 {
2332 error ("bogus JARL construction: %d\n", count);
2333 return NULL;
2334 }
2335
2336 /* Paranoia. */
2337 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2338 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2339 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 0)) == REG);
2340 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2341
2342 /* Work out how many bytes to push onto the stack after storing the
2343 registers. */
2344 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2345
2346 /* Each push will put 4 bytes onto the stack. */
2347 stack_bytes += (count - (TARGET_LONG_CALLS ? 3 : 2)) * 4;
2348
2349 /* Make sure that the amount of stack space we are acquiring is either 0 or 16 bytes. */
2350 if (stack_bytes != 0 && stack_bytes != -16)
2351 {
2352 error ("bad amount of stack space removal: %d", stack_bytes);
2353 return NULL;
2354 }
2355
2356 /* Now compute the bit mask of registers to push. */
2357 mask = 0;
2358 for (i = 1; i < count - (TARGET_LONG_CALLS ? 2 : 1); i++)
2359 {
2360 rtx vector_element = XVECEXP (op, 0, i);
2361
2362 gcc_assert (GET_CODE (vector_element) == SET);
2363 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2364 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2365 SImode));
2366
2367 mask |= 1 << REGNO (SET_SRC (vector_element));
2368 }
2369
2370 /* Scan for the first register to push. */
2371 for (first = 0; first < 32; first++)
2372 {
2373 if (mask & (1 << first))
2374 break;
2375 }
2376
2377 gcc_assert (first < 32);
2378
2379 /* Discover the last register to push. */
2380 if (mask & (1 << LINK_POINTER_REGNUM))
2381 {
2382 gcc_assert (stack_bytes == -16);
2383
2384 last = LINK_POINTER_REGNUM;
2385 }
2386 else
2387 {
2388 gcc_assert (!stack_bytes);
2389 gcc_assert (mask & (1 << 29));
2390
2391 last = 29;
2392 }
2393
2394 /* Note, it is possible to have gaps in the register mask.
2395 We ignore this here, and generate a JARL anyway. We will
2396 be pushing more registers than is strictly necessary, but
2397 it does save code space. */
2398
2399 if (TARGET_LONG_CALLS)
2400 {
2401 char name[40];
2402
2403 if (first == last)
2404 sprintf (name, "__save_%s", reg_names [first]);
2405 else
2406 sprintf (name, "__save_%s_%s", reg_names [first], reg_names [last]);
2407
2408 sprintf (buff, "movhi hi(%s), r0, r11\n\tmovea lo(%s), r11, r11\n\tjarl .+4, r10\n\tadd 4, r10\n\tjmp r11",
2409 name, name);
2410 }
2411 else
2412 {
2413 if (first == last)
2414 sprintf (buff, "jarl __save_%s, r10", reg_names [first]);
2415 else
2416 sprintf (buff, "jarl __save_%s_%s, r10", reg_names [first],
2417 reg_names [last]);
2418 }
2419
2420 return buff;
2421 }
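
/* Correspondingly, a prologue that saves r20 through r29 without
   acquiring extra stack space comes out as "jarl __save_r20_r29, r10",
   or, under -mlong-calls, reaches the same __save_r20_r29 helper
   indirectly through r11.  */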
2422
2423 extern tree last_assemble_variable_decl;
2424 extern int size_directive_output;
2425
2426 /* A version of asm_output_aligned_bss() that copes with the special
2427 data areas of the v850. */
2428 void
2429 v850_output_aligned_bss (FILE * file,
2430 tree decl,
2431 const char * name,
2432 unsigned HOST_WIDE_INT size,
2433 int align)
2434 {
2435 switch (v850_get_data_area (decl))
2436 {
2437 case DATA_AREA_ZDA:
2438 switch_to_section (zbss_section);
2439 break;
2440
2441 case DATA_AREA_SDA:
2442 switch_to_section (sbss_section);
2443 break;
2444
2445 case DATA_AREA_TDA:
2446 switch_to_section (tdata_section);
break;
2447
2448 default:
2449 switch_to_section (bss_section);
2450 break;
2451 }
2452
2453 ASM_OUTPUT_ALIGN (file, floor_log2 (align / BITS_PER_UNIT));
2454 #ifdef ASM_DECLARE_OBJECT_NAME
2455 last_assemble_variable_decl = decl;
2456 ASM_DECLARE_OBJECT_NAME (file, name, decl);
2457 #else
2458 /* Standard thing is just output label for the object. */
2459 ASM_OUTPUT_LABEL (file, name);
2460 #endif /* ASM_DECLARE_OBJECT_NAME */
2461 ASM_OUTPUT_SKIP (file, size ? size : 1);
2462 }
2463
2464 /* Called via the macro ASM_OUTPUT_DECL_COMMON */
2465 void
2466 v850_output_common (FILE * file,
2467 tree decl,
2468 const char * name,
2469 int size,
2470 int align)
2471 {
2472 if (decl == NULL_TREE)
2473 {
2474 fprintf (file, "%s", COMMON_ASM_OP);
2475 }
2476 else
2477 {
2478 switch (v850_get_data_area (decl))
2479 {
2480 case DATA_AREA_ZDA:
2481 fprintf (file, "%s", ZCOMMON_ASM_OP);
2482 break;
2483
2484 case DATA_AREA_SDA:
2485 fprintf (file, "%s", SCOMMON_ASM_OP);
2486 break;
2487
2488 case DATA_AREA_TDA:
2489 fprintf (file, "%s", TCOMMON_ASM_OP);
2490 break;
2491
2492 default:
2493 fprintf (file, "%s", COMMON_ASM_OP);
2494 break;
2495 }
2496 }
2497
2498 assemble_name (file, name);
2499 fprintf (file, ",%u,%u\n", size, align / BITS_PER_UNIT);
2500 }
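
/* For instance, an uninitialized ZDA global "int z;" emitted as common
   data would come out roughly as ".zcomm z,4,4" here, assuming
   ZCOMMON_ASM_OP expands to a ".zcomm" directive and a four byte,
   four byte aligned int.  */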
2501
2502 /* Called via the macro ASM_OUTPUT_DECL_LOCAL */
2503 void
2504 v850_output_local (FILE * file,
2505 tree decl,
2506 const char * name,
2507 int size,
2508 int align)
2509 {
2510 fprintf (file, "%s", LOCAL_ASM_OP);
2511 assemble_name (file, name);
2512 fprintf (file, "\n");
2513
2514 ASM_OUTPUT_ALIGNED_DECL_COMMON (file, decl, name, size, align);
2515 }
2516
2517 /* Add data area to the given declaration if a ghs data area pragma is
2518 currently in effect (#pragma ghs startXXX/endXXX). */
2519 static void
2520 v850_insert_attributes (tree decl, tree * attr_ptr ATTRIBUTE_UNUSED )
2521 {
2522 if (data_area_stack
2523 && data_area_stack->data_area
2524 && current_function_decl == NULL_TREE
2525 && (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == CONST_DECL)
2526 && v850_get_data_area (decl) == DATA_AREA_NORMAL)
2527 v850_set_data_area (decl, data_area_stack->data_area);
2528
2529 /* Initialize the default names of the v850 specific sections,
2530 if this has not been done before. */
2531
2532 if (GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA] == NULL)
2533 {
2534 GHS_default_section_names [(int) GHS_SECTION_KIND_SDATA]
2535 = build_string (sizeof (".sdata")-1, ".sdata");
2536
2537 GHS_default_section_names [(int) GHS_SECTION_KIND_ROSDATA]
2538 = build_string (sizeof (".rosdata")-1, ".rosdata");
2539
2540 GHS_default_section_names [(int) GHS_SECTION_KIND_TDATA]
2541 = build_string (sizeof (".tdata")-1, ".tdata");
2542
2543 GHS_default_section_names [(int) GHS_SECTION_KIND_ZDATA]
2544 = build_string (sizeof (".zdata")-1, ".zdata");
2545
2546 GHS_default_section_names [(int) GHS_SECTION_KIND_ROZDATA]
2547 = build_string (sizeof (".rozdata")-1, ".rozdata");
2548 }
2549
2550 if (current_function_decl == NULL_TREE
2551 && (TREE_CODE (decl) == VAR_DECL
2552 || TREE_CODE (decl) == CONST_DECL
2553 || TREE_CODE (decl) == FUNCTION_DECL)
2554 && (!DECL_EXTERNAL (decl) || DECL_INITIAL (decl))
2555 && !DECL_SECTION_NAME (decl))
2556 {
2557 enum GHS_section_kind kind = GHS_SECTION_KIND_DEFAULT;
2558 tree chosen_section;
2559
2560 if (TREE_CODE (decl) == FUNCTION_DECL)
2561 kind = GHS_SECTION_KIND_TEXT;
2562 else
2563 {
2564 /* First choose a section kind based on the data area of the decl. */
2565 switch (v850_get_data_area (decl))
2566 {
2567 default:
2568 gcc_unreachable ();
2569
2570 case DATA_AREA_SDA:
2571 kind = ((TREE_READONLY (decl))
2572 ? GHS_SECTION_KIND_ROSDATA
2573 : GHS_SECTION_KIND_SDATA);
2574 break;
2575
2576 case DATA_AREA_TDA:
2577 kind = GHS_SECTION_KIND_TDATA;
2578 break;
2579
2580 case DATA_AREA_ZDA:
2581 kind = ((TREE_READONLY (decl))
2582 ? GHS_SECTION_KIND_ROZDATA
2583 : GHS_SECTION_KIND_ZDATA);
2584 break;
2585
2586 case DATA_AREA_NORMAL: /* default data area */
2587 if (TREE_READONLY (decl))
2588 kind = GHS_SECTION_KIND_RODATA;
2589 else if (DECL_INITIAL (decl))
2590 kind = GHS_SECTION_KIND_DATA;
2591 else
2592 kind = GHS_SECTION_KIND_BSS;
2593 }
2594 }
2595
2596 /* Now, if the section kind has been explicitly renamed,
2597 then attach a section attribute. */
2598 chosen_section = GHS_current_section_names [(int) kind];
2599
2600 /* Otherwise, if this kind of section needs an explicit section
2601 attribute, then also attach one. */
2602 if (chosen_section == NULL)
2603 chosen_section = GHS_default_section_names [(int) kind];
2604
2605 if (chosen_section)
2606 {
2607 /* Only set the section name if specified by a pragma, because
2608 otherwise it will force those variables to get allocated storage
2609 in this module, rather than by the linker. */
2610 DECL_SECTION_NAME (decl) = chosen_section;
2611 }
2612 }
2613 }
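
/* For example (hypothetical user code),
   #pragma ghs startzda
   int table[4];
   #pragma ghs endzda
   gives TABLE the ZDA data area even though no attribute was written;
   its section name is then taken from GHS_default_section_names unless
   the kind has been renamed via GHS_current_section_names.  */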
2614
2615 /* Construct a DISPOSE instruction that is the equivalent of
2616 the given RTX. We have already verified that this should
2617 be possible. */
2618
2619 char *
2620 construct_dispose_instruction (rtx op)
2621 {
2622 int count = XVECLEN (op, 0);
2623 int stack_bytes;
2624 unsigned long int mask;
2625 int i;
2626 static char buff[ 100 ]; /* XXX */
2627 int use_callt = 0;
2628
2629 if (count <= 2)
2630 {
2631 error ("bogus DISPOSE construction: %d", count);
2632 return NULL;
2633 }
2634
2635 /* Work out how many bytes to pop off the
2636 stack before retrieving registers. */
2637 gcc_assert (GET_CODE (XVECEXP (op, 0, 1)) == SET);
2638 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 1))) == PLUS);
2639 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1)) == CONST_INT);
2640
2641 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 1)), 1));
2642
2643 /* Each pop will remove 4 bytes from the stack.... */
2644 stack_bytes -= (count - 2) * 4;
2645
2646 /* Make sure that the amount we are popping
2647 will fit into the DISPOSE instruction. */
2648 if (stack_bytes > 128)
2649 {
2650 error ("too much stack space to dispose of: %d", stack_bytes);
2651 return NULL;
2652 }
2653
2654 /* Now compute the bit mask of registers to push. */
2655 mask = 0;
2656
2657 for (i = 2; i < count; i++)
2658 {
2659 rtx vector_element = XVECEXP (op, 0, i);
2660
2661 gcc_assert (GET_CODE (vector_element) == SET);
2662 gcc_assert (GET_CODE (SET_DEST (vector_element)) == REG);
2663 gcc_assert (register_is_ok_for_epilogue (SET_DEST (vector_element),
2664 SImode));
2665
2666 if (REGNO (SET_DEST (vector_element)) == 2)
2667 use_callt = 1;
2668 else
2669 mask |= 1 << REGNO (SET_DEST (vector_element));
2670 }
2671
2672 if (! TARGET_DISABLE_CALLT
2673 && (use_callt || stack_bytes == 0 || stack_bytes == 16))
2674 {
2675 if (use_callt)
2676 {
2677 sprintf (buff, "callt ctoff(__callt_return_r2_r%d)", (mask & (1 << 31)) ? 31 : 29);
2678 return buff;
2679 }
2680 else
2681 {
2682 for (i = 20; i < 32; i++)
2683 if (mask & (1 << i))
2684 break;
2685
2686 if (i == 31)
2687 sprintf (buff, "callt ctoff(__callt_return_r31c)");
2688 else
2689 sprintf (buff, "callt ctoff(__callt_return_r%d_r%d%s)",
2690 i, (mask & (1 << 31)) ? 31 : 29, stack_bytes ? "c" : "");
2691 }
2692 }
2693 else
2694 {
2695 static char regs [100]; /* XXX */
2696 int done_one;
2697
2698 /* Generate the DISPOSE instruction. Note we could just issue the
2699 bit mask as a number as the assembler can cope with this, but for
2700 the sake of our readers we turn it into a textual description. */
2701 regs[0] = 0;
2702 done_one = 0;
2703
2704 for (i = 20; i < 32; i++)
2705 {
2706 if (mask & (1 << i))
2707 {
2708 int first;
2709
2710 if (done_one)
2711 strcat (regs, ", ");
2712 else
2713 done_one = 1;
2714
2715 first = i;
2716 strcat (regs, reg_names[ first ]);
2717
2718 for (i++; i < 32; i++)
2719 if ((mask & (1 << i)) == 0)
2720 break;
2721
2722 if (i > first + 1)
2723 {
2724 strcat (regs, " - ");
2725 strcat (regs, reg_names[ i - 1 ] );
2726 }
2727 }
2728 }
2729
2730 sprintf (buff, "dispose %d {%s}, r31", stack_bytes / 4, regs);
2731 }
2732
2733 return buff;
2734 }
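
/* By way of example, popping r20 through r29 plus 16 bytes of stack
   prints "callt ctoff(__callt_return_r20_r29c)" when CALLT is usable
   (the trailing "c" reflecting the non-zero stack adjustment), and
   "dispose 4 {r20 - r29}, r31" under -mdisable-callt.  */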
2735
2736 /* Construct a PREPARE instruction that is the equivalent of
2737 the given RTL. We have already verified that this should
2738 be possible. */
2739
2740 char *
2741 construct_prepare_instruction (rtx op)
2742 {
2743 int count = XVECLEN (op, 0);
2744 int stack_bytes;
2745 unsigned long int mask;
2746 int i;
2747 static char buff[ 100 ]; /* XXX */
2748 int use_callt = 0;
2749
2750 if (count <= 1)
2751 {
2752 error ("bogus PREPEARE construction: %d", count);
2753 return NULL;
2754 }
2755
2756 /* Work out how many bytes to push onto
2757 the stack after storing the registers. */
2758 gcc_assert (GET_CODE (XVECEXP (op, 0, 0)) == SET);
2759 gcc_assert (GET_CODE (SET_SRC (XVECEXP (op, 0, 0))) == PLUS);
2760 gcc_assert (GET_CODE (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1)) == CONST_INT);
2761
2762 stack_bytes = INTVAL (XEXP (SET_SRC (XVECEXP (op, 0, 0)), 1));
2763
2764 /* Each push will put 4 bytes onto the stack. */
2765 stack_bytes += (count - 1) * 4;
2766
2767 /* Make sure that the amount of stack space we are acquiring
2768 will fit into the PREPARE instruction. */
2769 if (stack_bytes < -128)
2770 {
2771 error ("too much stack space to prepare: %d", stack_bytes);
2772 return NULL;
2773 }
2774
2775 /* Now compute the bit mask of registers to push. */
2776 mask = 0;
2777 for (i = 1; i < count; i++)
2778 {
2779 rtx vector_element = XVECEXP (op, 0, i);
2780
2781 gcc_assert (GET_CODE (vector_element) == SET);
2782 gcc_assert (GET_CODE (SET_SRC (vector_element)) == REG);
2783 gcc_assert (register_is_ok_for_epilogue (SET_SRC (vector_element),
2784 SImode));
2785
2786 if (REGNO (SET_SRC (vector_element)) == 2)
2787 use_callt = 1;
2788 else
2789 mask |= 1 << REGNO (SET_SRC (vector_element));
2790 }
2791
2792 if ((! TARGET_DISABLE_CALLT)
2793 && (use_callt || stack_bytes == 0 || stack_bytes == -16))
2794 {
2795 if (use_callt)
2796 {
2797 sprintf (buff, "callt ctoff(__callt_save_r2_r%d)", (mask & (1 << 31)) ? 31 : 29 );
2798 return buff;
2799 }
2800
2801 for (i = 20; i < 32; i++)
2802 if (mask & (1 << i))
2803 break;
2804
2805 if (i == 31)
2806 sprintf (buff, "callt ctoff(__callt_save_r31c)");
2807 else
2808 sprintf (buff, "callt ctoff(__callt_save_r%d_r%d%s)",
2809 i, (mask & (1 << 31)) ? 31 : 29, stack_bytes ? "c" : "");
2810 }
2811 else
2812 {
2813 static char regs [100]; /* XXX */
2814 int done_one;
2815
2816
2817 /* Generate the PREPARE instruction. Note we could just issue the
2818 bit mask as a number as the assembler can cope with this, but for
2819 the sake of our readers we turn it into a textual description. */
2820 regs[0] = 0;
2821 done_one = 0;
2822
2823 for (i = 20; i < 32; i++)
2824 {
2825 if (mask & (1 << i))
2826 {
2827 int first;
2828
2829 if (done_one)
2830 strcat (regs, ", ");
2831 else
2832 done_one = 1;
2833
2834 first = i;
2835 strcat (regs, reg_names[ first ]);
2836
2837 for (i++; i < 32; i++)
2838 if ((mask & (1 << i)) == 0)
2839 break;
2840
2841 if (i > first + 1)
2842 {
2843 strcat (regs, " - ");
2844 strcat (regs, reg_names[ i - 1 ] );
2845 }
2846 }
2847 }
2848
2849 sprintf (buff, "prepare {%s}, %d", regs, (- stack_bytes) / 4);
2850 }
2851
2852 return buff;
2853 }
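
/* Similarly, saving r20 through r29 while acquiring 16 bytes yields
   "callt ctoff(__callt_save_r20_r29c)" when CALLT is usable, and
   "prepare {r20 - r29}, 4" under -mdisable-callt.  */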
2854
2855 /* Return an RTX indicating where the return address to the
2856 calling function can be found. */
2857
2858 rtx
2859 v850_return_addr (int count)
2860 {
2861 if (count != 0)
2862 return const0_rtx;
2863
2864 return get_hard_reg_initial_val (Pmode, LINK_POINTER_REGNUM);
2865 }
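
/* Assuming RETURN_ADDR_RTX maps onto this function, this makes
   __builtin_return_address (0) read the value the link pointer had on
   entry to the current function, while any non-zero COUNT simply
   evaluates to zero.  */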
2866
2867 /* Implement TARGET_ASM_INIT_SECTIONS. */
2868
2869 static void
2870 v850_asm_init_sections (void)
2871 {
2872 rosdata_section
2873 = get_unnamed_section (0, output_section_asm_op,
2874 "\t.section .rosdata,\"a\"");
2875
2876 rozdata_section
2877 = get_unnamed_section (0, output_section_asm_op,
2878 "\t.section .rozdata,\"a\"");
2879
2880 tdata_section
2881 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2882 "\t.section .tdata,\"aw\"");
2883
2884 zdata_section
2885 = get_unnamed_section (SECTION_WRITE, output_section_asm_op,
2886 "\t.section .zdata,\"aw\"");
2887
2888 zbss_section
2889 = get_unnamed_section (SECTION_WRITE | SECTION_BSS,
2890 output_section_asm_op,
2891 "\t.section .zbss,\"aw\"");
2892 }
2893
2894 static section *
2895 v850_select_section (tree exp,
2896 int reloc ATTRIBUTE_UNUSED,
2897 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
2898 {
2899 if (TREE_CODE (exp) == VAR_DECL)
2900 {
2901 int is_const;
2902 if (!TREE_READONLY (exp)
2903 || TREE_SIDE_EFFECTS (exp)
2904 || !DECL_INITIAL (exp)
2905 || (DECL_INITIAL (exp) != error_mark_node
2906 && !TREE_CONSTANT (DECL_INITIAL (exp))))
2907 is_const = FALSE;
2908 else
2909 is_const = TRUE;
2910
2911 switch (v850_get_data_area (exp))
2912 {
2913 case DATA_AREA_ZDA:
2914 return is_const ? rozdata_section : zdata_section;
2915
2916 case DATA_AREA_TDA:
2917 return tdata_section;
2918
2919 case DATA_AREA_SDA:
2920 return is_const ? rosdata_section : sdata_section;
2921
2922 default:
2923 return is_const ? readonly_data_section : data_section;
2924 }
2925 }
2926 return readonly_data_section;
2927 }
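
/* Thus a "const" initialized ZDA variable lands in rozdata_section
   while a writable one goes to zdata_section; TDA variables always use
   tdata_section, as no read-only tiny data section is defined here.  */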
2928
2929 /* Worker function for TARGET_RETURN_IN_MEMORY. */
2930
2931 static bool
2932 v850_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
2933 {
2934 /* Return values > 8 bytes in length in memory. */
2935 return int_size_in_bytes (type) > 8 || TYPE_MODE (type) == BLKmode;
2936 }
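
/* For example, a 12 byte structure (or any BLKmode value) is returned
   via memory, whereas an 8 byte "long long" still comes back in
   registers.  */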
2937
2938 /* Worker function for TARGET_SETUP_INCOMING_VARARGS. */
2939
2940 static void
2941 v850_setup_incoming_varargs (CUMULATIVE_ARGS *ca,
2942 enum machine_mode mode ATTRIBUTE_UNUSED,
2943 tree type ATTRIBUTE_UNUSED,
2944 int *pretend_arg_size ATTRIBUTE_UNUSED,
2945 int second_time ATTRIBUTE_UNUSED)
2946 {
2947 ca->anonymous_args = (!TARGET_GHS ? 1 : 0);
2948 }
2949
2950 #include "gt-v850.h"