comparison gcc/config/mcore/mcore.c @ 111:04ced10e8804

gcc 7
author kono
date Fri, 27 Oct 2017 22:46:09 +0900
parents f6334be47118
children 84e7813d76e9
comparison
equal deleted inserted replaced
68:561a7518be6b 111:04ced10e8804
1 /* Output routines for Motorola MCore processor 1 /* Output routines for Motorola MCore processor
2 Copyright (C) 1993, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2 Copyright (C) 1993-2017 Free Software Foundation, Inc.
3 2009, 2010 Free Software Foundation, Inc.
4 3
5 This file is part of GCC. 4 This file is part of GCC.
6 5
7 GCC is free software; you can redistribute it and/or modify it 6 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published 7 under the terms of the GNU General Public License as published
19 <http://www.gnu.org/licenses/>. */ 18 <http://www.gnu.org/licenses/>. */
20 19
21 #include "config.h" 20 #include "config.h"
22 #include "system.h" 21 #include "system.h"
23 #include "coretypes.h" 22 #include "coretypes.h"
24 #include "tm.h" 23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h" 25 #include "rtl.h"
26 #include "tree.h" 26 #include "tree.h"
27 #include "df.h"
28 #include "memmodel.h"
27 #include "tm_p.h" 29 #include "tm_p.h"
30 #include "stringpool.h"
31 #include "attribs.h"
32 #include "emit-rtl.h"
33 #include "diagnostic-core.h"
34 #include "stor-layout.h"
35 #include "varasm.h"
36 #include "calls.h"
28 #include "mcore.h" 37 #include "mcore.h"
38 #include "output.h"
39 #include "explow.h"
40 #include "expr.h"
41 #include "cfgrtl.h"
42 #include "builtins.h"
29 #include "regs.h" 43 #include "regs.h"
30 #include "hard-reg-set.h" 44
31 #include "insn-config.h" 45 /* This file should be included last. */
32 #include "conditions.h"
33 #include "output.h"
34 #include "insn-attr.h"
35 #include "flags.h"
36 #include "obstack.h"
37 #include "expr.h"
38 #include "reload.h"
39 #include "recog.h"
40 #include "function.h"
41 #include "ggc.h"
42 #include "diagnostic-core.h"
43 #include "target.h"
44 #include "target-def.h" 46 #include "target-def.h"
45 #include "df.h"
46 47
47 /* For dumping information about frame sizes. */ 48 /* For dumping information about frame sizes. */
48 char * mcore_current_function_name = 0; 49 char * mcore_current_function_name = 0;
49 long mcore_current_compilation_timestamp = 0; 50 long mcore_current_compilation_timestamp = 0;
50 51
91 } 92 }
92 cond_type; 93 cond_type;
93 94
94 static void output_stack_adjust (int, int); 95 static void output_stack_adjust (int, int);
95 static int calc_live_regs (int *); 96 static int calc_live_regs (int *);
96 static int try_constant_tricks (long, HOST_WIDE_INT *, HOST_WIDE_INT *); 97 static int try_constant_tricks (HOST_WIDE_INT, HOST_WIDE_INT *, HOST_WIDE_INT *);
97 static const char * output_inline_const (enum machine_mode, rtx *); 98 static const char * output_inline_const (machine_mode, rtx *);
98 static void layout_mcore_frame (struct mcore_frame *); 99 static void layout_mcore_frame (struct mcore_frame *);
99 static void mcore_setup_incoming_varargs (CUMULATIVE_ARGS *, enum machine_mode, tree, int *, int); 100 static void mcore_setup_incoming_varargs (cumulative_args_t, machine_mode, tree, int *, int);
100 static cond_type is_cond_candidate (rtx); 101 static cond_type is_cond_candidate (rtx);
101 static rtx emit_new_cond_insn (rtx, int); 102 static rtx_insn *emit_new_cond_insn (rtx_insn *, int);
102 static rtx conditionalize_block (rtx); 103 static rtx_insn *conditionalize_block (rtx_insn *);
103 static void conditionalize_optimization (void); 104 static void conditionalize_optimization (void);
104 static void mcore_reorg (void); 105 static void mcore_reorg (void);
105 static rtx handle_structs_in_regs (enum machine_mode, const_tree, int); 106 static rtx handle_structs_in_regs (machine_mode, const_tree, int);
106 static void mcore_mark_dllexport (tree); 107 static void mcore_mark_dllexport (tree);
107 static void mcore_mark_dllimport (tree); 108 static void mcore_mark_dllimport (tree);
108 static int mcore_dllexport_p (tree); 109 static int mcore_dllexport_p (tree);
109 static int mcore_dllimport_p (tree); 110 static int mcore_dllimport_p (tree);
110 static tree mcore_handle_naked_attribute (tree *, tree, tree, int, bool *); 111 static tree mcore_handle_naked_attribute (tree *, tree, tree, int, bool *);
111 #ifdef OBJECT_FORMAT_ELF 112 #ifdef OBJECT_FORMAT_ELF
112 static void mcore_asm_named_section (const char *, 113 static void mcore_asm_named_section (const char *,
113 unsigned int, tree); 114 unsigned int, tree);
114 #endif 115 #endif
115 static void mcore_print_operand (FILE *, rtx, int); 116 static void mcore_print_operand (FILE *, rtx, int);
116 static void mcore_print_operand_address (FILE *, rtx); 117 static void mcore_print_operand_address (FILE *, machine_mode, rtx);
117 static bool mcore_print_operand_punct_valid_p (unsigned char code); 118 static bool mcore_print_operand_punct_valid_p (unsigned char code);
118 static void mcore_unique_section (tree, int); 119 static void mcore_unique_section (tree, int);
119 static void mcore_encode_section_info (tree, rtx, int); 120 static void mcore_encode_section_info (tree, rtx, int);
120 static const char *mcore_strip_name_encoding (const char *); 121 static const char *mcore_strip_name_encoding (const char *);
121 static int mcore_const_costs (rtx, RTX_CODE); 122 static int mcore_const_costs (rtx, RTX_CODE);
122 static int mcore_and_cost (rtx); 123 static int mcore_and_cost (rtx);
123 static int mcore_ior_cost (rtx); 124 static int mcore_ior_cost (rtx);
124 static bool mcore_rtx_costs (rtx, int, int, int *, bool); 125 static bool mcore_rtx_costs (rtx, machine_mode, int, int,
126 int *, bool);
125 static void mcore_external_libcall (rtx); 127 static void mcore_external_libcall (rtx);
126 static bool mcore_return_in_memory (const_tree, const_tree); 128 static bool mcore_return_in_memory (const_tree, const_tree);
127 static int mcore_arg_partial_bytes (CUMULATIVE_ARGS *, 129 static int mcore_arg_partial_bytes (cumulative_args_t,
128 enum machine_mode, 130 machine_mode,
129 tree, bool); 131 tree, bool);
130 static rtx mcore_function_arg (CUMULATIVE_ARGS *, 132 static rtx mcore_function_arg (cumulative_args_t,
131 enum machine_mode, 133 machine_mode,
132 const_tree, bool); 134 const_tree, bool);
133 static void mcore_function_arg_advance (CUMULATIVE_ARGS *, 135 static void mcore_function_arg_advance (cumulative_args_t,
134 enum machine_mode, 136 machine_mode,
135 const_tree, bool); 137 const_tree, bool);
136 static unsigned int mcore_function_arg_boundary (enum machine_mode, 138 static unsigned int mcore_function_arg_boundary (machine_mode,
137 const_tree); 139 const_tree);
138 static void mcore_asm_trampoline_template (FILE *); 140 static void mcore_asm_trampoline_template (FILE *);
139 static void mcore_trampoline_init (rtx, tree, rtx); 141 static void mcore_trampoline_init (rtx, tree, rtx);
142 static bool mcore_warn_func_return (tree);
140 static void mcore_option_override (void); 143 static void mcore_option_override (void);
144 static bool mcore_legitimate_constant_p (machine_mode, rtx);
145 static bool mcore_legitimate_address_p (machine_mode, rtx, bool,
146 addr_space_t);
147 static bool mcore_hard_regno_mode_ok (unsigned int, machine_mode);
148 static bool mcore_modes_tieable_p (machine_mode, machine_mode);
141 149
142 /* MCore specific attributes. */ 150 /* MCore specific attributes. */
143 151
144 static const struct attribute_spec mcore_attribute_table[] = 152 static const struct attribute_spec mcore_attribute_table[] =
145 { 153 {
146 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */ 154 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
147 { "dllexport", 0, 0, true, false, false, NULL }, 155 affects_type_identity } */
148 { "dllimport", 0, 0, true, false, false, NULL }, 156 { "dllexport", 0, 0, true, false, false, NULL, false },
149 { "naked", 0, 0, true, false, false, mcore_handle_naked_attribute }, 157 { "dllimport", 0, 0, true, false, false, NULL, false },
150 { NULL, 0, 0, false, false, false, NULL } 158 { "naked", 0, 0, true, false, false, mcore_handle_naked_attribute,
159 false },
160 { NULL, 0, 0, false, false, false, NULL, false }
151 }; 161 };
152
153 /* What options are we going to default to specific settings when
154 -O* happens; the user can subsequently override these settings.
155
156 Omitting the frame pointer is a very good idea on the MCore.
157 Scheduling isn't worth anything on the current MCore implementation. */
158
159 static const struct default_options mcore_option_optimization_table[] =
160 {
161 { OPT_LEVELS_1_PLUS, OPT_ffunction_cse, NULL, 0 },
162 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
163 { OPT_LEVELS_ALL, OPT_fcaller_saves, NULL, 0 },
164 { OPT_LEVELS_ALL, OPT_fschedule_insns, NULL, 0 },
165 { OPT_LEVELS_ALL, OPT_fschedule_insns2, NULL, 0 },
166 { OPT_LEVELS_SIZE, OPT_mhardlit, NULL, 0 },
167 { OPT_LEVELS_NONE, 0, NULL, 0 }
168 };
169 162
170 /* Initialize the GCC target structure. */ 163 /* Initialize the GCC target structure. */
171 #undef TARGET_ASM_EXTERNAL_LIBCALL 164 #undef TARGET_ASM_EXTERNAL_LIBCALL
172 #define TARGET_ASM_EXTERNAL_LIBCALL mcore_external_libcall 165 #define TARGET_ASM_EXTERNAL_LIBCALL mcore_external_libcall
173 166
194 #define TARGET_ATTRIBUTE_TABLE mcore_attribute_table 187 #define TARGET_ATTRIBUTE_TABLE mcore_attribute_table
195 #undef TARGET_ASM_UNIQUE_SECTION 188 #undef TARGET_ASM_UNIQUE_SECTION
196 #define TARGET_ASM_UNIQUE_SECTION mcore_unique_section 189 #define TARGET_ASM_UNIQUE_SECTION mcore_unique_section
197 #undef TARGET_ASM_FUNCTION_RODATA_SECTION 190 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
198 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section 191 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
199 #undef TARGET_DEFAULT_TARGET_FLAGS
200 #define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
201 #undef TARGET_ENCODE_SECTION_INFO 192 #undef TARGET_ENCODE_SECTION_INFO
202 #define TARGET_ENCODE_SECTION_INFO mcore_encode_section_info 193 #define TARGET_ENCODE_SECTION_INFO mcore_encode_section_info
203 #undef TARGET_STRIP_NAME_ENCODING 194 #undef TARGET_STRIP_NAME_ENCODING
204 #define TARGET_STRIP_NAME_ENCODING mcore_strip_name_encoding 195 #define TARGET_STRIP_NAME_ENCODING mcore_strip_name_encoding
205 #undef TARGET_RTX_COSTS 196 #undef TARGET_RTX_COSTS
206 #define TARGET_RTX_COSTS mcore_rtx_costs 197 #define TARGET_RTX_COSTS mcore_rtx_costs
207 #undef TARGET_ADDRESS_COST 198 #undef TARGET_ADDRESS_COST
208 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0 199 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
209 #undef TARGET_MACHINE_DEPENDENT_REORG 200 #undef TARGET_MACHINE_DEPENDENT_REORG
210 #define TARGET_MACHINE_DEPENDENT_REORG mcore_reorg 201 #define TARGET_MACHINE_DEPENDENT_REORG mcore_reorg
211 202
212 #undef TARGET_PROMOTE_FUNCTION_MODE 203 #undef TARGET_PROMOTE_FUNCTION_MODE
213 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote 204 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
237 #undef TARGET_TRAMPOLINE_INIT 228 #undef TARGET_TRAMPOLINE_INIT
238 #define TARGET_TRAMPOLINE_INIT mcore_trampoline_init 229 #define TARGET_TRAMPOLINE_INIT mcore_trampoline_init
239 230
240 #undef TARGET_OPTION_OVERRIDE 231 #undef TARGET_OPTION_OVERRIDE
241 #define TARGET_OPTION_OVERRIDE mcore_option_override 232 #define TARGET_OPTION_OVERRIDE mcore_option_override
242 #undef TARGET_OPTION_OPTIMIZATION_TABLE 233
243 #define TARGET_OPTION_OPTIMIZATION_TABLE mcore_option_optimization_table 234 #undef TARGET_LEGITIMATE_CONSTANT_P
244 235 #define TARGET_LEGITIMATE_CONSTANT_P mcore_legitimate_constant_p
245 #undef TARGET_EXCEPT_UNWIND_INFO 236 #undef TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P
246 #define TARGET_EXCEPT_UNWIND_INFO sjlj_except_unwind_info 237 #define TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P mcore_legitimate_address_p
238
239 #undef TARGET_LRA_P
240 #define TARGET_LRA_P hook_bool_void_false
241
242 #undef TARGET_WARN_FUNC_RETURN
243 #define TARGET_WARN_FUNC_RETURN mcore_warn_func_return
244
245 #undef TARGET_HARD_REGNO_MODE_OK
246 #define TARGET_HARD_REGNO_MODE_OK mcore_hard_regno_mode_ok
247
248 #undef TARGET_MODES_TIEABLE_P
249 #define TARGET_MODES_TIEABLE_P mcore_modes_tieable_p
250
251 #undef TARGET_CONSTANT_ALIGNMENT
252 #define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings
247 253
248 struct gcc_target targetm = TARGET_INITIALIZER; 254 struct gcc_target targetm = TARGET_INITIALIZER;
249 255
250 /* Adjust the stack and return the number of bytes taken to do it. */ 256 /* Adjust the stack and return the number of bytes taken to do it. */
251 static void 257 static void
317 } 323 }
318 324
319 /* Print the operand address in x to the stream. */ 325 /* Print the operand address in x to the stream. */
320 326
321 static void 327 static void
322 mcore_print_operand_address (FILE * stream, rtx x) 328 mcore_print_operand_address (FILE * stream, machine_mode /*mode*/, rtx x)
323 { 329 {
324 switch (GET_CODE (x)) 330 switch (GET_CODE (x))
325 { 331 {
326 case REG: 332 case REG:
327 fprintf (stream, "(%s)", reg_names[REGNO (x)]); 333 fprintf (stream, "(%s)", reg_names[REGNO (x)]);
409 case REG: 415 case REG:
410 fputs (reg_names[REGNO (x) + 1], (stream)); 416 fputs (reg_names[REGNO (x) + 1], (stream));
411 break; 417 break;
412 case MEM: 418 case MEM:
413 mcore_print_operand_address 419 mcore_print_operand_address
414 (stream, XEXP (adjust_address (x, SImode, 4), 0)); 420 (stream, GET_MODE (x), XEXP (adjust_address (x, SImode, 4), 0));
415 break; 421 break;
416 default: 422 default:
417 gcc_unreachable (); 423 gcc_unreachable ();
418 } 424 }
419 break; 425 break;
433 { 439 {
434 case REG: 440 case REG:
435 fputs (reg_names[REGNO (x)], (stream)); 441 fputs (reg_names[REGNO (x)], (stream));
436 break; 442 break;
437 case MEM: 443 case MEM:
438 output_address (XEXP (x, 0)); 444 output_address (GET_MODE (x), XEXP (x, 0));
439 break; 445 break;
440 default: 446 default:
441 output_addr_const (stream, x); 447 output_addr_const (stream, x);
442 break; 448 break;
443 } 449 }
524 /* Takes a lrw to load. */ 530 /* Takes a lrw to load. */
525 return 5; 531 return 5;
526 } 532 }
527 533
528 static bool 534 static bool
529 mcore_rtx_costs (rtx x, int code, int outer_code, int * total, 535 mcore_rtx_costs (rtx x, machine_mode mode ATTRIBUTE_UNUSED, int outer_code,
530 bool speed ATTRIBUTE_UNUSED) 536 int opno ATTRIBUTE_UNUSED,
531 { 537 int * total, bool speed ATTRIBUTE_UNUSED)
538 {
539 int code = GET_CODE (x);
540
532 switch (code) 541 switch (code)
533 { 542 {
534 case CONST_INT: 543 case CONST_INT:
535 *total = mcore_const_costs (x, (enum rtx_code) outer_code); 544 *total = mcore_const_costs (x, (enum rtx_code) outer_code);
536 return true; 545 return true;
612 switch (code) 621 switch (code)
613 { 622 {
614 case EQ: /* Use inverted condition, cmpne. */ 623 case EQ: /* Use inverted condition, cmpne. */
615 code = NE; 624 code = NE;
616 invert = true; 625 invert = true;
617 /* Drop through. */ 626 /* FALLTHRU */
618 627
619 case NE: /* Use normal condition, cmpne. */ 628 case NE: /* Use normal condition, cmpne. */
620 if (GET_CODE (op1) == CONST_INT && ! CONST_OK_FOR_K (INTVAL (op1))) 629 if (GET_CODE (op1) == CONST_INT && ! CONST_OK_FOR_K (INTVAL (op1)))
621 op1 = force_reg (SImode, op1); 630 op1 = force_reg (SImode, op1);
622 break; 631 break;
623 632
624 case LE: /* Use inverted condition, reversed cmplt. */ 633 case LE: /* Use inverted condition, reversed cmplt. */
625 code = GT; 634 code = GT;
626 invert = true; 635 invert = true;
627 /* Drop through. */ 636 /* FALLTHRU */
628 637
629 case GT: /* Use normal condition, reversed cmplt. */ 638 case GT: /* Use normal condition, reversed cmplt. */
630 if (GET_CODE (op1) == CONST_INT) 639 if (GET_CODE (op1) == CONST_INT)
631 op1 = force_reg (SImode, op1); 640 op1 = force_reg (SImode, op1);
632 break; 641 break;
633 642
634 case GE: /* Use inverted condition, cmplt. */ 643 case GE: /* Use inverted condition, cmplt. */
635 code = LT; 644 code = LT;
636 invert = true; 645 invert = true;
637 /* Drop through. */ 646 /* FALLTHRU */
638 647
639 case LT: /* Use normal condition, cmplt. */ 648 case LT: /* Use normal condition, cmplt. */
640 if (GET_CODE (op1) == CONST_INT && 649 if (GET_CODE (op1) == CONST_INT &&
641 /* covered by btsti x,31. */ 650 /* covered by btsti x,31. */
642 INTVAL (op1) != 0 && 651 INTVAL (op1) != 0 &&
647 case GTU: /* Use inverted condition, cmple. */ 656 case GTU: /* Use inverted condition, cmple. */
648 /* We coped with unsigned > 0 above. */ 657 /* We coped with unsigned > 0 above. */
649 gcc_assert (GET_CODE (op1) != CONST_INT || INTVAL (op1) != 0); 658 gcc_assert (GET_CODE (op1) != CONST_INT || INTVAL (op1) != 0);
650 code = LEU; 659 code = LEU;
651 invert = true; 660 invert = true;
652 /* Drop through. */ 661 /* FALLTHRU */
653 662
654 case LEU: /* Use normal condition, reversed cmphs. */ 663 case LEU: /* Use normal condition, reversed cmphs. */
655 if (GET_CODE (op1) == CONST_INT && INTVAL (op1) != 0) 664 if (GET_CODE (op1) == CONST_INT && INTVAL (op1) != 0)
656 op1 = force_reg (SImode, op1); 665 op1 = force_reg (SImode, op1);
657 break; 666 break;
658 667
659 case LTU: /* Use inverted condition, cmphs. */ 668 case LTU: /* Use inverted condition, cmphs. */
660 code = GEU; 669 code = GEU;
661 invert = true; 670 invert = true;
662 /* Drop through. */ 671 /* FALLTHRU */
663 672
664 case GEU: /* Use normal condition, cmphs. */ 673 case GEU: /* Use normal condition, cmphs. */
665 if (GET_CODE (op1) == CONST_INT && INTVAL (op1) != 0) 674 if (GET_CODE (op1) == CONST_INT && INTVAL (op1) != 0)
666 op1 = force_reg (SImode, op1); 675 op1 = force_reg (SImode, op1);
667 break; 676 break;
668 677
669 default: 678 default:
670 break; 679 break;
671 } 680 }
672 681
673 emit_insn (gen_rtx_SET (VOIDmode, 682 emit_insn (gen_rtx_SET (cc_reg, gen_rtx_fmt_ee (code, CCmode, op0, op1)));
674 cc_reg,
675 gen_rtx_fmt_ee (code, CCmode, op0, op1)));
676 return invert; 683 return invert;
677 } 684 }
678 685
679 int 686 int
680 mcore_symbolic_address_p (rtx x) 687 mcore_symbolic_address_p (rtx x)
910 reg. Don't just use dead_or_set_p() since reload does not always mark 917 reg. Don't just use dead_or_set_p() since reload does not always mark
911 deaths (especially if PRESERVE_DEATH_NOTES_REGNO_P is not defined). We 918 deaths (especially if PRESERVE_DEATH_NOTES_REGNO_P is not defined). We
912 can ignore subregs by extracting the actual register. BRC */ 919 can ignore subregs by extracting the actual register. BRC */
913 920
914 int 921 int
915 mcore_is_dead (rtx first, rtx reg) 922 mcore_is_dead (rtx_insn *first, rtx reg)
916 { 923 {
917 rtx insn; 924 rtx_insn *insn;
918 925
919 /* For mcore, subregs can't live independently of their parent regs. */ 926 /* For mcore, subregs can't live independently of their parent regs. */
920 if (GET_CODE (reg) == SUBREG) 927 if (GET_CODE (reg) == SUBREG)
921 reg = SUBREG_REG (reg); 928 reg = SUBREG_REG (reg);
922 929
926 933
927 /* Look for conclusive evidence of live/death, otherwise we have 934 /* Look for conclusive evidence of live/death, otherwise we have
928 to assume that it is live. */ 935 to assume that it is live. */
929 for (insn = NEXT_INSN (first); insn; insn = NEXT_INSN (insn)) 936 for (insn = NEXT_INSN (first); insn; insn = NEXT_INSN (insn))
930 { 937 {
931 if (GET_CODE (insn) == JUMP_INSN) 938 if (JUMP_P (insn))
932 return 0; /* We lose track, assume it is alive. */ 939 return 0; /* We lose track, assume it is alive. */
933 940
934 else if (GET_CODE(insn) == CALL_INSN) 941 else if (CALL_P (insn))
935 { 942 {
936 /* Call's might use it for target or register parms. */ 943 /* Call's might use it for target or register parms. */
937 if (reg_referenced_p (reg, PATTERN (insn)) 944 if (reg_referenced_p (reg, PATTERN (insn))
938 || find_reg_fusage (insn, USE, reg)) 945 || find_reg_fusage (insn, USE, reg))
939 return 0; 946 return 0;
940 else if (dead_or_set_p (insn, reg)) 947 else if (dead_or_set_p (insn, reg))
941 return 1; 948 return 1;
942 } 949 }
943 else if (GET_CODE (insn) == INSN) 950 else if (NONJUMP_INSN_P (insn))
944 { 951 {
945 if (reg_referenced_p (reg, PATTERN (insn))) 952 if (reg_referenced_p (reg, PATTERN (insn)))
946 return 0; 953 return 0;
947 else if (dead_or_set_p (insn, reg)) 954 else if (dead_or_set_p (insn, reg))
948 return 1; 955 return 1;
1163 } 1170 }
1164 1171
1165 /* Output an inline constant. */ 1172 /* Output an inline constant. */
1166 1173
1167 static const char * 1174 static const char *
1168 output_inline_const (enum machine_mode mode, rtx operands[]) 1175 output_inline_const (machine_mode mode, rtx operands[])
1169 { 1176 {
1170 HOST_WIDE_INT x = 0, y = 0; 1177 HOST_WIDE_INT x = 0, y = 0;
1171 int trick_no; 1178 int trick_no;
1172 rtx out_operands[3]; 1179 rtx out_operands[3];
1173 char buf[256]; 1180 char buf[256];
1263 1270
1264 /* Output a move of a word or less value. */ 1271 /* Output a move of a word or less value. */
1265 1272
1266 const char * 1273 const char *
1267 mcore_output_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[], 1274 mcore_output_move (rtx insn ATTRIBUTE_UNUSED, rtx operands[],
1268 enum machine_mode mode ATTRIBUTE_UNUSED) 1275 machine_mode mode ATTRIBUTE_UNUSED)
1269 { 1276 {
1270 rtx dst = operands[0]; 1277 rtx dst = operands[0];
1271 rtx src = operands[1]; 1278 rtx src = operands[1];
1272 1279
1273 if (GET_CODE (dst) == REG) 1280 if (GET_CODE (dst) == REG)
1284 if (GET_CODE (XEXP (src, 0)) == LABEL_REF) 1291 if (GET_CODE (XEXP (src, 0)) == LABEL_REF)
1285 return "lrw\t%0,[%1]"; /* a-R */ 1292 return "lrw\t%0,[%1]"; /* a-R */
1286 else 1293 else
1287 switch (GET_MODE (src)) /* r-m */ 1294 switch (GET_MODE (src)) /* r-m */
1288 { 1295 {
1289 case SImode: 1296 case E_SImode:
1290 return "ldw\t%0,%1"; 1297 return "ldw\t%0,%1";
1291 case HImode: 1298 case E_HImode:
1292 return "ld.h\t%0,%1"; 1299 return "ld.h\t%0,%1";
1293 case QImode: 1300 case E_QImode:
1294 return "ld.b\t%0,%1"; 1301 return "ld.b\t%0,%1";
1295 default: 1302 default:
1296 gcc_unreachable (); 1303 gcc_unreachable ();
1297 } 1304 }
1298 } 1305 }
1315 return "lrw\t%0, %1"; /* Into the literal pool. */ 1322 return "lrw\t%0, %1"; /* Into the literal pool. */
1316 } 1323 }
1317 else if (GET_CODE (dst) == MEM) /* m-r */ 1324 else if (GET_CODE (dst) == MEM) /* m-r */
1318 switch (GET_MODE (dst)) 1325 switch (GET_MODE (dst))
1319 { 1326 {
1320 case SImode: 1327 case E_SImode:
1321 return "stw\t%1,%0"; 1328 return "stw\t%1,%0";
1322 case HImode: 1329 case E_HImode:
1323 return "st.h\t%1,%0"; 1330 return "st.h\t%1,%0";
1324 case QImode: 1331 case E_QImode:
1325 return "st.b\t%1,%0"; 1332 return "st.b\t%1,%0";
1326 default: 1333 default:
1327 gcc_unreachable (); 1334 gcc_unreachable ();
1328 } 1335 }
1329 1336
1333 /* Return a sequence of instructions to perform DI or DF move. 1340 /* Return a sequence of instructions to perform DI or DF move.
1334 Since the MCORE cannot move a DI or DF in one instruction, we have 1341 Since the MCORE cannot move a DI or DF in one instruction, we have
1335 to take care when we see overlapping source and dest registers. */ 1342 to take care when we see overlapping source and dest registers. */
1336 1343
1337 const char * 1344 const char *
1338 mcore_output_movedouble (rtx operands[], enum machine_mode mode ATTRIBUTE_UNUSED) 1345 mcore_output_movedouble (rtx operands[], machine_mode mode ATTRIBUTE_UNUSED)
1339 { 1346 {
1340 rtx dst = operands[0]; 1347 rtx dst = operands[0];
1341 rtx src = operands[1]; 1348 rtx src = operands[1];
1342 1349
1343 if (GET_CODE (dst) == REG) 1350 if (GET_CODE (dst) == REG)
1353 else 1360 else
1354 return "mov %0,%1\n\tmov %R0,%R1"; 1361 return "mov %0,%1\n\tmov %R0,%R1";
1355 } 1362 }
1356 else if (GET_CODE (src) == MEM) 1363 else if (GET_CODE (src) == MEM)
1357 { 1364 {
1358 rtx memexp = memexp = XEXP (src, 0); 1365 rtx memexp = XEXP (src, 0);
1359 int dstreg = REGNO (dst); 1366 int dstreg = REGNO (dst);
1360 int basereg = -1; 1367 int basereg = -1;
1361 1368
1362 if (GET_CODE (memexp) == LABEL_REF) 1369 if (GET_CODE (memexp) == LABEL_REF)
1363 return "lrw\t%0,[%1]\n\tlrw\t%R0,[%R1]"; 1370 return "lrw\t%0,[%1]\n\tlrw\t%R0,[%R1]";
1460 /* Do directly with bseti or bclri. */ 1467 /* Do directly with bseti or bclri. */
1461 /* RBE: 2/97 consider only low bit of constant. */ 1468 /* RBE: 2/97 consider only low bit of constant. */
1462 if ((INTVAL (operands[3]) & 1) == 0) 1469 if ((INTVAL (operands[3]) & 1) == 0)
1463 { 1470 {
1464 mask = ~(1 << posn); 1471 mask = ~(1 << posn);
1465 emit_insn (gen_rtx_SET (SImode, operands[0], 1472 emit_insn (gen_rtx_SET (operands[0],
1466 gen_rtx_AND (SImode, operands[0], GEN_INT (mask)))); 1473 gen_rtx_AND (SImode, operands[0],
1474 GEN_INT (mask))));
1467 } 1475 }
1468 else 1476 else
1469 { 1477 {
1470 mask = 1 << posn; 1478 mask = 1 << posn;
1471 emit_insn (gen_rtx_SET (SImode, operands[0], 1479 emit_insn (gen_rtx_SET (operands[0],
1472 gen_rtx_IOR (SImode, operands[0], GEN_INT (mask)))); 1480 gen_rtx_IOR (SImode, operands[0],
1481 GEN_INT (mask))));
1473 } 1482 }
1474 1483
1475 return 1; 1484 return 1;
1476 } 1485 }
1477 1486
1496 /* If setting the entire field, do it directly. */ 1505 /* If setting the entire field, do it directly. */
1497 if (GET_CODE (operands[3]) == CONST_INT 1506 if (GET_CODE (operands[3]) == CONST_INT
1498 && INTVAL (operands[3]) == ((1 << width) - 1)) 1507 && INTVAL (operands[3]) == ((1 << width) - 1))
1499 { 1508 {
1500 mreg = force_reg (SImode, GEN_INT (INTVAL (operands[3]) << posn)); 1509 mreg = force_reg (SImode, GEN_INT (INTVAL (operands[3]) << posn));
1501 emit_insn (gen_rtx_SET (SImode, operands[0], 1510 emit_insn (gen_rtx_SET (operands[0],
1502 gen_rtx_IOR (SImode, operands[0], mreg))); 1511 gen_rtx_IOR (SImode, operands[0], mreg)));
1503 return 1; 1512 return 1;
1504 } 1513 }
1505 1514
1506 /* Generate the clear mask. */ 1515 /* Generate the clear mask. */
1507 mreg = force_reg (SImode, GEN_INT (~(((1 << width) - 1) << posn))); 1516 mreg = force_reg (SImode, GEN_INT (~(((1 << width) - 1) << posn)));
1508 1517
1509 /* Clear the field, to overlay it later with the source. */ 1518 /* Clear the field, to overlay it later with the source. */
1510 emit_insn (gen_rtx_SET (SImode, operands[0], 1519 emit_insn (gen_rtx_SET (operands[0],
1511 gen_rtx_AND (SImode, operands[0], mreg))); 1520 gen_rtx_AND (SImode, operands[0], mreg)));
1512 1521
1513 /* If the source is constant 0, we've nothing to add back. */ 1522 /* If the source is constant 0, we've nothing to add back. */
1514 if (GET_CODE (operands[3]) == CONST_INT && INTVAL (operands[3]) == 0) 1523 if (GET_CODE (operands[3]) == CONST_INT && INTVAL (operands[3]) == 0)
1515 return 1; 1524 return 1;
1516 1525
1525 MSB of the register (e.g., the shift will push out any hi-order 1534 MSB of the register (e.g., the shift will push out any hi-order
1526 bits. */ 1535 bits. */
1527 if (width + posn != (int) GET_MODE_SIZE (SImode)) 1536 if (width + posn != (int) GET_MODE_SIZE (SImode))
1528 { 1537 {
1529 ereg = force_reg (SImode, GEN_INT ((1 << width) - 1)); 1538 ereg = force_reg (SImode, GEN_INT ((1 << width) - 1));
1530 emit_insn (gen_rtx_SET (SImode, sreg, 1539 emit_insn (gen_rtx_SET (sreg, gen_rtx_AND (SImode, sreg, ereg)));
1531 gen_rtx_AND (SImode, sreg, ereg)));
1532 } 1540 }
1533 1541
1534 /* Insert source value in dest. */ 1542 /* Insert source value in dest. */
1535 if (posn != 0) 1543 if (posn != 0)
1536 emit_insn (gen_rtx_SET (SImode, sreg, 1544 emit_insn (gen_rtx_SET (sreg, gen_rtx_ASHIFT (SImode, sreg,
1537 gen_rtx_ASHIFT (SImode, sreg, GEN_INT (posn)))); 1545 GEN_INT (posn))));
1538 1546
1539 emit_insn (gen_rtx_SET (SImode, operands[0], 1547 emit_insn (gen_rtx_SET (operands[0],
1540 gen_rtx_IOR (SImode, operands[0], sreg))); 1548 gen_rtx_IOR (SImode, operands[0], sreg)));
1541 1549
1542 return 1; 1550 return 1;
1543 } 1551 }
1544 1552
1545 /* ??? Block move stuff stolen from m88k. This code has not been 1553 /* ??? Block move stuff stolen from m88k. This code has not been
1555 /* Emit code to perform a block move with an offset sequence of ldw/st 1563 /* Emit code to perform a block move with an offset sequence of ldw/st
1556 instructions (..., ldw 0, stw 1, ldw 1, stw 0, ...). SIZE and ALIGN are 1564 instructions (..., ldw 0, stw 1, ldw 1, stw 0, ...). SIZE and ALIGN are
1557 known constants. DEST and SRC are registers. OFFSET is the known 1565 known constants. DEST and SRC are registers. OFFSET is the known
1558 starting point for the output pattern. */ 1566 starting point for the output pattern. */
1559 1567
1560 static const enum machine_mode mode_from_align[] = 1568 static const machine_mode mode_from_align[] =
1561 { 1569 {
1562 VOIDmode, QImode, HImode, VOIDmode, SImode, 1570 VOIDmode, QImode, HImode, VOIDmode, SImode,
1563 }; 1571 };
1564 1572
1565 static void 1573 static void
1566 block_move_sequence (rtx dst_mem, rtx src_mem, int size, int align) 1574 block_move_sequence (rtx dst_mem, rtx src_mem, int size, int align)
1567 { 1575 {
1568 rtx temp[2]; 1576 rtx temp[2];
1569 enum machine_mode mode[2]; 1577 machine_mode mode[2];
1570 int amount[2]; 1578 int amount[2];
1571 bool active[2]; 1579 bool active[2];
1572 int phase = 0; 1580 int phase = 0;
1573 int next; 1581 int next;
1574 int offset_ld = 0; 1582 int offset_ld = 0;
1606 amount[next] = next_amount; 1614 amount[next] = next_amount;
1607 mode[next] = mode_from_align[next_amount]; 1615 mode[next] = mode_from_align[next_amount];
1608 temp[next] = gen_reg_rtx (mode[next]); 1616 temp[next] = gen_reg_rtx (mode[next]);
1609 1617
1610 x = adjust_address (src_mem, mode[next], offset_ld); 1618 x = adjust_address (src_mem, mode[next], offset_ld);
1611 emit_insn (gen_rtx_SET (VOIDmode, temp[next], x)); 1619 emit_insn (gen_rtx_SET (temp[next], x));
1612 1620
1613 offset_ld += next_amount; 1621 offset_ld += next_amount;
1614 size -= next_amount; 1622 size -= next_amount;
1615 active[next] = true; 1623 active[next] = true;
1616 } 1624 }
1618 if (active[phase]) 1626 if (active[phase])
1619 { 1627 {
1620 active[phase] = false; 1628 active[phase] = false;
1621 1629
1622 x = adjust_address (dst_mem, mode[phase], offset_st); 1630 x = adjust_address (dst_mem, mode[phase], offset_st);
1623 emit_insn (gen_rtx_SET (VOIDmode, x, temp[phase])); 1631 emit_insn (gen_rtx_SET (x, temp[phase]));
1624 1632
1625 offset_st += amount[phase]; 1633 offset_st += amount[phase];
1626 } 1634 }
1627 } 1635 }
1628 while (active[next]); 1636 while (active[next]);
1927 } 1935 }
1928 1936
1929 /* Keep track of some information about varargs for the prolog. */ 1937 /* Keep track of some information about varargs for the prolog. */
1930 1938
1931 static void 1939 static void
1932 mcore_setup_incoming_varargs (CUMULATIVE_ARGS *args_so_far, 1940 mcore_setup_incoming_varargs (cumulative_args_t args_so_far_v,
1933 enum machine_mode mode, tree type, 1941 machine_mode mode, tree type,
1934 int * ptr_pretend_size ATTRIBUTE_UNUSED, 1942 int * ptr_pretend_size ATTRIBUTE_UNUSED,
1935 int second_time ATTRIBUTE_UNUSED) 1943 int second_time ATTRIBUTE_UNUSED)
1936 { 1944 {
1945 CUMULATIVE_ARGS *args_so_far = get_cumulative_args (args_so_far_v);
1946
1937 current_function_anonymous_args = 1; 1947 current_function_anonymous_args = 1;
1938 1948
1939 /* We need to know how many argument registers are used before 1949 /* We need to know how many argument registers are used before
1940 the varargs start, so that we can push the remaining argument 1950 the varargs start, so that we can push the remaining argument
1941 registers during the prologue. */ 1951 registers during the prologue. */
1976 1986
1977 x = XEXP (x, 0); 1987 x = XEXP (x, 0);
1978 1988
1979 gcc_assert (GET_CODE (x) == SYMBOL_REF); 1989 gcc_assert (GET_CODE (x) == SYMBOL_REF);
1980 1990
1981 if (mcore_current_function_name) 1991 free (mcore_current_function_name);
1982 free (mcore_current_function_name);
1983 1992
1984 mcore_current_function_name = xstrdup (XSTR (x, 0)); 1993 mcore_current_function_name = xstrdup (XSTR (x, 0));
1985 1994
1986 ASM_OUTPUT_CG_NODE (asm_out_file, mcore_current_function_name, space_allocated); 1995 ASM_OUTPUT_CG_NODE (asm_out_file, mcore_current_function_name, space_allocated);
1987 1996
2017 2026
2018 for (offset = fi.arg_offset; remaining >= 4; offset -= 4, rn--, remaining -= 4) 2027 for (offset = fi.arg_offset; remaining >= 4; offset -= 4, rn--, remaining -= 4)
2019 { 2028 {
2020 emit_insn (gen_movsi 2029 emit_insn (gen_movsi
2021 (gen_rtx_MEM (SImode, 2030 (gen_rtx_MEM (SImode,
2022 plus_constant (stack_pointer_rtx, offset)), 2031 plus_constant (Pmode, stack_pointer_rtx,
2032 offset)),
2023 gen_rtx_REG (SImode, rn))); 2033 gen_rtx_REG (SImode, rn)));
2024 } 2034 }
2025 } 2035 }
2026 2036
2027 /* Do we need another stack adjustment before we do the register saves? */ 2037 /* Do we need another stack adjustment before we do the register saves? */
2052 } 2062 }
2053 else if (fi.reg_mask & (1 << i)) 2063 else if (fi.reg_mask & (1 << i))
2054 { 2064 {
2055 emit_insn (gen_movsi 2065 emit_insn (gen_movsi
2056 (gen_rtx_MEM (SImode, 2066 (gen_rtx_MEM (SImode,
2057 plus_constant (stack_pointer_rtx, offs)), 2067 plus_constant (Pmode, stack_pointer_rtx,
2068 offs)),
2058 gen_rtx_REG (SImode, i))); 2069 gen_rtx_REG (SImode, i)));
2059 offs += 4; 2070 offs += 4;
2060 } 2071 }
2061 } 2072 }
2062 } 2073 }
2147 else if (fi.reg_mask & (1 << i)) 2158 else if (fi.reg_mask & (1 << i))
2148 { 2159 {
2149 emit_insn (gen_movsi 2160 emit_insn (gen_movsi
2150 (gen_rtx_REG (SImode, i), 2161 (gen_rtx_REG (SImode, i),
2151 gen_rtx_MEM (SImode, 2162 gen_rtx_MEM (SImode,
2152 plus_constant (stack_pointer_rtx, offs)))); 2163 plus_constant (Pmode, stack_pointer_rtx,
2164 offs))));
2153 offs += 4; 2165 offs += 4;
2154 } 2166 }
2155 } 2167 }
2156 2168
2157 /* Give back anything else. */ 2169 /* Give back anything else. */
2262 { 2274 {
2263 /* The only things we conditionalize are those that can be directly 2275 /* The only things we conditionalize are those that can be directly
2264 changed into a conditional. Only bother with SImode items. If 2276 changed into a conditional. Only bother with SImode items. If
2265 we wanted to be a little more aggressive, we could also do other 2277 we wanted to be a little more aggressive, we could also do other
2266 modes such as DImode with reg-reg move or load 0. */ 2278 modes such as DImode with reg-reg move or load 0. */
2267 if (GET_CODE (insn) == INSN) 2279 if (NONJUMP_INSN_P (insn))
2268 { 2280 {
2269 rtx pat = PATTERN (insn); 2281 rtx pat = PATTERN (insn);
2270 rtx src, dst; 2282 rtx src, dst;
2271 2283
2272 if (GET_CODE (pat) != SET) 2284 if (GET_CODE (pat) != SET)
2313 (set (rx:DI) (ry:DI)) 2325 (set (rx:DI) (ry:DI))
2314 (set (rx:DI) (const_int 0)) 2326 (set (rx:DI) (const_int 0))
2315 */ 2327 */
2316 2328
2317 } 2329 }
2318 else if (GET_CODE (insn) == JUMP_INSN && 2330 else if (JUMP_P (insn)
2319 GET_CODE (PATTERN (insn)) == SET && 2331 && GET_CODE (PATTERN (insn)) == SET
2320 GET_CODE (XEXP (PATTERN (insn), 1)) == LABEL_REF) 2332 && GET_CODE (XEXP (PATTERN (insn), 1)) == LABEL_REF)
2321 return COND_BRANCH_INSN; 2333 return COND_BRANCH_INSN;
2322 2334
2323 return COND_NO; 2335 return COND_NO;
2324 } 2336 }
2325 2337
2326 /* Emit a conditional version of insn and replace the old insn with the 2338 /* Emit a conditional version of insn and replace the old insn with the
2327 new one. Return the new insn if emitted. */ 2339 new one. Return the new insn if emitted. */
2328 2340
2329 static rtx 2341 static rtx_insn *
2330 emit_new_cond_insn (rtx insn, int cond) 2342 emit_new_cond_insn (rtx_insn *insn, int cond)
2331 { 2343 {
2332 rtx c_insn = 0; 2344 rtx c_insn = 0;
2333 rtx pat, dst, src; 2345 rtx pat, dst, src;
2334 cond_type num; 2346 cond_type num;
2335 2347
2336 if ((num = is_cond_candidate (insn)) == COND_NO) 2348 if ((num = is_cond_candidate (insn)) == COND_NO)
2337 return NULL; 2349 return NULL;
2338 2350
2339 pat = PATTERN (insn); 2351 pat = PATTERN (insn);
2340 2352
2341 if (GET_CODE (insn) == INSN) 2353 if (NONJUMP_INSN_P (insn))
2342 { 2354 {
2343 dst = SET_DEST (pat); 2355 dst = SET_DEST (pat);
2344 src = SET_SRC (pat); 2356 src = SET_SRC (pat);
2345 } 2357 }
2346 else 2358 else
2409 else 2421 else
2410 c_insn = emit_insn_after (c_insn, insn); 2422 c_insn = emit_insn_after (c_insn, insn);
2411 2423
2412 delete_insn (insn); 2424 delete_insn (insn);
2413 2425
2414 return c_insn; 2426 return as_a <rtx_insn *> (c_insn);
2415 } 2427 }
2416 2428
2417 /* Attempt to change a basic block into a series of conditional insns. This 2429 /* Attempt to change a basic block into a series of conditional insns. This
2418 works by taking the branch at the end of the 1st block and scanning for the 2430 works by taking the branch at the end of the 1st block and scanning for the
2419 end of the 2nd block. If all instructions in the 2nd block have cond. 2431 end of the 2nd block. If all instructions in the 2nd block have cond.
2441 L3: ... 2453 L3: ...
2442 2454
2443 we can delete the L2 label if NUSES==1 and re-apply the optimization 2455 we can delete the L2 label if NUSES==1 and re-apply the optimization
2444 starting at the last instruction of block 2. This may allow an entire 2456 starting at the last instruction of block 2. This may allow an entire
2445 if-then-else statement to be conditionalized. BRC */ 2457 if-then-else statement to be conditionalized. BRC */
2446 static rtx 2458 static rtx_insn *
2447 conditionalize_block (rtx first) 2459 conditionalize_block (rtx_insn *first)
2448 { 2460 {
2449 rtx insn; 2461 rtx_insn *insn;
2450 rtx br_pat; 2462 rtx br_pat;
2451 rtx end_blk_1_br = 0; 2463 rtx_insn *end_blk_1_br = 0;
2452 rtx end_blk_2_insn = 0; 2464 rtx_insn *end_blk_2_insn = 0;
2453 rtx start_blk_3_lab = 0; 2465 rtx_insn *start_blk_3_lab = 0;
2454 int cond; 2466 int cond;
2455 int br_lab_num; 2467 int br_lab_num;
2456 int blk_size = 0; 2468 int blk_size = 0;
2457 2469
2458 2470
2459 /* Check that the first insn is a candidate conditional jump. This is 2471 /* Check that the first insn is a candidate conditional jump. This is
2460 the one that we'll eliminate. If not, advance to the next insn to 2472 the one that we'll eliminate. If not, advance to the next insn to
2461 try. */ 2473 try. */
2462 if (GET_CODE (first) != JUMP_INSN || 2474 if (! JUMP_P (first)
2463 GET_CODE (PATTERN (first)) != SET || 2475 || GET_CODE (PATTERN (first)) != SET
2464 GET_CODE (XEXP (PATTERN (first), 1)) != IF_THEN_ELSE) 2476 || GET_CODE (XEXP (PATTERN (first), 1)) != IF_THEN_ELSE)
2465 return NEXT_INSN (first); 2477 return NEXT_INSN (first);
2466 2478
2467 /* Extract some information we need. */ 2479 /* Extract some information we need. */
2468 end_blk_1_br = first; 2480 end_blk_1_br = first;
2469 br_pat = PATTERN (end_blk_1_br); 2481 br_pat = PATTERN (end_blk_1_br);
2537 start_blk_3_lab = insn; 2549 start_blk_3_lab = insn;
2538 2550
2539 for (insn = NEXT_INSN (end_blk_1_br); insn != start_blk_3_lab; 2551 for (insn = NEXT_INSN (end_blk_1_br); insn != start_blk_3_lab;
2540 insn = NEXT_INSN (insn)) 2552 insn = NEXT_INSN (insn))
2541 { 2553 {
2542 rtx newinsn; 2554 rtx_insn *newinsn;
2543 2555
2544 if (INSN_DELETED_P (insn)) 2556 if (insn->deleted ())
2545 continue; 2557 continue;
2546 2558
2547 /* Try to form a conditional variant of the instruction and emit it. */ 2559 /* Try to form a conditional variant of the instruction and emit it. */
2548 if ((newinsn = emit_new_cond_insn (insn, cond))) 2560 if ((newinsn = emit_new_cond_insn (insn, cond)))
2549 { 2561 {
2585 in before cse 2). */ 2597 in before cse 2). */
2586 2598
2587 static void 2599 static void
2588 conditionalize_optimization (void) 2600 conditionalize_optimization (void)
2589 { 2601 {
2590 rtx insn; 2602 rtx_insn *insn;
2591 2603
2592 for (insn = get_insns (); insn; insn = conditionalize_block (insn)) 2604 for (insn = get_insns (); insn; insn = conditionalize_block (insn))
2593 continue; 2605 continue;
2594 } 2606 }
2595 2607
2596 static int saved_warn_return_type = -1;
2597 static int saved_warn_return_type_count = 0;
2598
2599 /* This is to handle loads from the constant pool. */ 2608 /* This is to handle loads from the constant pool. */
2600 2609
2601 static void 2610 static void
2602 mcore_reorg (void) 2611 mcore_reorg (void)
2603 { 2612 {
2604 /* Reset this variable. */ 2613 /* Reset this variable. */
2605 current_function_anonymous_args = 0; 2614 current_function_anonymous_args = 0;
2606
2607 /* Restore the warn_return_type if it has been altered. */
2608 if (saved_warn_return_type != -1)
2609 {
2610 /* Only restore the value if we have reached another function.
2611 The test of warn_return_type occurs in final_function () in
2612 c-decl.c a long time after the code for the function is generated,
2613 so we need a counter to tell us when we have finished parsing that
2614 function and can restore the flag. */
2615 if (--saved_warn_return_type_count == 0)
2616 {
2617 warn_return_type = saved_warn_return_type;
2618 saved_warn_return_type = -1;
2619 }
2620 }
2621 2615
2622 if (optimize == 0) 2616 if (optimize == 0)
2623 return; 2617 return;
2624 2618
2625 /* Conditionalize blocks where we can. */ 2619 /* Conditionalize blocks where we can. */
2652 /* Implement SECONDARY_RELOAD_CLASS. If RCLASS contains r15, and we can't 2646 /* Implement SECONDARY_RELOAD_CLASS. If RCLASS contains r15, and we can't
2653 directly move X into it, use r1-r14 as a temporary. */ 2647 directly move X into it, use r1-r14 as a temporary. */
2654 2648
2655 enum reg_class 2649 enum reg_class
2656 mcore_secondary_reload_class (enum reg_class rclass, 2650 mcore_secondary_reload_class (enum reg_class rclass,
2657 enum machine_mode mode ATTRIBUTE_UNUSED, rtx x) 2651 machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2658 { 2652 {
2659 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], 15) 2653 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], 15)
2660 && !mcore_r15_operand_p (x)) 2654 && !mcore_r15_operand_p (x))
2661 return LRW_REGS; 2655 return LRW_REGS;
2662 return NO_REGS; 2656 return NO_REGS;
2708 2702
2709 /* Compute the number of word sized registers needed to 2703 /* Compute the number of word sized registers needed to
2710 hold a function argument of mode MODE and type TYPE. */ 2704 hold a function argument of mode MODE and type TYPE. */
2711 2705
2712 int 2706 int
2713 mcore_num_arg_regs (enum machine_mode mode, const_tree type) 2707 mcore_num_arg_regs (machine_mode mode, const_tree type)
2714 { 2708 {
2715 int size; 2709 int size;
2716 2710
2717 if (targetm.calls.must_pass_in_stack (mode, type)) 2711 if (targetm.calls.must_pass_in_stack (mode, type))
2718 return 0; 2712 return 0;
2724 2718
2725 return ROUND_ADVANCE (size); 2719 return ROUND_ADVANCE (size);
2726 } 2720 }
2727 2721
2728 static rtx 2722 static rtx
2729 handle_structs_in_regs (enum machine_mode mode, const_tree type, int reg) 2723 handle_structs_in_regs (machine_mode mode, const_tree type, int reg)
2730 { 2724 {
2731 int size; 2725 int size;
2732 2726
2733 /* The MCore ABI defines that a structure whose size is not a whole multiple 2727 /* The MCore ABI defines that a structure whose size is not a whole multiple
2734 of bytes is passed packed into registers (or spilled onto the stack if 2728 of bytes is passed packed into registers (or spilled onto the stack if
2770 } 2764 }
2771 2765
2772 rtx 2766 rtx
2773 mcore_function_value (const_tree valtype, const_tree func) 2767 mcore_function_value (const_tree valtype, const_tree func)
2774 { 2768 {
2775 enum machine_mode mode; 2769 machine_mode mode;
2776 int unsigned_p; 2770 int unsigned_p;
2777 2771
2778 mode = TYPE_MODE (valtype); 2772 mode = TYPE_MODE (valtype);
2779 2773
2780 /* Since we promote return types, we must promote the mode here too. */ 2774 /* Since we promote return types, we must promote the mode here too. */
2800 and the rest are pushed. Any arg that starts within the first 2794 and the rest are pushed. Any arg that starts within the first
2801 NPARM_REGS words is at least partially passed in a register unless 2795 NPARM_REGS words is at least partially passed in a register unless
2802 its data type forbids. */ 2796 its data type forbids. */
2803 2797
/* Implement TARGET_FUNCTION_ARG for MCore.  Decide where the current
   argument lives.  Returns 0 ("pass on the stack") for unnamed arguments,
   VOIDmode, types the target hook says must go on the stack, or when the
   argument registers are exhausted; otherwise returns the register rtx
   built by handle_structs_in_regs (presumably a REG or PARALLEL for
   oddly-sized structs -- confirm against its definition above).
   ROUND_REG aligns the cumulative register count for MODE before the
   NPARM_REGS capacity check.  */
2804 static rtx 2798 static rtx
2805 mcore_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, 2799 mcore_function_arg (cumulative_args_t cum, machine_mode mode,
2806 const_tree type, bool named) 2800 const_tree type, bool named)
2807 { 2801 {
2808 int arg_reg; 2802 int arg_reg;
2809 2803
2810 if (! named || mode == VOIDmode) 2804 if (! named || mode == VOIDmode)
2811 return 0; 2805 return 0;
2812 2806
2813 if (targetm.calls.must_pass_in_stack (mode, type)) 2807 if (targetm.calls.must_pass_in_stack (mode, type))
2814 return 0; 2808 return 0;
2815 2809
2816 arg_reg = ROUND_REG (*cum, mode); 2810 arg_reg = ROUND_REG (*get_cumulative_args (cum), mode);
2817 2811
2818 if (arg_reg < NPARM_REGS) 2812 if (arg_reg < NPARM_REGS)
2819 return handle_structs_in_regs (mode, type, FIRST_PARM_REG + arg_reg); 2813 return handle_structs_in_regs (mode, type, FIRST_PARM_REG + arg_reg);
2820 2814
2821 return 0; 2815 return 0;
2822 } 2816 }
2823 2817
/* Implement TARGET_FUNCTION_ARG_ADVANCE.  Step the cumulative-args
   counter past the current argument: first round it up for MODE's
   alignment (ROUND_REG), then add the number of argument registers the
   value occupies -- but only for named arguments, since `(int)named'
   multiplies the register count by 0 for unnamed (varargs) ones.  */
2824 static void 2818 static void
2825 mcore_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, 2819 mcore_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
2826 const_tree type, bool named ATTRIBUTE_UNUSED) 2820 const_tree type, bool named ATTRIBUTE_UNUSED)
2827 { 2821 {
2822 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2823
2828 *cum = (ROUND_REG (*cum, mode) 2824 *cum = (ROUND_REG (*cum, mode)
2829 + (int)named * mcore_num_arg_regs (mode, type)); 2825 + (int)named * mcore_num_arg_regs (mode, type));
2830 } 2826 }
2831 2827
2832 static unsigned int 2828 static unsigned int
2833 mcore_function_arg_boundary (enum machine_mode mode, 2829 mcore_function_arg_boundary (machine_mode mode,
2834 const_tree type ATTRIBUTE_UNUSED) 2830 const_tree type ATTRIBUTE_UNUSED)
2835 { 2831 {
2836 /* Doubles must be aligned to an 8 byte boundary. */ 2832 /* Doubles must be aligned to an 8 byte boundary. */
2837 return (mode != BLKmode && GET_MODE_SIZE (mode) == 8 2833 return (mode != BLKmode && GET_MODE_SIZE (mode) == 8
2838 ? BIGGEST_ALIGNMENT 2834 ? BIGGEST_ALIGNMENT
2845 registers, or entirely on the stack, then 0 is returned. CUM is the 2841 registers, or entirely on the stack, then 0 is returned. CUM is the
2846 number of argument registers already used by earlier parameters to 2842 number of argument registers already used by earlier parameters to
2847 the function. */ 2843 the function. */
2848 2844
2849 static int 2845 static int
2850 mcore_arg_partial_bytes (CUMULATIVE_ARGS *cum, enum machine_mode mode, 2846 mcore_arg_partial_bytes (cumulative_args_t cum, machine_mode mode,
2851 tree type, bool named) 2847 tree type, bool named)
2852 { 2848 {
2853 int reg = ROUND_REG (*cum, mode); 2849 int reg = ROUND_REG (*get_cumulative_args (cum), mode);
2854 2850
2855 if (named == 0) 2851 if (named == 0)
2856 return 0; 2852 return 0;
2857 2853
2858 if (targetm.calls.must_pass_in_stack (mode, type)) 2854 if (targetm.calls.must_pass_in_stack (mode, type))
3065 3061
3066 static tree 3062 static tree
3067 mcore_handle_naked_attribute (tree * node, tree name, tree args ATTRIBUTE_UNUSED, 3063 mcore_handle_naked_attribute (tree * node, tree name, tree args ATTRIBUTE_UNUSED,
3068 int flags ATTRIBUTE_UNUSED, bool * no_add_attrs) 3064 int flags ATTRIBUTE_UNUSED, bool * no_add_attrs)
3069 { 3065 {
3070 if (TREE_CODE (*node) == FUNCTION_DECL) 3066 if (TREE_CODE (*node) != FUNCTION_DECL)
3071 {
3072 /* PR14310 - don't complain about lack of return statement
3073 in naked functions. The solution here is a gross hack
3074 but this is the only way to solve the problem without
3075 adding a new feature to GCC. I did try submitting a patch
3076 that would add such a new feature, but it was (rightfully)
3077 rejected on the grounds that it was creeping featurism,
3078 so hence this code. */
3079 if (warn_return_type)
3080 {
3081 saved_warn_return_type = warn_return_type;
3082 warn_return_type = 0;
3083 saved_warn_return_type_count = 2;
3084 }
3085 else if (saved_warn_return_type_count)
3086 saved_warn_return_type_count = 2;
3087 }
3088 else
3089 { 3067 {
3090 warning (OPT_Wattributes, "%qE attribute only applies to functions", 3068 warning (OPT_Wattributes, "%qE attribute only applies to functions",
3091 name); 3069 name);
3092 *no_add_attrs = true; 3070 *no_add_attrs = true;
3093 } 3071 }
3126 len = strlen (name) + strlen (prefix); 3104 len = strlen (name) + strlen (prefix);
3127 string = XALLOCAVEC (char, len + 1); 3105 string = XALLOCAVEC (char, len + 1);
3128 3106
3129 sprintf (string, "%s%s", prefix, name); 3107 sprintf (string, "%s%s", prefix, name);
3130 3108
3131 DECL_SECTION_NAME (decl) = build_string (len, string); 3109 set_decl_section_name (decl, string);
3132 } 3110 }
3133 3111
/* Return nonzero if the current function carries the "naked" attribute,
   i.e. its prologue/epilogue must not be emitted by the compiler.  */
3134 int 3112 int
3135 mcore_naked_function_p (void) 3113 mcore_naked_function_p (void)
3136 { 3114 {
3137 return lookup_attribute ("naked", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE; 3115 return lookup_attribute ("naked", DECL_ATTRIBUTES (current_function_decl)) != NULL_TREE;
3116 }
3117
/* Implement TARGET_WARN_FUNC_RETURN.  Return true (do warn about a
   missing return statement) for every function EXCEPT those marked with
   the "naked" attribute.  This replaces the old warn_return_type
   save/restore hack that previously lived in mcore_reorg.  */
3118 static bool
3119 mcore_warn_func_return (tree decl)
3120 {
3121 /* Naked functions are implemented entirely in assembly, including the
3122 return sequence, so suppress warnings about this. */
3123 return lookup_attribute ("naked", DECL_ATTRIBUTES (decl)) == NULL_TREE;
3138 } 3124 }
3139 3125
3140 #ifdef OBJECT_FORMAT_ELF 3126 #ifdef OBJECT_FORMAT_ELF
3141 static void 3127 static void
3142 mcore_asm_named_section (const char *name, 3128 mcore_asm_named_section (const char *name,
3202 mem = adjust_address (m_tramp, SImode, 8); 3188 mem = adjust_address (m_tramp, SImode, 8);
3203 emit_move_insn (mem, chain_value); 3189 emit_move_insn (mem, chain_value);
3204 mem = adjust_address (m_tramp, SImode, 12); 3190 mem = adjust_address (m_tramp, SImode, 12);
3205 emit_move_insn (mem, fnaddr); 3191 emit_move_insn (mem, fnaddr);
3206 } 3192 }
3193
3194 /* Implement TARGET_LEGITIMATE_CONSTANT_P
3195
3196 On the MCore, allow anything but a double. */
3197
/* MODE is ignored; legitimacy depends solely on the rtx code of X:
   every constant except a CONST_DOUBLE is acceptable.  */
3198 static bool
3199 mcore_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
3200 {
3201 return GET_CODE (x) != CONST_DOUBLE;
3202 }
3203
3204 /* Helper function for `mcore_legitimate_address_p'. */
3205
/* Decide whether REG may be used as a base register.  Under strict
   checking (STRICT_P true) defer entirely to REGNO_OK_FOR_BASE_P.
   Under lax checking, accept any pseudo register, plus hard registers
   numbered 16 or below.  */
3206 static bool
3207 mcore_reg_ok_for_base_p (const_rtx reg, bool strict_p)
3208 {
3209 if (strict_p)
3210 return REGNO_OK_FOR_BASE_P (REGNO (reg));
3211 else
3212 return (REGNO (reg) <= 16 || !HARD_REGISTER_P (reg));
3213 }
3214
/* Return true if X is a REG that qualifies as a base register under the
   given strictness (see mcore_reg_ok_for_base_p above).  */
3215 static bool
3216 mcore_base_register_rtx_p (const_rtx x, bool strict_p)
3217 {
3218 return REG_P(x) && mcore_reg_ok_for_base_p (x, strict_p);
3219 }
3220
3221 /* A legitimate index for a QI is 0..15, for HI is 0..30, for SI is 0..60,
3222 and for DI is 0..56 because we use two SI loads, etc. */
3223
/* OP must be a CONST_INT; anything else (e.g. a register index) is
   rejected.  Offsets must be naturally aligned for the access width:
   a multiple of 4 for word-or-larger modes, even for HImode.  The
   64 - GET_MODE_SIZE bound keeps multi-word accesses (split into SImode
   pieces) inside the 0..60 displacement range of the load/store forms.  */
3224 static bool
3225 mcore_legitimate_index_p (machine_mode mode, const_rtx op)
3226 {
3227 if (CONST_INT_P (op))
3228 {
3229 if (GET_MODE_SIZE (mode) >= 4
3230 && (((unsigned HOST_WIDE_INT) INTVAL (op)) % 4) == 0
3231 && ((unsigned HOST_WIDE_INT) INTVAL (op))
3232 <= (unsigned HOST_WIDE_INT) 64 - GET_MODE_SIZE (mode))
3233 return true;
3234 if (GET_MODE_SIZE (mode) == 2
3235 && (((unsigned HOST_WIDE_INT) INTVAL (op)) % 2) == 0
3236 && ((unsigned HOST_WIDE_INT) INTVAL (op)) <= 30)
3237 return true;
3238 if (GET_MODE_SIZE (mode) == 1
3239 && ((unsigned HOST_WIDE_INT) INTVAL (op)) <= 15)
3240 return true;
3241 }
3242 return false;
3243 }
3244
3245
3246 /* Worker function for TARGET_ADDR_SPACE_LEGITIMATE_ADDRESS_P.
3247
3248 Allow REG
3249 REG + disp */
3250
/* Accepts a bare base register, or a PLUS/LO_SUM of a base register and
   a constant displacement valid for MODE (see mcore_legitimate_index_p);
   both operand orders are tried.  Only the generic address space is
   supported -- any other AS trips the assert.  */
3251 static bool
3252 mcore_legitimate_address_p (machine_mode mode, rtx x, bool strict_p,
3253 addr_space_t as)
3254 {
3255 gcc_assert (ADDR_SPACE_GENERIC_P (as));
3256
3257 if (mcore_base_register_rtx_p (x, strict_p))
3258 return true;
3259 else if (GET_CODE (x) == PLUS || GET_CODE (x) == LO_SUM)
3260 {
3261 rtx xop0 = XEXP (x, 0);
3262 rtx xop1 = XEXP (x, 1);
3263 if (mcore_base_register_rtx_p (xop0, strict_p)
3264 && mcore_legitimate_index_p (mode, xop1))
3265 return true;
3266 if (mcore_base_register_rtx_p (xop1, strict_p)
3267 && mcore_legitimate_index_p (mode, xop0))
3268 return true;
3269 }
3270
3271 return false;
3272 }
3273
3274 /* Implement TARGET_HARD_REGNO_MODE_OK. We may keep double values in
3275 even registers. */
3276
/* When TARGET_8ALIGN is set, modes wider than a word must start in an
   even-numbered register; otherwise any of the first 18 hard registers
   (presumably r0-r15 plus two special registers -- confirm against
   mcore.h's register layout) may hold a value of MODE.  */
3277 static bool
3278 mcore_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
3279 {
3280 if (TARGET_8ALIGN && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
3281 return (regno & 1) == 0;
3282
3283 return regno < 18;
3284 }
3285
3286 /* Implement TARGET_MODES_TIEABLE_P. */
3287
/* Two modes are tieable when they are identical, or merely belong to the
   same mode class (e.g. both MODE_INT) -- values may then share a hard
   register without conversion.  */
3288 static bool
3289 mcore_modes_tieable_p (machine_mode mode1, machine_mode mode2)
3290 {
3291 return mode1 == mode2 || GET_MODE_CLASS (mode1) == GET_MODE_CLASS (mode2);
3292 }