comparison gcc/config/xtensa/xtensa.c @ 111:04ced10e8804

gcc 7
author kono
date Fri, 27 Oct 2017 22:46:09 +0900
parents f6334be47118
children 84e7813d76e9
compared revisions: 68:561a7518be6b (old) vs. 111:04ced10e8804 (new)
1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture. 1 /* Subroutines for insn-output.c for Tensilica's Xtensa architecture.
2 Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 2 Copyright (C) 2001-2017 Free Software Foundation, Inc.
3 Free Software Foundation, Inc.
4 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica. 3 Contributed by Bob Wilson (bwilson@tensilica.com) at Tensilica.
5 4
6 This file is part of GCC. 5 This file is part of GCC.
7 6
8 GCC is free software; you can redistribute it and/or modify it under 7 GCC is free software; you can redistribute it and/or modify it under
20 <http://www.gnu.org/licenses/>. */ 19 <http://www.gnu.org/licenses/>. */
21 20
22 #include "config.h" 21 #include "config.h"
23 #include "system.h" 22 #include "system.h"
24 #include "coretypes.h" 23 #include "coretypes.h"
25 #include "tm.h" 24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h" 26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "cfghooks.h"
30 #include "df.h"
31 #include "memmodel.h"
32 #include "tm_p.h"
33 #include "stringpool.h"
34 #include "attribs.h"
35 #include "optabs.h"
27 #include "regs.h" 36 #include "regs.h"
28 #include "hard-reg-set.h" 37 #include "emit-rtl.h"
29 #include "basic-block.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-flags.h"
33 #include "insn-attr.h"
34 #include "insn-codes.h"
35 #include "recog.h" 38 #include "recog.h"
39 #include "diagnostic-core.h"
40 #include "cfgrtl.h"
36 #include "output.h" 41 #include "output.h"
37 #include "tree.h" 42 #include "fold-const.h"
43 #include "stor-layout.h"
44 #include "calls.h"
45 #include "varasm.h"
46 #include "alias.h"
47 #include "explow.h"
38 #include "expr.h" 48 #include "expr.h"
39 #include "flags.h"
40 #include "reload.h" 49 #include "reload.h"
41 #include "tm_p.h" 50 #include "langhooks.h"
42 #include "function.h" 51 #include "gimplify.h"
43 #include "diagnostic-core.h" 52 #include "builtins.h"
44 #include "optabs.h" 53 #include "dumpfile.h"
45 #include "libfuncs.h" 54 #include "hw-doloop.h"
46 #include "ggc.h" 55 #include "rtl-iter.h"
47 #include "target.h" 56
57 /* This file should be included last. */
48 #include "target-def.h" 58 #include "target-def.h"
49 #include "langhooks.h"
50 #include "gimple.h"
51 #include "df.h"
52
53 59
54 /* Enumeration for all of the relational tests, so that we can build 60 /* Enumeration for all of the relational tests, so that we can build
55 arrays indexed by the test type, and not worry about the order 61 arrays indexed by the test type, and not worry about the order
56 of EQ, NE, etc. */ 62 of EQ, NE, etc. */
57 63
70 ITEST_MAX 76 ITEST_MAX
71 }; 77 };
72 78
73 /* Array giving truth value on whether or not a given hard register 79 /* Array giving truth value on whether or not a given hard register
74 can support a given mode. */ 80 can support a given mode. */
75 char xtensa_hard_regno_mode_ok[(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER]; 81 static char xtensa_hard_regno_mode_ok_p
76 82 [(int) MAX_MACHINE_MODE][FIRST_PSEUDO_REGISTER];
77 /* Current frame size calculated by compute_frame_size. */
78 unsigned xtensa_current_frame_size;
79 83
80 /* Largest block move to handle in-line. */ 84 /* Largest block move to handle in-line. */
81 #define LARGEST_MOVE_RATIO 15 85 #define LARGEST_MOVE_RATIO 15
82 86
83 /* Define the structure for the machine field in struct function. */ 87 /* Define the structure for the machine field in struct function. */
85 { 89 {
86 int accesses_prev_frame; 90 int accesses_prev_frame;
87 bool need_a7_copy; 91 bool need_a7_copy;
88 bool vararg_a7; 92 bool vararg_a7;
89 rtx vararg_a7_copy; 93 rtx vararg_a7_copy;
90 rtx set_frame_ptr_insn; 94 rtx_insn *set_frame_ptr_insn;
95 /* Current frame size calculated by compute_frame_size. */
96 unsigned current_frame_size;
97 /* Callee-save area size in the current frame calculated by
98 compute_frame_size. */
99 int callee_save_size;
100 bool frame_laid_out;
101 bool epilogue_done;
91 }; 102 };
92 103
93 /* Vector, indexed by hard register number, which contains 1 for a 104 /* Vector, indexed by hard register number, which contains 1 for a
94 register that is allowable in a candidate for leaf function 105 register that is allowable in a candidate for leaf function
95 treatment. */ 106 treatment. */
100 1, 1, 1, 111 1, 1, 1,
101 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 112 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
102 1 113 1
103 }; 114 };
104 115
105 /* Map hard register number to register class */
106 const enum reg_class xtensa_regno_to_class[FIRST_PSEUDO_REGISTER] =
107 {
108 RL_REGS, SP_REG, RL_REGS, RL_REGS,
109 RL_REGS, RL_REGS, RL_REGS, GR_REGS,
110 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
111 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
112 AR_REGS, AR_REGS, BR_REGS,
113 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
114 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
115 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
116 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
117 ACC_REG,
118 };
119
120 static void xtensa_option_override (void); 116 static void xtensa_option_override (void);
121 static enum internal_test map_test_to_internal_test (enum rtx_code); 117 static enum internal_test map_test_to_internal_test (enum rtx_code);
122 static rtx gen_int_relational (enum rtx_code, rtx, rtx, int *); 118 static rtx gen_int_relational (enum rtx_code, rtx, rtx, int *);
123 static rtx gen_float_relational (enum rtx_code, rtx, rtx); 119 static rtx gen_float_relational (enum rtx_code, rtx, rtx);
124 static rtx gen_conditional_move (enum rtx_code, enum machine_mode, rtx, rtx); 120 static rtx gen_conditional_move (enum rtx_code, machine_mode, rtx, rtx);
125 static rtx fixup_subreg_mem (rtx); 121 static rtx fixup_subreg_mem (rtx);
126 static struct machine_function * xtensa_init_machine_status (void); 122 static struct machine_function * xtensa_init_machine_status (void);
127 static rtx xtensa_legitimize_tls_address (rtx); 123 static rtx xtensa_legitimize_tls_address (rtx);
128 static rtx xtensa_legitimize_address (rtx, rtx, enum machine_mode); 124 static rtx xtensa_legitimize_address (rtx, rtx, machine_mode);
129 static bool xtensa_mode_dependent_address_p (const_rtx); 125 static bool xtensa_mode_dependent_address_p (const_rtx, addr_space_t);
130 static bool xtensa_return_in_msb (const_tree); 126 static bool xtensa_return_in_msb (const_tree);
131 static void printx (FILE *, signed int); 127 static void printx (FILE *, signed int);
132 static void xtensa_function_epilogue (FILE *, HOST_WIDE_INT);
133 static rtx xtensa_builtin_saveregs (void); 128 static rtx xtensa_builtin_saveregs (void);
134 static bool xtensa_legitimate_address_p (enum machine_mode, rtx, bool); 129 static bool xtensa_legitimate_address_p (machine_mode, rtx, bool);
135 static unsigned int xtensa_multibss_section_type_flags (tree, const char *, 130 static unsigned int xtensa_multibss_section_type_flags (tree, const char *,
136 int) ATTRIBUTE_UNUSED; 131 int) ATTRIBUTE_UNUSED;
137 static section *xtensa_select_rtx_section (enum machine_mode, rtx, 132 static section *xtensa_select_rtx_section (machine_mode, rtx,
138 unsigned HOST_WIDE_INT); 133 unsigned HOST_WIDE_INT);
139 static bool xtensa_rtx_costs (rtx, int, int, int *, bool); 134 static bool xtensa_rtx_costs (rtx, machine_mode, int, int, int *, bool);
140 static int xtensa_register_move_cost (enum machine_mode, reg_class_t, 135 static int xtensa_register_move_cost (machine_mode, reg_class_t,
141 reg_class_t); 136 reg_class_t);
142 static int xtensa_memory_move_cost (enum machine_mode, reg_class_t, bool); 137 static int xtensa_memory_move_cost (machine_mode, reg_class_t, bool);
143 static tree xtensa_build_builtin_va_list (void); 138 static tree xtensa_build_builtin_va_list (void);
144 static bool xtensa_return_in_memory (const_tree, const_tree); 139 static bool xtensa_return_in_memory (const_tree, const_tree);
145 static tree xtensa_gimplify_va_arg_expr (tree, tree, gimple_seq *, 140 static tree xtensa_gimplify_va_arg_expr (tree, tree, gimple_seq *,
146 gimple_seq *); 141 gimple_seq *);
147 static void xtensa_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode, 142 static void xtensa_function_arg_advance (cumulative_args_t, machine_mode,
148 const_tree, bool); 143 const_tree, bool);
149 static rtx xtensa_function_arg (CUMULATIVE_ARGS *, enum machine_mode, 144 static rtx xtensa_function_arg (cumulative_args_t, machine_mode,
150 const_tree, bool); 145 const_tree, bool);
151 static rtx xtensa_function_incoming_arg (CUMULATIVE_ARGS *, 146 static rtx xtensa_function_incoming_arg (cumulative_args_t,
152 enum machine_mode, const_tree, bool); 147 machine_mode, const_tree, bool);
153 static rtx xtensa_function_value (const_tree, const_tree, bool); 148 static rtx xtensa_function_value (const_tree, const_tree, bool);
154 static rtx xtensa_libcall_value (enum machine_mode, const_rtx); 149 static rtx xtensa_libcall_value (machine_mode, const_rtx);
155 static bool xtensa_function_value_regno_p (const unsigned int); 150 static bool xtensa_function_value_regno_p (const unsigned int);
156 static unsigned int xtensa_function_arg_boundary (enum machine_mode, 151 static unsigned int xtensa_function_arg_boundary (machine_mode,
157 const_tree); 152 const_tree);
158 static void xtensa_init_builtins (void); 153 static void xtensa_init_builtins (void);
159 static tree xtensa_fold_builtin (tree, int, tree *, bool); 154 static tree xtensa_fold_builtin (tree, int, tree *, bool);
160 static rtx xtensa_expand_builtin (tree, rtx, rtx, enum machine_mode, int); 155 static rtx xtensa_expand_builtin (tree, rtx, rtx, machine_mode, int);
161 static void xtensa_va_start (tree, rtx); 156 static void xtensa_va_start (tree, rtx);
162 static bool xtensa_frame_pointer_required (void); 157 static bool xtensa_frame_pointer_required (void);
163 static rtx xtensa_static_chain (const_tree, bool); 158 static rtx xtensa_static_chain (const_tree, bool);
164 static void xtensa_asm_trampoline_template (FILE *); 159 static void xtensa_asm_trampoline_template (FILE *);
165 static void xtensa_trampoline_init (rtx, tree, rtx); 160 static void xtensa_trampoline_init (rtx, tree, rtx);
166 static bool xtensa_output_addr_const_extra (FILE *, rtx); 161 static bool xtensa_output_addr_const_extra (FILE *, rtx);
162 static bool xtensa_cannot_force_const_mem (machine_mode, rtx);
167 163
168 static reg_class_t xtensa_preferred_reload_class (rtx, reg_class_t); 164 static reg_class_t xtensa_preferred_reload_class (rtx, reg_class_t);
169 static reg_class_t xtensa_preferred_output_reload_class (rtx, reg_class_t); 165 static reg_class_t xtensa_preferred_output_reload_class (rtx, reg_class_t);
170 static reg_class_t xtensa_secondary_reload (bool, rtx, reg_class_t, 166 static reg_class_t xtensa_secondary_reload (bool, rtx, reg_class_t,
171 enum machine_mode, 167 machine_mode,
172 struct secondary_reload_info *); 168 struct secondary_reload_info *);
173 169
174 static bool constantpool_address_p (const_rtx addr); 170 static bool constantpool_address_p (const_rtx addr);
175 171 static bool xtensa_legitimate_constant_p (machine_mode, rtx);
176 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] = 172 static void xtensa_reorg (void);
177 REG_ALLOC_ORDER; 173 static bool xtensa_can_use_doloop_p (const widest_int &, const widest_int &,
178 174 unsigned int, bool);
179 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */ 175 static const char *xtensa_invalid_within_doloop (const rtx_insn *);
180 176
181 static const struct default_options xtensa_option_optimization_table[] = 177 static bool xtensa_member_type_forces_blk (const_tree,
182 { 178 machine_mode mode);
183 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 }, 179
184 /* Reordering blocks for Xtensa is not a good idea unless the 180 static void xtensa_conditional_register_usage (void);
185 compiler understands the range of conditional branches. 181 static unsigned int xtensa_hard_regno_nregs (unsigned int, machine_mode);
186 Currently all branch relaxation for Xtensa is handled in the 182 static bool xtensa_hard_regno_mode_ok (unsigned int, machine_mode);
187 assembler, so GCC cannot do a good job of reordering blocks. 183 static bool xtensa_modes_tieable_p (machine_mode, machine_mode);
188 Do not enable reordering unless it is explicitly requested. */ 184 static HOST_WIDE_INT xtensa_constant_alignment (const_tree, HOST_WIDE_INT);
189 { OPT_LEVELS_ALL, OPT_freorder_blocks, NULL, 0 }, 185 static HOST_WIDE_INT xtensa_starting_frame_offset (void);
190 { OPT_LEVELS_NONE, 0, NULL, 0 } 186
191 };
192 187
193
194 /* This macro generates the assembly code for function exit,
195 on machines that need it. If FUNCTION_EPILOGUE is not defined
196 then individual return instructions are generated for each
197 return statement. Args are same as for FUNCTION_PROLOGUE. */
198
199 #undef TARGET_ASM_FUNCTION_EPILOGUE
200 #define TARGET_ASM_FUNCTION_EPILOGUE xtensa_function_epilogue
201 188
202 /* These hooks specify assembly directives for creating certain kinds 189 /* These hooks specify assembly directives for creating certain kinds
203 of integer object. */ 190 of integer object. */
204 191
205 #undef TARGET_ASM_ALIGNED_SI_OP 192 #undef TARGET_ASM_ALIGNED_SI_OP
206 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t" 193 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
207 194
208 #undef TARGET_ASM_SELECT_RTX_SECTION 195 #undef TARGET_ASM_SELECT_RTX_SECTION
209 #define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section 196 #define TARGET_ASM_SELECT_RTX_SECTION xtensa_select_rtx_section
210
211 #undef TARGET_DEFAULT_TARGET_FLAGS
212 #define TARGET_DEFAULT_TARGET_FLAGS (TARGET_DEFAULT)
213 197
214 #undef TARGET_LEGITIMIZE_ADDRESS 198 #undef TARGET_LEGITIMIZE_ADDRESS
215 #define TARGET_LEGITIMIZE_ADDRESS xtensa_legitimize_address 199 #define TARGET_LEGITIMIZE_ADDRESS xtensa_legitimize_address
216 #undef TARGET_MODE_DEPENDENT_ADDRESS_P 200 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
217 #define TARGET_MODE_DEPENDENT_ADDRESS_P xtensa_mode_dependent_address_p 201 #define TARGET_MODE_DEPENDENT_ADDRESS_P xtensa_mode_dependent_address_p
221 #undef TARGET_MEMORY_MOVE_COST 205 #undef TARGET_MEMORY_MOVE_COST
222 #define TARGET_MEMORY_MOVE_COST xtensa_memory_move_cost 206 #define TARGET_MEMORY_MOVE_COST xtensa_memory_move_cost
223 #undef TARGET_RTX_COSTS 207 #undef TARGET_RTX_COSTS
224 #define TARGET_RTX_COSTS xtensa_rtx_costs 208 #define TARGET_RTX_COSTS xtensa_rtx_costs
225 #undef TARGET_ADDRESS_COST 209 #undef TARGET_ADDRESS_COST
226 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0 210 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
211
212 #undef TARGET_MEMBER_TYPE_FORCES_BLK
213 #define TARGET_MEMBER_TYPE_FORCES_BLK xtensa_member_type_forces_blk
227 214
228 #undef TARGET_BUILD_BUILTIN_VA_LIST 215 #undef TARGET_BUILD_BUILTIN_VA_LIST
229 #define TARGET_BUILD_BUILTIN_VA_LIST xtensa_build_builtin_va_list 216 #define TARGET_BUILD_BUILTIN_VA_LIST xtensa_build_builtin_va_list
230 217
231 #undef TARGET_EXPAND_BUILTIN_VA_START 218 #undef TARGET_EXPAND_BUILTIN_VA_START
283 270
284 #undef TARGET_HAVE_TLS 271 #undef TARGET_HAVE_TLS
285 #define TARGET_HAVE_TLS (TARGET_THREADPTR && HAVE_AS_TLS) 272 #define TARGET_HAVE_TLS (TARGET_THREADPTR && HAVE_AS_TLS)
286 273
287 #undef TARGET_CANNOT_FORCE_CONST_MEM 274 #undef TARGET_CANNOT_FORCE_CONST_MEM
288 #define TARGET_CANNOT_FORCE_CONST_MEM xtensa_tls_referenced_p 275 #define TARGET_CANNOT_FORCE_CONST_MEM xtensa_cannot_force_const_mem
276
277 #undef TARGET_LRA_P
278 #define TARGET_LRA_P hook_bool_void_false
289 279
290 #undef TARGET_LEGITIMATE_ADDRESS_P 280 #undef TARGET_LEGITIMATE_ADDRESS_P
291 #define TARGET_LEGITIMATE_ADDRESS_P xtensa_legitimate_address_p 281 #define TARGET_LEGITIMATE_ADDRESS_P xtensa_legitimate_address_p
292 282
293 #undef TARGET_FRAME_POINTER_REQUIRED 283 #undef TARGET_FRAME_POINTER_REQUIRED
300 #undef TARGET_TRAMPOLINE_INIT 290 #undef TARGET_TRAMPOLINE_INIT
301 #define TARGET_TRAMPOLINE_INIT xtensa_trampoline_init 291 #define TARGET_TRAMPOLINE_INIT xtensa_trampoline_init
302 292
303 #undef TARGET_OPTION_OVERRIDE 293 #undef TARGET_OPTION_OVERRIDE
304 #define TARGET_OPTION_OVERRIDE xtensa_option_override 294 #define TARGET_OPTION_OVERRIDE xtensa_option_override
305 #undef TARGET_OPTION_OPTIMIZATION_TABLE
306 #define TARGET_OPTION_OPTIMIZATION_TABLE xtensa_option_optimization_table
307 295
308 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA 296 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
309 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA xtensa_output_addr_const_extra 297 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA xtensa_output_addr_const_extra
298
299 #undef TARGET_LEGITIMATE_CONSTANT_P
300 #define TARGET_LEGITIMATE_CONSTANT_P xtensa_legitimate_constant_p
301
302 #undef TARGET_MACHINE_DEPENDENT_REORG
303 #define TARGET_MACHINE_DEPENDENT_REORG xtensa_reorg
304
305 #undef TARGET_CAN_USE_DOLOOP_P
306 #define TARGET_CAN_USE_DOLOOP_P xtensa_can_use_doloop_p
307
308 #undef TARGET_INVALID_WITHIN_DOLOOP
309 #define TARGET_INVALID_WITHIN_DOLOOP xtensa_invalid_within_doloop
310
311 #undef TARGET_CONDITIONAL_REGISTER_USAGE
312 #define TARGET_CONDITIONAL_REGISTER_USAGE xtensa_conditional_register_usage
313
314 #undef TARGET_HARD_REGNO_NREGS
315 #define TARGET_HARD_REGNO_NREGS xtensa_hard_regno_nregs
316 #undef TARGET_HARD_REGNO_MODE_OK
317 #define TARGET_HARD_REGNO_MODE_OK xtensa_hard_regno_mode_ok
318
319 #undef TARGET_MODES_TIEABLE_P
320 #define TARGET_MODES_TIEABLE_P xtensa_modes_tieable_p
321
322 #undef TARGET_CONSTANT_ALIGNMENT
323 #define TARGET_CONSTANT_ALIGNMENT xtensa_constant_alignment
324
325 #undef TARGET_STARTING_FRAME_OFFSET
326 #define TARGET_STARTING_FRAME_OFFSET xtensa_starting_frame_offset
310 327
311 struct gcc_target targetm = TARGET_INITIALIZER; 328 struct gcc_target targetm = TARGET_INITIALIZER;
312 329
313 330
314 /* Functions to test Xtensa immediate operand validity. */ 331 /* Functions to test Xtensa immediate operand validity. */
462 return -1; 479 return -1;
463 } 480 }
464 481
465 482
466 int 483 int
467 xtensa_valid_move (enum machine_mode mode, rtx *operands) 484 xtensa_valid_move (machine_mode mode, rtx *operands)
468 { 485 {
469 /* Either the destination or source must be a register, and the 486 /* Either the destination or source must be a register, and the
470 MAC16 accumulator doesn't count. */ 487 MAC16 accumulator doesn't count. */
471 488
472 if (register_operand (operands[0], mode)) 489 if (register_operand (operands[0], mode))
473 { 490 {
474 int dst_regnum = xt_true_regnum (operands[0]); 491 int dst_regnum = xt_true_regnum (operands[0]);
475 492
493 if (xtensa_tls_referenced_p (operands[1]))
494 return FALSE;
495
476 /* The stack pointer can only be assigned with a MOVSP opcode. */ 496 /* The stack pointer can only be assigned with a MOVSP opcode. */
477 if (dst_regnum == STACK_POINTER_REGNUM) 497 if (dst_regnum == STACK_POINTER_REGNUM)
478 return (mode == SImode 498 return !TARGET_WINDOWED_ABI
479 && register_operand (operands[1], mode) 499 || (mode == SImode
480 && !ACC_REG_P (xt_true_regnum (operands[1]))); 500 && register_operand (operands[1], mode)
501 && !ACC_REG_P (xt_true_regnum (operands[1])));
481 502
482 if (!ACC_REG_P (dst_regnum)) 503 if (!ACC_REG_P (dst_regnum))
483 return true; 504 return true;
484 } 505 }
485 if (register_operand (operands[1], mode)) 506 if (register_operand (operands[1], mode))
584 emit_insn (gen_ashrsi3 (dst, temp, shift)); 605 emit_insn (gen_ashrsi3 (dst, temp, shift));
585 } 606 }
586 607
587 608
588 bool 609 bool
589 xtensa_mem_offset (unsigned v, enum machine_mode mode) 610 xtensa_mem_offset (unsigned v, machine_mode mode)
590 { 611 {
591 switch (mode) 612 switch (mode)
592 { 613 {
593 case BLKmode: 614 case E_BLKmode:
594 /* Handle the worst case for block moves. See xtensa_expand_block_move 615 /* Handle the worst case for block moves. See xtensa_expand_block_move
595 where we emit an optimized block move operation if the block can be 616 where we emit an optimized block move operation if the block can be
596 moved in < "move_ratio" pieces. The worst case is when the block is 617 moved in < "move_ratio" pieces. The worst case is when the block is
597 aligned but has a size of (3 mod 4) (does this happen?) so that the 618 aligned but has a size of (3 mod 4) (does this happen?) so that the
598 last piece requires a byte load/store. */ 619 last piece requires a byte load/store. */
599 return (xtensa_uimm8 (v) 620 return (xtensa_uimm8 (v)
600 && xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO)); 621 && xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO));
601 622
602 case QImode: 623 case E_QImode:
603 return xtensa_uimm8 (v); 624 return xtensa_uimm8 (v);
604 625
605 case HImode: 626 case E_HImode:
606 return xtensa_uimm8x2 (v); 627 return xtensa_uimm8x2 (v);
607 628
608 case DFmode: 629 case E_DImode:
630 case E_DFmode:
609 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4)); 631 return (xtensa_uimm8x4 (v) && xtensa_uimm8x4 (v + 4));
610 632
611 default: 633 default:
612 break; 634 break;
613 } 635 }
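
The BLKmode case above checks the worst-case offset for an inline block move. A standalone plain-C sketch of that arithmetic, assuming MOVE_MAX is 4 on this port (so the largest inline move reaches 4 * 15 = 60 bytes past the starting offset):

/* Sketch only, not code from the port: both the first and the last offset
   touched by a worst-case inline block move must fit the unsigned 8-bit
   load/store offset field, i.e.
   xtensa_uimm8 (v) && xtensa_uimm8 (v + MOVE_MAX * LARGEST_MOVE_RATIO).  */
static int
blkmode_offset_ok (unsigned int v)
{
  return v <= 255 && v + 4 * 15 <= 255;
}
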
677 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */ 699 { LTU, xtensa_b4constu, 0, 0, 0, 0, 1 }, /* LTU */
678 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */ 700 { GEU, xtensa_b4constu, 1, 1, 1, 0, 1 }, /* LEU */
679 }; 701 };
680 702
681 enum internal_test test; 703 enum internal_test test;
682 enum machine_mode mode; 704 machine_mode mode;
683 struct cmp_info *p_info; 705 struct cmp_info *p_info;
684 706
685 test = map_test_to_internal_test (test_code); 707 test = map_test_to_internal_test (test_code);
686 gcc_assert (test != ITEST_MAX); 708 gcc_assert (test != ITEST_MAX);
687 709
788 return gen_rtx_fmt_ee (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx); 810 return gen_rtx_fmt_ee (invert ? EQ : NE, VOIDmode, brtmp, const0_rtx);
789 } 811 }
790 812
791 813
792 void 814 void
793 xtensa_expand_conditional_branch (rtx *operands, enum machine_mode mode) 815 xtensa_expand_conditional_branch (rtx *operands, machine_mode mode)
794 { 816 {
795 enum rtx_code test_code = GET_CODE (operands[0]); 817 enum rtx_code test_code = GET_CODE (operands[0]);
796 rtx cmp0 = operands[1]; 818 rtx cmp0 = operands[1];
797 rtx cmp1 = operands[2]; 819 rtx cmp1 = operands[2];
798 rtx cmp; 820 rtx cmp;
799 int invert; 821 int invert;
800 rtx label1, label2; 822 rtx label1, label2;
801 823
802 switch (mode) 824 switch (mode)
803 { 825 {
804 case DFmode: 826 case E_DFmode:
805 default: 827 default:
806 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1)); 828 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, cmp0, cmp1));
807 829
808 case SImode: 830 case E_SImode:
809 invert = FALSE; 831 invert = FALSE;
810 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert); 832 cmp = gen_int_relational (test_code, cmp0, cmp1, &invert);
811 break; 833 break;
812 834
813 case SFmode: 835 case E_SFmode:
814 if (!TARGET_HARD_FLOAT) 836 if (!TARGET_HARD_FLOAT)
815 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode, 837 fatal_insn ("bad test", gen_rtx_fmt_ee (test_code, VOIDmode,
816 cmp0, cmp1)); 838 cmp0, cmp1));
817 invert = FALSE; 839 invert = FALSE;
818 cmp = gen_float_relational (test_code, cmp0, cmp1); 840 cmp = gen_float_relational (test_code, cmp0, cmp1);
828 { 850 {
829 label2 = label1; 851 label2 = label1;
830 label1 = pc_rtx; 852 label1 = pc_rtx;
831 } 853 }
832 854
833 emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, 855 emit_jump_insn (gen_rtx_SET (pc_rtx,
834 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp, 856 gen_rtx_IF_THEN_ELSE (VOIDmode, cmp,
835 label1, 857 label1,
836 label2))); 858 label2)));
837 } 859 }
838 860
839 861
840 static rtx 862 static rtx
841 gen_conditional_move (enum rtx_code code, enum machine_mode mode, 863 gen_conditional_move (enum rtx_code code, machine_mode mode,
842 rtx op0, rtx op1) 864 rtx op0, rtx op1)
843 { 865 {
844 if (mode == SImode) 866 if (mode == SImode)
845 { 867 {
846 rtx cmp; 868 rtx cmp;
912 int 934 int
913 xtensa_expand_conditional_move (rtx *operands, int isflt) 935 xtensa_expand_conditional_move (rtx *operands, int isflt)
914 { 936 {
915 rtx dest = operands[0]; 937 rtx dest = operands[0];
916 rtx cmp = operands[1]; 938 rtx cmp = operands[1];
917 enum machine_mode cmp_mode = GET_MODE (XEXP (cmp, 0)); 939 machine_mode cmp_mode = GET_MODE (XEXP (cmp, 0));
918 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx); 940 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
919 941
920 if (!(cmp = gen_conditional_move (GET_CODE (cmp), cmp_mode, 942 if (!(cmp = gen_conditional_move (GET_CODE (cmp), cmp_mode,
921 XEXP (cmp, 0), XEXP (cmp, 1)))) 943 XEXP (cmp, 0), XEXP (cmp, 1))))
922 return 0; 944 return 0;
934 return 1; 956 return 1;
935 } 957 }
936 958
937 959
938 int 960 int
939 xtensa_expand_scc (rtx operands[4], enum machine_mode cmp_mode) 961 xtensa_expand_scc (rtx operands[4], machine_mode cmp_mode)
940 { 962 {
941 rtx dest = operands[0]; 963 rtx dest = operands[0];
942 rtx cmp; 964 rtx cmp;
943 rtx one_tmp, zero_tmp; 965 rtx one_tmp, zero_tmp;
944 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx); 966 rtx (*gen_fn) (rtx, rtx, rtx, rtx, rtx);
962 984
963 /* Split OP[1] into OP[2,3] and likewise for OP[0] into OP[0,1]. MODE is 985 /* Split OP[1] into OP[2,3] and likewise for OP[0] into OP[0,1]. MODE is
964 for the output, i.e., the input operands are twice as big as MODE. */ 986 for the output, i.e., the input operands are twice as big as MODE. */
965 987
966 void 988 void
967 xtensa_split_operand_pair (rtx operands[4], enum machine_mode mode) 989 xtensa_split_operand_pair (rtx operands[4], machine_mode mode)
968 { 990 {
969 switch (GET_CODE (operands[1])) 991 switch (GET_CODE (operands[1]))
970 { 992 {
971 case REG: 993 case REG:
972 operands[3] = gen_rtx_REG (mode, REGNO (operands[1]) + 1); 994 operands[3] = gen_rtx_REG (mode, REGNO (operands[1]) + 1);
1009 Return 1 if we have written out everything that needs to be done to 1031 Return 1 if we have written out everything that needs to be done to
1010 do the move. Otherwise, return 0 and the caller will emit the move 1032 do the move. Otherwise, return 0 and the caller will emit the move
1011 normally. */ 1033 normally. */
1012 1034
1013 int 1035 int
1014 xtensa_emit_move_sequence (rtx *operands, enum machine_mode mode) 1036 xtensa_emit_move_sequence (rtx *operands, machine_mode mode)
1015 { 1037 {
1016 rtx src = operands[1]; 1038 rtx src = operands[1];
1017 1039
1018 if (CONSTANT_P (src) 1040 if (CONSTANT_P (src)
1019 && (GET_CODE (src) != CONST_INT || ! xtensa_simm12b (INTVAL (src)))) 1041 && (GET_CODE (src) != CONST_INT || ! xtensa_simm12b (INTVAL (src))))
1038 } 1060 }
1039 emit_move_insn (dst, src); 1061 emit_move_insn (dst, src);
1040 return 1; 1062 return 1;
1041 } 1063 }
1042 1064
1043 if (! TARGET_CONST16) 1065 if (! TARGET_AUTO_LITPOOLS && ! TARGET_CONST16)
1044 { 1066 {
1045 src = force_const_mem (SImode, src); 1067 src = force_const_mem (SImode, src);
1046 operands[1] = src; 1068 operands[1] = src;
1047 } 1069 }
1048 1070
1091 && GET_CODE (SUBREG_REG (x)) == REG 1113 && GET_CODE (SUBREG_REG (x)) == REG
1092 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER) 1114 && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER)
1093 { 1115 {
1094 rtx temp = 1116 rtx temp =
1095 gen_rtx_SUBREG (GET_MODE (x), 1117 gen_rtx_SUBREG (GET_MODE (x),
1096 reg_equiv_mem [REGNO (SUBREG_REG (x))], 1118 reg_equiv_mem (REGNO (SUBREG_REG (x))),
1097 SUBREG_BYTE (x)); 1119 SUBREG_BYTE (x));
1098 x = alter_subreg (&temp); 1120 x = alter_subreg (&temp, true);
1099 } 1121 }
1100 return x; 1122 return x;
1101 } 1123 }
1102 1124
1103 1125
1122 rtx 1144 rtx
1123 xtensa_copy_incoming_a7 (rtx opnd) 1145 xtensa_copy_incoming_a7 (rtx opnd)
1124 { 1146 {
1125 rtx entry_insns = 0; 1147 rtx entry_insns = 0;
1126 rtx reg, tmp; 1148 rtx reg, tmp;
1127 enum machine_mode mode; 1149 machine_mode mode;
1128 1150
1129 if (!cfun->machine->need_a7_copy) 1151 if (!cfun->machine->need_a7_copy)
1130 return opnd; 1152 return opnd;
1131 1153
1132 /* This function should never be called again once a7 has been copied. */ 1154 /* This function should never be called again once a7 has been copied. */
1142 gcc_assert (SUBREG_BYTE (reg) == 0); 1164 gcc_assert (SUBREG_BYTE (reg) == 0);
1143 reg = SUBREG_REG (reg); 1165 reg = SUBREG_REG (reg);
1144 } 1166 }
1145 if (GET_CODE (reg) != REG 1167 if (GET_CODE (reg) != REG
1146 || REGNO (reg) > A7_REG 1168 || REGNO (reg) > A7_REG
1147 || REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) <= A7_REG) 1169 || REGNO (reg) + hard_regno_nregs (A7_REG, mode) <= A7_REG)
1148 return opnd; 1170 return opnd;
1149 1171
1150 /* 1-word args will always be in a7; 2-word args in a6/a7. */ 1172 /* 1-word args will always be in a7; 2-word args in a6/a7. */
1151 gcc_assert (REGNO (reg) + HARD_REGNO_NREGS (A7_REG, mode) - 1 == A7_REG); 1173 gcc_assert (REGNO (reg) + hard_regno_nregs (A7_REG, mode) - 1 == A7_REG);
1152 1174
1153 cfun->machine->need_a7_copy = false; 1175 cfun->machine->need_a7_copy = false;
1154 1176
1155 /* Copy a7 to a new pseudo at the function entry. Use gen_raw_REG to 1177 /* Copy a7 to a new pseudo at the function entry. Use gen_raw_REG to
1156 create the REG for a7 so that hard_frame_pointer_rtx is not used. */ 1178 create the REG for a7 so that hard_frame_pointer_rtx is not used. */
1158 start_sequence (); 1180 start_sequence ();
1159 tmp = gen_reg_rtx (mode); 1181 tmp = gen_reg_rtx (mode);
1160 1182
1161 switch (mode) 1183 switch (mode)
1162 { 1184 {
1163 case DFmode: 1185 case E_DFmode:
1164 case DImode: 1186 case E_DImode:
1165 /* Copy the value out of A7 here but keep the first word in A6 until 1187 /* Copy the value out of A7 here but keep the first word in A6 until
1166 after the set_frame_ptr insn. Otherwise, the register allocator 1188 after the set_frame_ptr insn. Otherwise, the register allocator
1167 may decide to put "subreg (tmp, 0)" in A7 and clobber the incoming 1189 may decide to put "subreg (tmp, 0)" in A7 and clobber the incoming
1168 value. */ 1190 value. */
1169 emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 4), 1191 emit_insn (gen_movsi_internal (gen_rtx_SUBREG (SImode, tmp, 4),
1170 gen_raw_REG (SImode, A7_REG))); 1192 gen_raw_REG (SImode, A7_REG)));
1171 break; 1193 break;
1172 case SFmode: 1194 case E_SFmode:
1173 emit_insn (gen_movsf_internal (tmp, gen_raw_REG (mode, A7_REG))); 1195 emit_insn (gen_movsf_internal (tmp, gen_raw_REG (mode, A7_REG)));
1174 break; 1196 break;
1175 case SImode: 1197 case E_SImode:
1176 emit_insn (gen_movsi_internal (tmp, gen_raw_REG (mode, A7_REG))); 1198 emit_insn (gen_movsi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1177 break; 1199 break;
1178 case HImode: 1200 case E_HImode:
1179 emit_insn (gen_movhi_internal (tmp, gen_raw_REG (mode, A7_REG))); 1201 emit_insn (gen_movhi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1180 break; 1202 break;
1181 case QImode: 1203 case E_QImode:
1182 emit_insn (gen_movqi_internal (tmp, gen_raw_REG (mode, A7_REG))); 1204 emit_insn (gen_movqi_internal (tmp, gen_raw_REG (mode, A7_REG)));
1183 break; 1205 break;
1184 default: 1206 default:
1185 gcc_unreachable (); 1207 gcc_unreachable ();
1186 } 1208 }
1230 operands[3] is the alignment */ 1252 operands[3] is the alignment */
1231 1253
1232 int 1254 int
1233 xtensa_expand_block_move (rtx *operands) 1255 xtensa_expand_block_move (rtx *operands)
1234 { 1256 {
1235 static const enum machine_mode mode_from_align[] = 1257 static const machine_mode mode_from_align[] =
1236 { 1258 {
1237 VOIDmode, QImode, HImode, VOIDmode, SImode, 1259 VOIDmode, QImode, HImode, VOIDmode, SImode,
1238 }; 1260 };
1239 1261
1240 rtx dst_mem = operands[0]; 1262 rtx dst_mem = operands[0];
1241 rtx src_mem = operands[1]; 1263 rtx src_mem = operands[1];
1242 HOST_WIDE_INT bytes, align; 1264 HOST_WIDE_INT bytes, align;
1243 int num_pieces, move_ratio; 1265 int num_pieces, move_ratio;
1244 rtx temp[2]; 1266 rtx temp[2];
1245 enum machine_mode mode[2]; 1267 machine_mode mode[2];
1246 int amount[2]; 1268 int amount[2];
1247 bool active[2]; 1269 bool active[2];
1248 int phase = 0; 1270 int phase = 0;
1249 int next; 1271 int next;
1250 int offset_ld = 0; 1272 int offset_ld = 0;
1304 amount[next] = next_amount; 1326 amount[next] = next_amount;
1305 mode[next] = mode_from_align[next_amount]; 1327 mode[next] = mode_from_align[next_amount];
1306 temp[next] = gen_reg_rtx (mode[next]); 1328 temp[next] = gen_reg_rtx (mode[next]);
1307 1329
1308 x = adjust_address (src_mem, mode[next], offset_ld); 1330 x = adjust_address (src_mem, mode[next], offset_ld);
1309 emit_insn (gen_rtx_SET (VOIDmode, temp[next], x)); 1331 emit_insn (gen_rtx_SET (temp[next], x));
1310 1332
1311 offset_ld += next_amount; 1333 offset_ld += next_amount;
1312 bytes -= next_amount; 1334 bytes -= next_amount;
1313 active[next] = true; 1335 active[next] = true;
1314 } 1336 }
1316 if (active[phase]) 1338 if (active[phase])
1317 { 1339 {
1318 active[phase] = false; 1340 active[phase] = false;
1319 1341
1320 x = adjust_address (dst_mem, mode[phase], offset_st); 1342 x = adjust_address (dst_mem, mode[phase], offset_st);
1321 emit_insn (gen_rtx_SET (VOIDmode, x, temp[phase])); 1343 emit_insn (gen_rtx_SET (x, temp[phase]));
1322 1344
1323 offset_st += amount[phase]; 1345 offset_st += amount[phase];
1324 } 1346 }
1325 } 1347 }
1326 while (active[next]); 1348 while (active[next]);
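
The load/store loop above keeps two temporaries in flight so that each store writes the piece loaded on the previous iteration. A minimal plain-C illustration of that two-phase pipeline (byte-sized pieces only, purely for exposition, not the RTL expander itself):

/* Two-register pipelined copy: load the "next" temp, store the "previous" one.  */
static void
pipelined_copy (unsigned char *dst, const unsigned char *src, int n)
{
  unsigned char t[2];
  int phase = 0, i;

  for (i = 0; i < n; i++)
    {
      t[phase ^ 1] = src[i];      /* load into the temp for the next phase */
      if (i > 0)
        dst[i - 1] = t[phase];    /* store the temp loaded last iteration */
      phase ^= 1;
    }
  if (n > 0)
    dst[n - 1] = t[phase];        /* drain the final piece */
}
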
1340 1362
1341 if (GET_CODE (containing_fp) != REG) 1363 if (GET_CODE (containing_fp) != REG)
1342 containing_fp = force_reg (Pmode, containing_fp); 1364 containing_fp = force_reg (Pmode, containing_fp);
1343 1365
1344 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"), 1366 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_nonlocal_goto"),
1345 LCT_NORMAL, VOIDmode, 2, 1367 LCT_NORMAL, VOIDmode,
1346 containing_fp, Pmode, 1368 containing_fp, Pmode,
1347 goto_handler, Pmode); 1369 goto_handler, Pmode);
1348 } 1370 }
1349 1371
1350 1372
1351 static struct machine_function * 1373 static struct machine_function *
1352 xtensa_init_machine_status (void) 1374 xtensa_init_machine_status (void)
1353 { 1375 {
1354 return ggc_alloc_cleared_machine_function (); 1376 return ggc_cleared_alloc<machine_function> ();
1355 } 1377 }
1356 1378
1357 1379
1358 /* Shift VAL of mode MODE left by COUNT bits. */ 1380 /* Shift VAL of mode MODE left by COUNT bits. */
1359 1381
1360 static inline rtx 1382 static inline rtx
1361 xtensa_expand_mask_and_shift (rtx val, enum machine_mode mode, rtx count) 1383 xtensa_expand_mask_and_shift (rtx val, machine_mode mode, rtx count)
1362 { 1384 {
1363 val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)), 1385 val = expand_simple_binop (SImode, AND, val, GEN_INT (GET_MODE_MASK (mode)),
1364 NULL_RTX, 1, OPTAB_DIRECT); 1386 NULL_RTX, 1, OPTAB_DIRECT);
1365 return expand_simple_binop (SImode, ASHIFT, val, count, 1387 return expand_simple_binop (SImode, ASHIFT, val, count,
1366 NULL_RTX, 1, OPTAB_DIRECT); 1388 NULL_RTX, 1, OPTAB_DIRECT);
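
What xtensa_expand_mask_and_shift emits reduces to masking the value to its mode and shifting it into position. A plain-C sketch for the QImode case, where 0xff corresponds to GET_MODE_MASK (QImode):

/* Sketch only: an SImode value holding a QImode quantity is masked to
   8 bits and then shifted left by COUNT bits.  */
static unsigned int
mask_and_shift_qi (unsigned int val, unsigned int count)
{
  return (val & 0xff) << count;
}
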
1382 /* Initialize structure AC for word access to HI and QI mode memory. */ 1404 /* Initialize structure AC for word access to HI and QI mode memory. */
1383 1405
1384 static void 1406 static void
1385 init_alignment_context (struct alignment_context *ac, rtx mem) 1407 init_alignment_context (struct alignment_context *ac, rtx mem)
1386 { 1408 {
1387 enum machine_mode mode = GET_MODE (mem); 1409 machine_mode mode = GET_MODE (mem);
1388 rtx byteoffset = NULL_RTX; 1410 rtx byteoffset = NULL_RTX;
1389 bool aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode)); 1411 bool aligned = (MEM_ALIGN (mem) >= GET_MODE_BITSIZE (SImode));
1390 1412
1391 if (aligned) 1413 if (aligned)
1392 ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned. */ 1414 ac->memsi = adjust_address (mem, SImode, 0); /* Memory is aligned. */
1430 } 1452 }
1431 1453
1432 if (ac->shift != NULL_RTX) 1454 if (ac->shift != NULL_RTX)
1433 { 1455 {
1434 /* Shift is the byte count, but we need the bitcount. */ 1456 /* Shift is the byte count, but we need the bitcount. */
1435 ac->shift = expand_simple_binop (SImode, MULT, ac->shift, 1457 gcc_assert (exact_log2 (BITS_PER_UNIT) >= 0);
1436 GEN_INT (BITS_PER_UNIT), 1458 ac->shift = expand_simple_binop (SImode, ASHIFT, ac->shift,
1459 GEN_INT (exact_log2 (BITS_PER_UNIT)),
1437 NULL_RTX, 1, OPTAB_DIRECT); 1460 NULL_RTX, 1, OPTAB_DIRECT);
1438 ac->modemask = expand_simple_binop (SImode, ASHIFT, 1461 ac->modemask = expand_simple_binop (SImode, ASHIFT,
1439 GEN_INT (GET_MODE_MASK (mode)), 1462 GEN_INT (GET_MODE_MASK (mode)),
1440 ac->shift, 1463 ac->shift,
1441 NULL_RTX, 1, OPTAB_DIRECT); 1464 NULL_RTX, 1, OPTAB_DIRECT);
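
The old code multiplied the byte offset by BITS_PER_UNIT; the new code shifts left by exact_log2 (BITS_PER_UNIT) instead. Since BITS_PER_UNIT is 8, both compute the same bit count, as this trivial sketch shows:

/* Equivalent ways to turn a byte offset into a bit shift amount
   (illustration only; 3 == exact_log2 (8)).  */
static unsigned int bits_by_mul (unsigned int byte_off)   { return byte_off * 8; }
static unsigned int bits_by_shift (unsigned int byte_off) { return byte_off << 3; }
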
1452 and NEW_RTX the value to set if CMP == MEM. */ 1475 and NEW_RTX the value to set if CMP == MEM. */
1453 1476
1454 void 1477 void
1455 xtensa_expand_compare_and_swap (rtx target, rtx mem, rtx cmp, rtx new_rtx) 1478 xtensa_expand_compare_and_swap (rtx target, rtx mem, rtx cmp, rtx new_rtx)
1456 { 1479 {
1457 enum machine_mode mode = GET_MODE (mem); 1480 machine_mode mode = GET_MODE (mem);
1458 struct alignment_context ac; 1481 struct alignment_context ac;
1459 rtx tmp, cmpv, newv, val; 1482 rtx tmp, cmpv, newv, val;
1460 rtx oldval = gen_reg_rtx (SImode); 1483 rtx oldval = gen_reg_rtx (SImode);
1461 rtx res = gen_reg_rtx (SImode); 1484 rtx res = gen_reg_rtx (SImode);
1462 rtx csloop = gen_label_rtx (); 1485 rtx_code_label *csloop = gen_label_rtx ();
1463 rtx csend = gen_label_rtx (); 1486 rtx_code_label *csend = gen_label_rtx ();
1464 1487
1465 init_alignment_context (&ac, mem); 1488 init_alignment_context (&ac, mem);
1466 1489
1467 if (ac.shift != NULL_RTX) 1490 if (ac.shift != NULL_RTX)
1468 { 1491 {
1516 1539
1517 void 1540 void
1518 xtensa_expand_atomic (enum rtx_code code, rtx target, rtx mem, rtx val, 1541 xtensa_expand_atomic (enum rtx_code code, rtx target, rtx mem, rtx val,
1519 bool after) 1542 bool after)
1520 { 1543 {
1521 enum machine_mode mode = GET_MODE (mem); 1544 machine_mode mode = GET_MODE (mem);
1522 struct alignment_context ac; 1545 struct alignment_context ac;
1523 rtx csloop = gen_label_rtx (); 1546 rtx_code_label *csloop = gen_label_rtx ();
1524 rtx cmp, tmp; 1547 rtx cmp, tmp;
1525 rtx old = gen_reg_rtx (SImode); 1548 rtx old = gen_reg_rtx (SImode);
1526 rtx new_rtx = gen_reg_rtx (SImode); 1549 rtx new_rtx = gen_reg_rtx (SImode);
1527 rtx orig = NULL_RTX; 1550 rtx orig = NULL_RTX;
1528 1551
1616 xtensa_setup_frame_addresses (void) 1639 xtensa_setup_frame_addresses (void)
1617 { 1640 {
1618 /* Set flag to cause TARGET_FRAME_POINTER_REQUIRED to return true. */ 1641 /* Set flag to cause TARGET_FRAME_POINTER_REQUIRED to return true. */
1619 cfun->machine->accesses_prev_frame = 1; 1642 cfun->machine->accesses_prev_frame = 1;
1620 1643
1621 emit_library_call 1644 if (TARGET_WINDOWED_ABI)
1622 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"), 1645 emit_library_call
1623 LCT_NORMAL, VOIDmode, 0); 1646 (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_libgcc_window_spill"),
1647 LCT_NORMAL, VOIDmode);
1624 } 1648 }
1625 1649
1626 1650
1627 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit 1651 /* Emit the assembly for the end of a zero-cost loop. Normally we just emit
1628 a comment showing where the end of the loop is. However, if there is a 1652 a comment showing where the end of the loop is. However, if there is a
1636 taken (because it is the last instruction in the loop), so we need to 1660 taken (because it is the last instruction in the loop), so we need to
1637 nop after the branch to prevent the loop count from being decremented 1661 nop after the branch to prevent the loop count from being decremented
1638 when the branch is taken. */ 1662 when the branch is taken. */
1639 1663
1640 void 1664 void
1641 xtensa_emit_loop_end (rtx insn, rtx *operands) 1665 xtensa_emit_loop_end (rtx_insn *insn, rtx *operands)
1642 { 1666 {
1643 char done = 0; 1667 char done = 0;
1644 1668
1645 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn)) 1669 for (insn = PREV_INSN (insn); insn && !done; insn = PREV_INSN (insn))
1646 { 1670 {
1657 1681
1658 default: 1682 default:
1659 { 1683 {
1660 rtx body = PATTERN (insn); 1684 rtx body = PATTERN (insn);
1661 1685
1662 if (GET_CODE (body) == JUMP_INSN) 1686 if (JUMP_P (body))
1663 { 1687 {
1664 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands); 1688 output_asm_insn (TARGET_DENSITY ? "nop.n" : "nop", operands);
1665 done = 1; 1689 done = 1;
1666 } 1690 }
1667 else if ((GET_CODE (body) != USE) 1691 else if ((GET_CODE (body) != USE)
1670 } 1694 }
1671 break; 1695 break;
1672 } 1696 }
1673 } 1697 }
1674 1698
1675 output_asm_insn ("# loop end for %0", operands); 1699 output_asm_insn ("%1_LEND:", operands);
1676 } 1700 }
1677 1701
1678 1702
1679 char * 1703 char *
1680 xtensa_emit_branch (bool inverted, bool immed, rtx *operands) 1704 xtensa_emit_branch (bool inverted, bool immed, rtx *operands)
1776 { 1800 {
1777 static char result[64]; 1801 static char result[64];
1778 rtx tgt = operands[callop]; 1802 rtx tgt = operands[callop];
1779 1803
1780 if (GET_CODE (tgt) == CONST_INT) 1804 if (GET_CODE (tgt) == CONST_INT)
1781 sprintf (result, "call8\t0x%lx", INTVAL (tgt)); 1805 sprintf (result, "call%d\t" HOST_WIDE_INT_PRINT_HEX,
1806 WINDOW_SIZE, INTVAL (tgt));
1782 else if (register_operand (tgt, VOIDmode)) 1807 else if (register_operand (tgt, VOIDmode))
1783 sprintf (result, "callx8\t%%%d", callop); 1808 sprintf (result, "callx%d\t%%%d", WINDOW_SIZE, callop);
1784 else 1809 else
1785 sprintf (result, "call8\t%%%d", callop); 1810 sprintf (result, "call%d\t%%%d", WINDOW_SIZE, callop);
1786 1811
1787 return result; 1812 return result;
1788 } 1813 }
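
The call templates are now parameterized on WINDOW_SIZE instead of hard-coding call8/callx8. Assuming WINDOW_SIZE is 8 for the windowed ABI and 0 for the CALL0 ABI, the strings produced look like this (self-contained illustration, not code from the port):

#include <stdio.h>

int
main (void)
{
  char buf[64];

  sprintf (buf, "call%d\t%%%d", 8, 0);    /* windowed ABI:            call8   %0 */
  puts (buf);
  sprintf (buf, "callx%d\t%%%d", 0, 0);   /* CALL0 ABI, register tgt: callx0  %0 */
  puts (buf);
  return 0;
}
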
1789 1814
1790 1815
1791 bool 1816 bool
1792 xtensa_legitimate_address_p (enum machine_mode mode, rtx addr, bool strict) 1817 xtensa_legitimate_address_p (machine_mode mode, rtx addr, bool strict)
1793 { 1818 {
1794 /* Allow constant pool addresses. */ 1819 /* Allow constant pool addresses. */
1795 if (mode != BLKmode && GET_MODE_SIZE (mode) >= UNITS_PER_WORD 1820 if (mode != BLKmode && GET_MODE_SIZE (mode) >= UNITS_PER_WORD
1796 && ! TARGET_CONST16 && constantpool_address_p (addr) 1821 && ! TARGET_CONST16 && constantpool_address_p (addr)
1797 && ! xtensa_tls_referenced_p (addr)) 1822 && ! xtensa_tls_referenced_p (addr))
1856 1881
1857 return xtensa_tls_module_base_symbol; 1882 return xtensa_tls_module_base_symbol;
1858 } 1883 }
1859 1884
1860 1885
1861 static rtx 1886 static rtx_insn *
1862 xtensa_call_tls_desc (rtx sym, rtx *retp) 1887 xtensa_call_tls_desc (rtx sym, rtx *retp)
1863 { 1888 {
1864 rtx fn, arg, a10, call_insn, insns; 1889 rtx fn, arg, a_io;
1890 rtx_insn *call_insn, *insns;
1865 1891
1866 start_sequence (); 1892 start_sequence ();
1867 fn = gen_reg_rtx (Pmode); 1893 fn = gen_reg_rtx (Pmode);
1868 arg = gen_reg_rtx (Pmode); 1894 arg = gen_reg_rtx (Pmode);
1869 a10 = gen_rtx_REG (Pmode, 10); 1895 a_io = gen_rtx_REG (Pmode, WINDOW_SIZE + 2);
1870 1896
1871 emit_insn (gen_tls_func (fn, sym)); 1897 emit_insn (gen_tls_func (fn, sym));
1872 emit_insn (gen_tls_arg (arg, sym)); 1898 emit_insn (gen_tls_arg (arg, sym));
1873 emit_move_insn (a10, arg); 1899 emit_move_insn (a_io, arg);
1874 call_insn = emit_call_insn (gen_tls_call (a10, fn, sym, const1_rtx)); 1900 call_insn = emit_call_insn (gen_tls_call (a_io, fn, sym, const1_rtx));
1875 CALL_INSN_FUNCTION_USAGE (call_insn) 1901 use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), a_io);
1876 = gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_USE (VOIDmode, a10),
1877 CALL_INSN_FUNCTION_USAGE (call_insn));
1878 insns = get_insns (); 1902 insns = get_insns ();
1879 end_sequence (); 1903 end_sequence ();
1880 1904
1881 *retp = a10; 1905 *retp = a_io;
1882 return insns; 1906 return insns;
1883 } 1907 }
1884 1908
1885 1909
1886 static rtx 1910 static rtx
1887 xtensa_legitimize_tls_address (rtx x) 1911 xtensa_legitimize_tls_address (rtx x)
1888 { 1912 {
1889 unsigned int model = SYMBOL_REF_TLS_MODEL (x); 1913 unsigned int model = SYMBOL_REF_TLS_MODEL (x);
1890 rtx dest, tp, ret, modbase, base, addend, insns; 1914 rtx dest, tp, ret, modbase, base, addend;
1915 rtx_insn *insns;
1891 1916
1892 dest = gen_reg_rtx (Pmode); 1917 dest = gen_reg_rtx (Pmode);
1893 switch (model) 1918 switch (model)
1894 { 1919 {
1895 case TLS_MODEL_GLOBAL_DYNAMIC: 1920 case TLS_MODEL_GLOBAL_DYNAMIC:
1907 break; 1932 break;
1908 1933
1909 case TLS_MODEL_INITIAL_EXEC: 1934 case TLS_MODEL_INITIAL_EXEC:
1910 case TLS_MODEL_LOCAL_EXEC: 1935 case TLS_MODEL_LOCAL_EXEC:
1911 tp = gen_reg_rtx (SImode); 1936 tp = gen_reg_rtx (SImode);
1912 emit_insn (gen_load_tp (tp)); 1937 emit_insn (gen_get_thread_pointersi (tp));
1913 addend = force_reg (SImode, gen_sym_TPOFF (x)); 1938 addend = force_reg (SImode, gen_sym_TPOFF (x));
1914 emit_insn (gen_addsi3 (dest, tp, addend)); 1939 emit_insn (gen_addsi3 (dest, tp, addend));
1915 break; 1940 break;
1916 1941
1917 default: 1942 default:
1923 1948
1924 1949
1925 rtx 1950 rtx
1926 xtensa_legitimize_address (rtx x, 1951 xtensa_legitimize_address (rtx x,
1927 rtx oldx ATTRIBUTE_UNUSED, 1952 rtx oldx ATTRIBUTE_UNUSED,
1928 enum machine_mode mode) 1953 machine_mode mode)
1929 { 1954 {
1930 if (xtensa_tls_symbol_p (x)) 1955 if (xtensa_tls_symbol_p (x))
1931 return xtensa_legitimize_tls_address (x); 1956 return xtensa_legitimize_tls_address (x);
1932 1957
1933 if (GET_CODE (x) == PLUS) 1958 if (GET_CODE (x) == PLUS)
1949 && xtensa_mem_offset (INTVAL (plus1) & 0xff, mode) 1974 && xtensa_mem_offset (INTVAL (plus1) & 0xff, mode)
1950 && xtensa_simm8x256 (INTVAL (plus1) & ~0xff)) 1975 && xtensa_simm8x256 (INTVAL (plus1) & ~0xff))
1951 { 1976 {
1952 rtx temp = gen_reg_rtx (Pmode); 1977 rtx temp = gen_reg_rtx (Pmode);
1953 rtx addmi_offset = GEN_INT (INTVAL (plus1) & ~0xff); 1978 rtx addmi_offset = GEN_INT (INTVAL (plus1) & ~0xff);
1954 emit_insn (gen_rtx_SET (Pmode, temp, 1979 emit_insn (gen_rtx_SET (temp, gen_rtx_PLUS (Pmode, plus0,
1955 gen_rtx_PLUS (Pmode, plus0, addmi_offset))); 1980 addmi_offset)));
1956 return gen_rtx_PLUS (Pmode, temp, GEN_INT (INTVAL (plus1) & 0xff)); 1981 return gen_rtx_PLUS (Pmode, temp, GEN_INT (INTVAL (plus1) & 0xff));
1957 } 1982 }
1958 } 1983 }
1959 1984
1960 return x; 1985 return x;
1969 our definition of LOAD_EXTEND_OP. This is wrong because the high 1994 our definition of LOAD_EXTEND_OP. This is wrong because the high
1970 bits of a 16-bit value in the constant pool are now sign-extended 1995 bits of a 16-bit value in the constant pool are now sign-extended
1971 by default. */ 1996 by default. */
1972 1997
1973 static bool 1998 static bool
1974 xtensa_mode_dependent_address_p (const_rtx addr) 1999 xtensa_mode_dependent_address_p (const_rtx addr,
2000 addr_space_t as ATTRIBUTE_UNUSED)
1975 { 2001 {
1976 return constantpool_address_p (addr); 2002 return constantpool_address_p (addr);
1977 } 2003 }
1978
1979 /* Helper for xtensa_tls_referenced_p. */
1980
1981 static int
1982 xtensa_tls_referenced_p_1 (rtx *x, void *data ATTRIBUTE_UNUSED)
1983 {
1984 if (GET_CODE (*x) == SYMBOL_REF)
1985 return SYMBOL_REF_TLS_MODEL (*x) != 0;
1986
1987 /* Ignore TLS references that have already been legitimized. */
1988 if (GET_CODE (*x) == UNSPEC)
1989 {
1990 switch (XINT (*x, 1))
1991 {
1992 case UNSPEC_TPOFF:
1993 case UNSPEC_DTPOFF:
1994 case UNSPEC_TLS_FUNC:
1995 case UNSPEC_TLS_ARG:
1996 case UNSPEC_TLS_CALL:
1997 return -1;
1998 default:
1999 break;
2000 }
2001 }
2002
2003 return 0;
2004 }
2005
2006 2004
2007 /* Return TRUE if X contains any TLS symbol references. */ 2005 /* Return TRUE if X contains any TLS symbol references. */
2008 2006
2009 bool 2007 bool
2010 xtensa_tls_referenced_p (rtx x) 2008 xtensa_tls_referenced_p (rtx x)
2011 { 2009 {
2012 if (! TARGET_HAVE_TLS) 2010 if (! TARGET_HAVE_TLS)
2013 return false; 2011 return false;
2014 2012
2015 return for_each_rtx (&x, xtensa_tls_referenced_p_1, NULL); 2013 subrtx_iterator::array_type array;
2014 FOR_EACH_SUBRTX (iter, array, x, ALL)
2015 {
2016 const_rtx x = *iter;
2017 if (GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (x) != 0)
2018 return true;
2019
2020 /* Ignore TLS references that have already been legitimized. */
2021 if (GET_CODE (x) == UNSPEC)
2022 switch (XINT (x, 1))
2023 {
2024 case UNSPEC_TPOFF:
2025 case UNSPEC_DTPOFF:
2026 case UNSPEC_TLS_FUNC:
2027 case UNSPEC_TLS_ARG:
2028 case UNSPEC_TLS_CALL:
2029 iter.skip_subrtxes ();
2030 break;
2031 default:
2032 break;
2033 }
2034 }
2035 return false;
2036 }
2037
2038
2039 /* Implement TARGET_CANNOT_FORCE_CONST_MEM. */
2040
2041 static bool
2042 xtensa_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
2043 {
2044 return xtensa_tls_referenced_p (x);
2016 } 2045 }
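
xtensa_tls_referenced_p now walks the whole expression with FOR_EACH_SUBRTX and prunes already-legitimized TLS unspecs, rather than using a for_each_rtx callback. A plain-C analogue of that pruned tree walk, on a generic hypothetical tree type rather than GCC's rtl:

/* Illustration only: report whether a tree contains a TLS symbol, skipping
   subtrees that represent already-legitimized TLS references (the early
   return plays the role of iter.skip_subrtxes () above).  */
struct expr
{
  int is_tls_symbol;
  int is_legitimized_tls;
  int nkids;
  struct expr **kids;
};

static int
refs_tls_p (const struct expr *e)
{
  int i;

  if (e->is_tls_symbol)
    return 1;
  if (e->is_legitimized_tls)
    return 0;
  for (i = 0; i < e->nkids; i++)
    if (refs_tls_p (e->kids[i]))
      return 1;
  return 0;
}
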
2017 2046
2018 2047
2019 /* Return the debugger register number to use for 'regno'. */ 2048 /* Return the debugger register number to use for 'regno'. */
2020 2049
2066 2095
2067 2096
2068 /* Advance the argument to the next argument position. */ 2097 /* Advance the argument to the next argument position. */
2069 2098
2070 static void 2099 static void
2071 xtensa_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, 2100 xtensa_function_arg_advance (cumulative_args_t cum, machine_mode mode,
2072 const_tree type, bool named ATTRIBUTE_UNUSED) 2101 const_tree type, bool named ATTRIBUTE_UNUSED)
2073 { 2102 {
2074 int words, max; 2103 int words, max;
2075 int *arg_words; 2104 int *arg_words;
2076 2105
2077 arg_words = &cum->arg_words; 2106 arg_words = &get_cumulative_args (cum)->arg_words;
2078 max = MAX_ARGS_IN_REGISTERS; 2107 max = MAX_ARGS_IN_REGISTERS;
2079 2108
2080 words = (((mode != BLKmode) 2109 words = (((mode != BLKmode)
2081 ? (int) GET_MODE_SIZE (mode) 2110 ? (int) GET_MODE_SIZE (mode)
2082 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD; 2111 : int_size_in_bytes (type)) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2093 /* Return an RTL expression containing the register for the given mode, 2122 /* Return an RTL expression containing the register for the given mode,
2094 or 0 if the argument is to be passed on the stack. INCOMING_P is nonzero 2123 or 0 if the argument is to be passed on the stack. INCOMING_P is nonzero
2095 if this is an incoming argument to the current function. */ 2124 if this is an incoming argument to the current function. */
2096 2125
2097 static rtx 2126 static rtx
2098 xtensa_function_arg_1 (CUMULATIVE_ARGS *cum, enum machine_mode mode, 2127 xtensa_function_arg_1 (cumulative_args_t cum_v, machine_mode mode,
2099 const_tree type, bool incoming_p) 2128 const_tree type, bool incoming_p)
2100 { 2129 {
2130 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
2101 int regbase, words, max; 2131 int regbase, words, max;
2102 int *arg_words; 2132 int *arg_words;
2103 int regno; 2133 int regno;
2104 2134
2105 arg_words = &cum->arg_words; 2135 arg_words = &cum->arg_words;
2120 return (rtx)0; 2150 return (rtx)0;
2121 2151
2122 regno = regbase + *arg_words; 2152 regno = regbase + *arg_words;
2123 2153
2124 if (cum->incoming && regno <= A7_REG && regno + words > A7_REG) 2154 if (cum->incoming && regno <= A7_REG && regno + words > A7_REG)
2125 cfun->machine->need_a7_copy = true; 2155 cfun->machine->need_a7_copy = TARGET_WINDOWED_ABI;
2126 2156
2127 return gen_rtx_REG (mode, regno); 2157 return gen_rtx_REG (mode, regno);
2128 } 2158 }
2129 2159
2130 /* Implement TARGET_FUNCTION_ARG. */ 2160 /* Implement TARGET_FUNCTION_ARG. */
2131 2161
2132 static rtx 2162 static rtx
2133 xtensa_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, 2163 xtensa_function_arg (cumulative_args_t cum, machine_mode mode,
2134 const_tree type, bool named ATTRIBUTE_UNUSED) 2164 const_tree type, bool named ATTRIBUTE_UNUSED)
2135 { 2165 {
2136 return xtensa_function_arg_1 (cum, mode, type, false); 2166 return xtensa_function_arg_1 (cum, mode, type, false);
2137 } 2167 }
2138 2168
2139 /* Implement TARGET_FUNCTION_INCOMING_ARG. */ 2169 /* Implement TARGET_FUNCTION_INCOMING_ARG. */
2140 2170
2141 static rtx 2171 static rtx
2142 xtensa_function_incoming_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, 2172 xtensa_function_incoming_arg (cumulative_args_t cum, machine_mode mode,
2143 const_tree type, bool named ATTRIBUTE_UNUSED) 2173 const_tree type, bool named ATTRIBUTE_UNUSED)
2144 { 2174 {
2145 return xtensa_function_arg_1 (cum, mode, type, true); 2175 return xtensa_function_arg_1 (cum, mode, type, true);
2146 } 2176 }
2147 2177
2148 static unsigned int 2178 static unsigned int
2149 xtensa_function_arg_boundary (enum machine_mode mode, const_tree type) 2179 xtensa_function_arg_boundary (machine_mode mode, const_tree type)
2150 { 2180 {
2151 unsigned int alignment; 2181 unsigned int alignment;
2152 2182
2153 alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode); 2183 alignment = type ? TYPE_ALIGN (type) : GET_MODE_ALIGNMENT (mode);
2154 if (alignment < PARM_BOUNDARY) 2184 if (alignment < PARM_BOUNDARY)
2170 2200
2171 static void 2201 static void
2172 xtensa_option_override (void) 2202 xtensa_option_override (void)
2173 { 2203 {
2174 int regno; 2204 int regno;
2175 enum machine_mode mode; 2205 machine_mode mode;
2206
2207 /* Use CONST16 in the absence of L32R.
2208 Set it in the TARGET_OPTION_OVERRIDE to avoid dependency on xtensa
2209 configuration in the xtensa-common.c */
2210
2211 if (!TARGET_L32R)
2212 target_flags |= MASK_CONST16;
2176 2213
2177 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT) 2214 if (!TARGET_BOOLEANS && TARGET_HARD_FLOAT)
2178 error ("boolean registers required for the floating-point option"); 2215 error ("boolean registers required for the floating-point option");
2179 2216
2180 /* Set up array giving whether a given register can hold a given mode. */ 2217 /* Set up array giving whether a given register can hold a given mode. */
2181 for (mode = VOIDmode; 2218 for (mode = VOIDmode;
2182 mode != MAX_MACHINE_MODE; 2219 mode != MAX_MACHINE_MODE;
2183 mode = (enum machine_mode) ((int) mode + 1)) 2220 mode = (machine_mode) ((int) mode + 1))
2184 { 2221 {
2185 int size = GET_MODE_SIZE (mode); 2222 int size = GET_MODE_SIZE (mode);
2186 enum mode_class mclass = GET_MODE_CLASS (mode); 2223 enum mode_class mclass = GET_MODE_CLASS (mode);
2187 2224
2188 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++) 2225 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2199 else if (BR_REG_P (regno)) 2236 else if (BR_REG_P (regno))
2200 temp = (TARGET_BOOLEANS && (mode == CCmode)); 2237 temp = (TARGET_BOOLEANS && (mode == CCmode));
2201 else 2238 else
2202 temp = FALSE; 2239 temp = FALSE;
2203 2240
2204 xtensa_hard_regno_mode_ok[(int) mode][regno] = temp; 2241 xtensa_hard_regno_mode_ok_p[(int) mode][regno] = temp;
2205 } 2242 }
2206 } 2243 }
2207 2244
2208 init_machine_status = xtensa_init_machine_status; 2245 init_machine_status = xtensa_init_machine_status;
2209 2246
2232 if (flag_reorder_blocks_and_partition) 2269 if (flag_reorder_blocks_and_partition)
2233 { 2270 {
2234 flag_reorder_blocks_and_partition = 0; 2271 flag_reorder_blocks_and_partition = 0;
2235 flag_reorder_blocks = 1; 2272 flag_reorder_blocks = 1;
2236 } 2273 }
2274 }
2275
2276 /* Implement TARGET_HARD_REGNO_NREGS. */
2277
2278 static unsigned int
2279 xtensa_hard_regno_nregs (unsigned int regno, machine_mode mode)
2280 {
2281 if (FP_REG_P (regno))
2282 return CEIL (GET_MODE_SIZE (mode), UNITS_PER_FPREG);
2283 return CEIL (GET_MODE_SIZE (mode), UNITS_PER_WORD);
2284 }
2285
2286 /* Implement TARGET_HARD_REGNO_MODE_OK. */
2287
2288 static bool
2289 xtensa_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
2290 {
2291 return xtensa_hard_regno_mode_ok_p[mode][regno];
2292 }
2293
2294 /* Implement TARGET_MODES_TIEABLE_P. */
2295
2296 static bool
2297 xtensa_modes_tieable_p (machine_mode mode1, machine_mode mode2)
2298 {
2299 return ((GET_MODE_CLASS (mode1) == MODE_FLOAT
2300 || GET_MODE_CLASS (mode1) == MODE_COMPLEX_FLOAT)
2301 == (GET_MODE_CLASS (mode2) == MODE_FLOAT
2302 || GET_MODE_CLASS (mode2) == MODE_COMPLEX_FLOAT));
2237 } 2303 }
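
xtensa_hard_regno_nregs supplies the register-count calculation formerly provided by the HARD_REGNO_NREGS macro: the mode size divided by the register width, rounded up. A worked plain-C sketch, assuming UNITS_PER_WORD and UNITS_PER_FPREG are both 4 on this port:

/* Arithmetic illustration only: CEIL (mode_size_bytes, 4).
   DFmode (8 bytes) -> 2 registers, SImode (4) -> 1, QImode (1) -> 1.  */
static unsigned int
regs_needed (unsigned int mode_size_bytes)
{
  return (mode_size_bytes + 4 - 1) / 4;
}
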
2238 2304
2239 /* A C compound statement to output to stdio stream STREAM the 2305 /* A C compound statement to output to stdio stream STREAM the
2240 assembler syntax for an instruction operand X. X is an RTL 2306 assembler syntax for an instruction operand X. X is an RTL
2241 expression. 2307 expression.
2314 2380
2315 case 'N': 2381 case 'N':
2316 if (GET_CODE (x) == MEM 2382 if (GET_CODE (x) == MEM
2317 && (GET_MODE (x) == DFmode || GET_MODE (x) == DImode)) 2383 && (GET_MODE (x) == DFmode || GET_MODE (x) == DImode))
2318 { 2384 {
2319 x = adjust_address (x, GET_MODE (x) == DFmode ? SFmode : SImode, 4); 2385 x = adjust_address (x, GET_MODE (x) == DFmode ? E_SFmode : E_SImode,
2320 output_address (XEXP (x, 0)); 2386 4);
2387 output_address (GET_MODE (x), XEXP (x, 0));
2321 } 2388 }
2322 else 2389 else
2323 output_operand_lossage ("invalid %%N value"); 2390 output_operand_lossage ("invalid %%N value");
2324 break; 2391 break;
2325 2392
2342 output_operand_lossage ("invalid %%K value"); 2409 output_operand_lossage ("invalid %%K value");
2343 break; 2410 break;
2344 2411
2345 case 'L': 2412 case 'L':
2346 if (GET_CODE (x) == CONST_INT) 2413 if (GET_CODE (x) == CONST_INT)
2347 fprintf (file, "%ld", (32 - INTVAL (x)) & 0x1f); 2414 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (32 - INTVAL (x)) & 0x1f);
2348 else 2415 else
2349 output_operand_lossage ("invalid %%L value"); 2416 output_operand_lossage ("invalid %%L value");
2350 break; 2417 break;
2351 2418
2352 case 'R': 2419 case 'R':
2353 if (GET_CODE (x) == CONST_INT) 2420 if (GET_CODE (x) == CONST_INT)
2354 fprintf (file, "%ld", INTVAL (x) & 0x1f); 2421 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0x1f);
2355 else 2422 else
2356 output_operand_lossage ("invalid %%R value"); 2423 output_operand_lossage ("invalid %%R value");
2357 break; 2424 break;
2358 2425
2359 case 'x': 2426 case 'x':
2363 output_operand_lossage ("invalid %%x value"); 2430 output_operand_lossage ("invalid %%x value");
2364 break; 2431 break;
2365 2432
2366 case 'd': 2433 case 'd':
2367 if (GET_CODE (x) == CONST_INT) 2434 if (GET_CODE (x) == CONST_INT)
2368 fprintf (file, "%ld", INTVAL (x)); 2435 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
2369 else 2436 else
2370 output_operand_lossage ("invalid %%d value"); 2437 output_operand_lossage ("invalid %%d value");
2371 break; 2438 break;
2372 2439
2373 case 't': 2440 case 't':
2377 printx (file, INTVAL (x)); 2444 printx (file, INTVAL (x));
2378 fputs (letter == 't' ? "@h" : "@l", file); 2445 fputs (letter == 't' ? "@h" : "@l", file);
2379 } 2446 }
2380 else if (GET_CODE (x) == CONST_DOUBLE) 2447 else if (GET_CODE (x) == CONST_DOUBLE)
2381 { 2448 {
2382 REAL_VALUE_TYPE r;
2383 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2384 if (GET_MODE (x) == SFmode) 2449 if (GET_MODE (x) == SFmode)
2385 { 2450 {
2386 long l; 2451 long l;
2387 REAL_VALUE_TO_TARGET_SINGLE (r, l); 2452 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), l);
2388 fprintf (file, "0x%08lx@%c", l, letter == 't' ? 'h' : 'l'); 2453 fprintf (file, "0x%08lx@%c", l, letter == 't' ? 'h' : 'l');
2389 } 2454 }
2390 else 2455 else
2391 output_operand_lossage ("invalid %%t/%%b value"); 2456 output_operand_lossage ("invalid %%t/%%b value");
2392 } 2457 }
2412 output_addr_const (file, x); 2477 output_addr_const (file, x);
2413 fputs (letter == 't' ? "@h" : "@l", file); 2478 fputs (letter == 't' ? "@h" : "@l", file);
2414 } 2479 }
2415 break; 2480 break;
2416 2481
2482 case 'y':
2483 if (GET_CODE (x) == CONST_DOUBLE &&
2484 GET_MODE (x) == SFmode)
2485 {
2486 long l;
2487 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x), l);
2488 fprintf (file, "0x%08lx", l);
2489 break;
2490 }
2491
2492 /* fall through */
2493
2417 default: 2494 default:
2418 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG) 2495 if (GET_CODE (x) == REG || GET_CODE (x) == SUBREG)
2419 fprintf (file, "%s", reg_names[xt_true_regnum (x)]); 2496 fprintf (file, "%s", reg_names[xt_true_regnum (x)]);
2420 else if (GET_CODE (x) == MEM) 2497 else if (GET_CODE (x) == MEM)
2421 output_address (XEXP (x, 0)); 2498 output_address (GET_MODE (x), XEXP (x, 0));
2422 else if (GET_CODE (x) == CONST_INT) 2499 else if (GET_CODE (x) == CONST_INT)
2423 fprintf (file, "%ld", INTVAL (x)); 2500 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
2424 else 2501 else
2425 output_addr_const (file, x); 2502 output_addr_const (file, x);
2426 } 2503 }
2427 } 2504 }
2428 2505
2516 } 2593 }
2517 } 2594 }
2518 return false; 2595 return false;
2519 } 2596 }
2520 2597
2598 static void
2599 xtensa_output_integer_literal_parts (FILE *file, rtx x, int size)
2600 {
2601 if (size > 4 && !(size & (size - 1)))
2602 {
2603 rtx first, second;
2604
2605 split_double (x, &first, &second);
2606 xtensa_output_integer_literal_parts (file, first, size / 2);
2607 fputs (", ", file);
2608 xtensa_output_integer_literal_parts (file, second, size / 2);
2609 }
2610 else if (size == 4)
2611 {
2612 output_addr_const (file, x);
2613 }
2614 else
2615 {
2616 gcc_unreachable();
2617 }
2618 }
2521 2619
2522 void 2620 void
2523 xtensa_output_literal (FILE *file, rtx x, enum machine_mode mode, int labelno) 2621 xtensa_output_literal (FILE *file, rtx x, machine_mode mode, int labelno)
2524 { 2622 {
2525 long value_long[2]; 2623 long value_long[2];
2526 REAL_VALUE_TYPE r;
2527 int size;
2528 rtx first, second;
2529 2624
2530 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno); 2625 fprintf (file, "\t.literal .LC%u, ", (unsigned) labelno);
2531 2626
2532 switch (GET_MODE_CLASS (mode)) 2627 switch (GET_MODE_CLASS (mode))
2533 { 2628 {
2534 case MODE_FLOAT: 2629 case MODE_FLOAT:
2535 gcc_assert (GET_CODE (x) == CONST_DOUBLE); 2630 gcc_assert (GET_CODE (x) == CONST_DOUBLE);
2536 2631
2537 REAL_VALUE_FROM_CONST_DOUBLE (r, x);
2538 switch (mode) 2632 switch (mode)
2539 { 2633 {
2540 case SFmode: 2634 case E_SFmode:
2541 REAL_VALUE_TO_TARGET_SINGLE (r, value_long[0]); 2635 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (x),
2636 value_long[0]);
2542 if (HOST_BITS_PER_LONG > 32) 2637 if (HOST_BITS_PER_LONG > 32)
2543 value_long[0] &= 0xffffffff; 2638 value_long[0] &= 0xffffffff;
2544 fprintf (file, "0x%08lx\n", value_long[0]); 2639 fprintf (file, "0x%08lx\n", value_long[0]);
2545 break; 2640 break;
2546 2641
2547 case DFmode: 2642 case E_DFmode:
2548 REAL_VALUE_TO_TARGET_DOUBLE (r, value_long); 2643 REAL_VALUE_TO_TARGET_DOUBLE (*CONST_DOUBLE_REAL_VALUE (x),
2644 value_long);
2549 if (HOST_BITS_PER_LONG > 32) 2645 if (HOST_BITS_PER_LONG > 32)
2550 { 2646 {
2551 value_long[0] &= 0xffffffff; 2647 value_long[0] &= 0xffffffff;
2552 value_long[1] &= 0xffffffff; 2648 value_long[1] &= 0xffffffff;
2553 } 2649 }
2561 2657
2562 break; 2658 break;
2563 2659
2564 case MODE_INT: 2660 case MODE_INT:
2565 case MODE_PARTIAL_INT: 2661 case MODE_PARTIAL_INT:
2566 size = GET_MODE_SIZE (mode); 2662 xtensa_output_integer_literal_parts (file, x, GET_MODE_SIZE (mode));
2567 switch (size) 2663 fputs ("\n", file);
2568 {
2569 case 4:
2570 output_addr_const (file, x);
2571 fputs ("\n", file);
2572 break;
2573
2574 case 8:
2575 split_double (x, &first, &second);
2576 output_addr_const (file, first);
2577 fputs (", ", file);
2578 output_addr_const (file, second);
2579 fputs ("\n", file);
2580 break;
2581
2582 default:
2583 gcc_unreachable ();
2584 }
2585 break; 2664 break;
2586 2665
2587 default: 2666 default:
2588 gcc_unreachable (); 2667 gcc_unreachable ();
2589 } 2668 }
2590 } 2669 }
2591 2670
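A worked example (with the label number arbitrarily taken as 0): the SFmode constant 1.0 is 0x3f800000 in IEEE single precision, so the routine above emits

    .literal .LC0, 0x3f800000

while an 8-byte MODE_INT constant is handed to xtensa_output_integer_literal_parts, which prints its two 4-byte halves separated by a comma.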
2671 static bool
2672 xtensa_call_save_reg(int regno)
2673 {
2674 if (TARGET_WINDOWED_ABI)
2675 return false;
2676
2677 if (regno == A0_REG)
2678 return crtl->profile || !crtl->is_leaf || crtl->calls_eh_return ||
2679 df_regs_ever_live_p (regno);
2680
2681 if (crtl->calls_eh_return && regno >= 2 && regno < 4)
2682 return true;
2683
2684 return !fixed_regs[regno] && !call_used_regs[regno] &&
2685 df_regs_ever_live_p (regno);
2686 }
2592 2687
2593 /* Return the bytes needed to compute the frame pointer from the current 2688 /* Return the bytes needed to compute the frame pointer from the current
2594 stack pointer. */ 2689 stack pointer. */
2595 2690
2596 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT) 2691 #define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
2597 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1)) 2692 #define XTENSA_STACK_ALIGN(LOC) (((LOC) + STACK_BYTES-1) & ~(STACK_BYTES-1))
2598 2693
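A small worked example of the macro (assuming STACK_BOUNDARY is 128 bits, so STACK_BYTES is 16):

    /* XTENSA_STACK_ALIGN rounds a byte count up to the next multiple of
       STACK_BYTES:
         XTENSA_STACK_ALIGN (0)  == 0
         XTENSA_STACK_ALIGN (20) == (20 + 15) & ~15 == 32
         XTENSA_STACK_ALIGN (32) == 32   (already aligned)  */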
2599 long 2694 long
2600 compute_frame_size (int size) 2695 compute_frame_size (int size)
2601 { 2696 {
2697 int regno;
2698
2699 if (reload_completed && cfun->machine->frame_laid_out)
2700 return cfun->machine->current_frame_size;
2701
2602 /* Add space for the incoming static chain value. */ 2702 /* Add space for the incoming static chain value. */
2603 if (cfun->static_chain_decl != NULL) 2703 if (cfun->static_chain_decl != NULL)
2604 size += (1 * UNITS_PER_WORD); 2704 size += (1 * UNITS_PER_WORD);
2605 2705
2606 xtensa_current_frame_size = 2706 cfun->machine->callee_save_size = 0;
2707 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; ++regno)
2708 {
2709 if (xtensa_call_save_reg(regno))
2710 cfun->machine->callee_save_size += UNITS_PER_WORD;
2711 }
2712
2713 cfun->machine->current_frame_size =
2607 XTENSA_STACK_ALIGN (size 2714 XTENSA_STACK_ALIGN (size
2715 + cfun->machine->callee_save_size
2608 + crtl->outgoing_args_size 2716 + crtl->outgoing_args_size
2609 + (WINDOW_SIZE * UNITS_PER_WORD)); 2717 + (WINDOW_SIZE * UNITS_PER_WORD));
2610 return xtensa_current_frame_size; 2718 cfun->machine->callee_save_size =
2719 XTENSA_STACK_ALIGN (cfun->machine->callee_save_size);
2720 cfun->machine->frame_laid_out = true;
2721 return cfun->machine->current_frame_size;
2611 } 2722 }
2612 2723
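A worked example of the computation above, under illustrative assumptions (call0 ABI with the register-window save area taken as empty, UNITS_PER_WORD == 4, STACK_BYTES == 16; a non-leaf function with 20 bytes of locals, no static chain, no outgoing arguments, and a0, a12 and a13 needing to be saved):

    callee_save_size   = 3 * UNITS_PER_WORD                    = 12
    current_frame_size = XTENSA_STACK_ALIGN (20 + 12 + 0 + 0)  = 32
    callee_save_size   = XTENSA_STACK_ALIGN (12)               = 16   /* re-aligned last */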
2613 2724
2614 bool 2725 bool
2615 xtensa_frame_pointer_required (void) 2726 xtensa_frame_pointer_required (void)
2623 return true; 2734 return true;
2624 2735
2625 return false; 2736 return false;
2626 } 2737 }
2627 2738
2739 HOST_WIDE_INT
2740 xtensa_initial_elimination_offset (int from, int to ATTRIBUTE_UNUSED)
2741 {
2742 long frame_size = compute_frame_size (get_frame_size ());
2743 HOST_WIDE_INT offset;
2744
2745 switch (from)
2746 {
2747 case FRAME_POINTER_REGNUM:
2748 if (FRAME_GROWS_DOWNWARD)
2749 offset = frame_size - (WINDOW_SIZE * UNITS_PER_WORD)
2750 - cfun->machine->callee_save_size;
2751 else
2752 offset = 0;
2753 break;
2754 case ARG_POINTER_REGNUM:
2755 offset = frame_size;
2756 break;
2757 default:
2758 gcc_unreachable ();
2759 }
2760
2761 return offset;
2762 }
2628 2763
2629 /* minimum frame = reg save area (4 words) plus static chain (1 word) 2764 /* minimum frame = reg save area (4 words) plus static chain (1 word)
 2630 and the total size must be a multiple of 128 bits. */ 2765 and the total size must be a multiple of 128 bits. */
2631 #define MIN_FRAME_SIZE (8 * UNITS_PER_WORD) 2766 #define MIN_FRAME_SIZE (8 * UNITS_PER_WORD)
2632 2767
2633 void 2768 void
2634 xtensa_expand_prologue (void) 2769 xtensa_expand_prologue (void)
2635 { 2770 {
2636 HOST_WIDE_INT total_size; 2771 HOST_WIDE_INT total_size;
2637 rtx size_rtx; 2772 rtx_insn *insn = NULL;
2638 rtx insn, note_rtx; 2773 rtx note_rtx;
2774
2639 2775
2640 total_size = compute_frame_size (get_frame_size ()); 2776 total_size = compute_frame_size (get_frame_size ());
2641 size_rtx = GEN_INT (total_size); 2777
2642 2778 if (flag_stack_usage_info)
2643 if (total_size < (1 << (12+3))) 2779 current_function_static_stack_size = total_size;
2644 insn = emit_insn (gen_entry (size_rtx)); 2780
2781 if (TARGET_WINDOWED_ABI)
2782 {
2783 if (total_size < (1 << (12+3)))
2784 insn = emit_insn (gen_entry (GEN_INT (total_size)));
2785 else
2786 {
2787 /* Use a8 as a temporary since a0-a7 may be live. */
2788 rtx tmp_reg = gen_rtx_REG (Pmode, A8_REG);
2789 emit_insn (gen_entry (GEN_INT (MIN_FRAME_SIZE)));
2790 emit_move_insn (tmp_reg, GEN_INT (total_size - MIN_FRAME_SIZE));
2791 emit_insn (gen_subsi3 (tmp_reg, stack_pointer_rtx, tmp_reg));
2792 insn = emit_insn (gen_movsi (stack_pointer_rtx, tmp_reg));
2793 }
2794 }
2645 else 2795 else
2646 { 2796 {
2647 /* Use a8 as a temporary since a0-a7 may be live. */ 2797 int regno;
2648 rtx tmp_reg = gen_rtx_REG (Pmode, A8_REG); 2798 HOST_WIDE_INT offset = 0;
2649 emit_insn (gen_entry (GEN_INT (MIN_FRAME_SIZE))); 2799 int callee_save_size = cfun->machine->callee_save_size;
2650 emit_move_insn (tmp_reg, GEN_INT (total_size - MIN_FRAME_SIZE)); 2800
2651 emit_insn (gen_subsi3 (tmp_reg, stack_pointer_rtx, tmp_reg)); 2801 /* -128 is a limit of single addi instruction. */
2652 insn = emit_insn (gen_movsi (stack_pointer_rtx, tmp_reg)); 2802 if (total_size > 0 && total_size <= 128)
2803 {
2804 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
2805 GEN_INT (-total_size)));
2806 RTX_FRAME_RELATED_P (insn) = 1;
2807 note_rtx = gen_rtx_SET (stack_pointer_rtx,
2808 plus_constant (Pmode, stack_pointer_rtx,
2809 -total_size));
2810 add_reg_note (insn, REG_FRAME_RELATED_EXPR, note_rtx);
2811 offset = total_size - UNITS_PER_WORD;
2812 }
2813 else if (callee_save_size)
2814 {
 2815 /* 1020 is the maximal s32i offset; if the frame is bigger than that,
 2816 * we move sp to the end of the callee-saved save area, do the saves,
 2817 * and then move sp to its final location. */
2818 if (total_size > 1024)
2819 {
2820 insn = emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
2821 GEN_INT (-callee_save_size)));
2822 RTX_FRAME_RELATED_P (insn) = 1;
2823 note_rtx = gen_rtx_SET (stack_pointer_rtx,
2824 plus_constant (Pmode, stack_pointer_rtx,
2825 -callee_save_size));
2826 add_reg_note (insn, REG_FRAME_RELATED_EXPR, note_rtx);
2827 offset = callee_save_size - UNITS_PER_WORD;
2828 }
2829 else
2830 {
2831 rtx tmp_reg = gen_rtx_REG (Pmode, A9_REG);
2832 emit_move_insn (tmp_reg, GEN_INT (total_size));
2833 insn = emit_insn (gen_subsi3 (stack_pointer_rtx,
2834 stack_pointer_rtx, tmp_reg));
2835 RTX_FRAME_RELATED_P (insn) = 1;
2836 note_rtx = gen_rtx_SET (stack_pointer_rtx,
2837 plus_constant (Pmode, stack_pointer_rtx,
2838 -total_size));
2839 add_reg_note (insn, REG_FRAME_RELATED_EXPR, note_rtx);
2840 offset = total_size - UNITS_PER_WORD;
2841 }
2842 }
2843
2844 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; ++regno)
2845 {
2846 if (xtensa_call_save_reg(regno))
2847 {
2848 rtx x = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
2849 rtx mem = gen_frame_mem (SImode, x);
2850 rtx reg = gen_rtx_REG (SImode, regno);
2851
2852 offset -= UNITS_PER_WORD;
2853 insn = emit_move_insn (mem, reg);
2854 RTX_FRAME_RELATED_P (insn) = 1;
2855 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
2856 gen_rtx_SET (mem, reg));
2857 }
2858 }
2859 if (total_size > 1024)
2860 {
2861 rtx tmp_reg = gen_rtx_REG (Pmode, A9_REG);
2862 emit_move_insn (tmp_reg, GEN_INT (total_size -
2863 callee_save_size));
2864 insn = emit_insn (gen_subsi3 (stack_pointer_rtx,
2865 stack_pointer_rtx, tmp_reg));
2866 RTX_FRAME_RELATED_P (insn) = 1;
2867 note_rtx = gen_rtx_SET (stack_pointer_rtx,
2868 plus_constant (Pmode, stack_pointer_rtx,
2869 callee_save_size -
2870 total_size));
2871 add_reg_note (insn, REG_FRAME_RELATED_EXPR, note_rtx);
2872 }
2653 } 2873 }
2654 2874
2655 if (frame_pointer_needed) 2875 if (frame_pointer_needed)
2656 { 2876 {
2657 if (cfun->machine->set_frame_ptr_insn) 2877 if (cfun->machine->set_frame_ptr_insn)
2658 { 2878 {
2659 rtx first; 2879 rtx_insn *first;
2660 2880
2661 push_topmost_sequence (); 2881 push_topmost_sequence ();
2662 first = get_insns (); 2882 first = get_insns ();
2663 pop_topmost_sequence (); 2883 pop_topmost_sequence ();
2664 2884
2676 df_insn_rescan (insn); 2896 df_insn_rescan (insn);
2677 } 2897 }
2678 } 2898 }
2679 } 2899 }
2680 else 2900 else
2681 insn = emit_insn (gen_movsi (hard_frame_pointer_rtx, 2901 {
2682 stack_pointer_rtx)); 2902 insn = emit_insn (gen_movsi (hard_frame_pointer_rtx,
2683 } 2903 stack_pointer_rtx));
2684 2904 if (!TARGET_WINDOWED_ABI)
2685 /* Create a note to describe the CFA. Because this is only used to set 2905 {
2686 DW_AT_frame_base for debug info, don't bother tracking changes through 2906 note_rtx = gen_rtx_SET (hard_frame_pointer_rtx,
2687 each instruction in the prologue. It just takes up space. */ 2907 stack_pointer_rtx);
2688 note_rtx = gen_rtx_SET (VOIDmode, (frame_pointer_needed 2908 RTX_FRAME_RELATED_P (insn) = 1;
2689 ? hard_frame_pointer_rtx 2909 add_reg_note (insn, REG_FRAME_RELATED_EXPR, note_rtx);
2690 : stack_pointer_rtx), 2910 }
2691 plus_constant (stack_pointer_rtx, -total_size)); 2911 }
2912 }
2913
2914 if (TARGET_WINDOWED_ABI)
2915 {
2916 /* Create a note to describe the CFA. Because this is only used to set
2917 DW_AT_frame_base for debug info, don't bother tracking changes through
2918 each instruction in the prologue. It just takes up space. */
2919 note_rtx = gen_rtx_SET ((frame_pointer_needed
2920 ? hard_frame_pointer_rtx
2921 : stack_pointer_rtx),
2922 plus_constant (Pmode, stack_pointer_rtx,
2923 -total_size));
2924 RTX_FRAME_RELATED_P (insn) = 1;
2925 add_reg_note (insn, REG_FRAME_RELATED_EXPR, note_rtx);
2926 }
2927 }
2928
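The thresholds used in the call0 prologue above follow directly from the instruction encodings (a hedged aside, not part of the change): addi takes a signed 8-bit immediate and s32i an unsigned 8-bit offset scaled by 4, so

    /* addi immediate range:  -128 .. 127        -> frames of up to 128 bytes
       can be allocated with a single addi of sp.
       s32i offset range:     0 .. 255 * 4 == 1020 -> register saves stay
       addressable while sp is within 1020 bytes of the save area, hence
       the two-step stack adjustment once total_size exceeds 1024.  */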
2929 void
2930 xtensa_expand_epilogue (void)
2931 {
2932 if (!TARGET_WINDOWED_ABI)
2933 {
2934 int regno;
2935 HOST_WIDE_INT offset;
2936
2937 if (cfun->machine->current_frame_size > (frame_pointer_needed ? 127 : 1024))
2938 {
2939 rtx tmp_reg = gen_rtx_REG (Pmode, A9_REG);
2940 emit_move_insn (tmp_reg, GEN_INT (cfun->machine->current_frame_size -
2941 cfun->machine->callee_save_size));
2942 emit_insn (gen_addsi3 (stack_pointer_rtx, frame_pointer_needed ?
2943 hard_frame_pointer_rtx : stack_pointer_rtx,
2944 tmp_reg));
2945 offset = cfun->machine->callee_save_size - UNITS_PER_WORD;
2946 }
2947 else
2948 {
2949 if (frame_pointer_needed)
2950 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
2951 offset = cfun->machine->current_frame_size - UNITS_PER_WORD;
2952 }
2953
 2954 /* Prevent reordering between the update of the saved a0 and the
 2955 load that restores it from the save area. */
2956 if (crtl->calls_eh_return)
2957 emit_insn (gen_blockage ());
2958
2959 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; ++regno)
2960 {
2961 if (xtensa_call_save_reg(regno))
2962 {
2963 rtx x = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
2964
2965 offset -= UNITS_PER_WORD;
2966 emit_move_insn (gen_rtx_REG (SImode, regno),
2967 gen_frame_mem (SImode, x));
2968 }
2969 }
2970
2971 if (cfun->machine->current_frame_size > 0)
2972 {
2973 if (frame_pointer_needed || /* always reachable with addi */
2974 cfun->machine->current_frame_size > 1024 ||
2975 cfun->machine->current_frame_size <= 127)
2976 {
2977 if (cfun->machine->current_frame_size <= 127)
2978 offset = cfun->machine->current_frame_size;
2979 else
2980 offset = cfun->machine->callee_save_size;
2981
2982 emit_insn (gen_addsi3 (stack_pointer_rtx,
2983 stack_pointer_rtx,
2984 GEN_INT (offset)));
2985 }
2986 else
2987 {
2988 rtx tmp_reg = gen_rtx_REG (Pmode, A9_REG);
2989 emit_move_insn (tmp_reg,
2990 GEN_INT (cfun->machine->current_frame_size));
2991 emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
2992 tmp_reg));
2993 }
2994 }
2995
2996 if (crtl->calls_eh_return)
2997 emit_insn (gen_add3_insn (stack_pointer_rtx,
2998 stack_pointer_rtx,
2999 EH_RETURN_STACKADJ_RTX));
3000 }
3001 cfun->machine->epilogue_done = true;
3002 emit_jump_insn (gen_return ());
3003 }
3004
3005 bool
3006 xtensa_use_return_instruction_p (void)
3007 {
3008 if (!reload_completed)
3009 return false;
3010 if (TARGET_WINDOWED_ABI)
3011 return true;
3012 if (compute_frame_size (get_frame_size ()) == 0)
3013 return true;
3014 return cfun->machine->epilogue_done;
3015 }
3016
3017 void
3018 xtensa_set_return_address (rtx address, rtx scratch)
3019 {
3020 HOST_WIDE_INT total_size = compute_frame_size (get_frame_size ());
3021 rtx frame = frame_pointer_needed ?
3022 hard_frame_pointer_rtx : stack_pointer_rtx;
3023 rtx a0_addr = plus_constant (Pmode, frame,
3024 total_size - UNITS_PER_WORD);
3025 rtx note = gen_rtx_SET (gen_frame_mem (SImode, a0_addr),
3026 gen_rtx_REG (SImode, A0_REG));
3027 rtx insn;
3028
3029 if (total_size > 1024) {
3030 emit_move_insn (scratch, GEN_INT (total_size - UNITS_PER_WORD));
3031 emit_insn (gen_addsi3 (scratch, frame, scratch));
3032 a0_addr = scratch;
3033 }
3034
3035 insn = emit_move_insn (gen_frame_mem (SImode, a0_addr), address);
2692 RTX_FRAME_RELATED_P (insn) = 1; 3036 RTX_FRAME_RELATED_P (insn) = 1;
2693 add_reg_note (insn, REG_FRAME_RELATED_EXPR, note_rtx); 3037 add_reg_note (insn, REG_FRAME_RELATED_EXPR, note);
2694 } 3038 }
2695
2696
2697 /* Clear variables at function end. */
2698
2699 void
2700 xtensa_function_epilogue (FILE *file ATTRIBUTE_UNUSED,
2701 HOST_WIDE_INT size ATTRIBUTE_UNUSED)
2702 {
2703 xtensa_current_frame_size = 0;
2704 }
2705
2706 3039
2707 rtx 3040 rtx
2708 xtensa_return_addr (int count, rtx frame) 3041 xtensa_return_addr (int count, rtx frame)
2709 { 3042 {
2710 rtx result, retaddr, curaddr, label; 3043 rtx result, retaddr, curaddr, label;
3044
3045 if (!TARGET_WINDOWED_ABI)
3046 {
3047 if (count != 0)
3048 return const0_rtx;
3049
3050 return get_hard_reg_initial_val (Pmode, A0_REG);
3051 }
2711 3052
2712 if (count == -1) 3053 if (count == -1)
2713 retaddr = gen_rtx_REG (Pmode, A0_REG); 3054 retaddr = gen_rtx_REG (Pmode, A0_REG);
2714 else 3055 else
2715 { 3056 {
2716 rtx addr = plus_constant (frame, -4 * UNITS_PER_WORD); 3057 rtx addr = plus_constant (Pmode, frame, -4 * UNITS_PER_WORD);
2717 addr = memory_address (Pmode, addr); 3058 addr = memory_address (Pmode, addr);
2718 retaddr = gen_reg_rtx (Pmode); 3059 retaddr = gen_reg_rtx (Pmode);
2719 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr)); 3060 emit_move_insn (retaddr, gen_rtx_MEM (Pmode, addr));
2720 } 3061 }
2721 3062
2741 /* Combine them to get the result. */ 3082 /* Combine them to get the result. */
2742 emit_insn (gen_iorsi3 (result, result, curaddr)); 3083 emit_insn (gen_iorsi3 (result, result, curaddr));
2743 return result; 3084 return result;
2744 } 3085 }
2745 3086
3087 /* Disable the use of word-sized or smaller complex modes for structures,
3088 and for function arguments in particular, where they cause problems with
3089 register a7. The xtensa_copy_incoming_a7 function assumes that there is
3090 a single reference to an argument in a7, but with small complex modes the
3091 real and imaginary components may be extracted separately, leading to two
3092 uses of the register, only one of which would be replaced. */
3093
3094 static bool
3095 xtensa_member_type_forces_blk (const_tree, machine_mode mode)
3096 {
3097 return mode == CQImode || mode == CHImode;
3098 }
2746 3099
2747 /* Create the va_list data type. 3100 /* Create the va_list data type.
2748 3101
2749 This structure is set up by __builtin_saveregs. The __va_reg field 3102 This structure is set up by __builtin_saveregs. The __va_reg field
2750 points to a stack-allocated region holding the contents of the 3103 points to a stack-allocated region holding the contents of the
2811 gp_regs = assign_stack_local 3164 gp_regs = assign_stack_local
2812 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1); 3165 (BLKmode, MAX_ARGS_IN_REGISTERS * UNITS_PER_WORD, -1);
2813 set_mem_alias_set (gp_regs, get_varargs_alias_set ()); 3166 set_mem_alias_set (gp_regs, get_varargs_alias_set ());
2814 3167
2815 /* Now store the incoming registers. */ 3168 /* Now store the incoming registers. */
2816 cfun->machine->need_a7_copy = true; 3169 cfun->machine->need_a7_copy = TARGET_WINDOWED_ABI;
2817 cfun->machine->vararg_a7 = true; 3170 cfun->machine->vararg_a7 = true;
2818 move_block_from_reg (GP_ARG_FIRST + arg_words, 3171 move_block_from_reg (GP_ARG_FIRST + arg_words,
2819 adjust_address (gp_regs, BLKmode, 3172 adjust_address (gp_regs, BLKmode,
2820 arg_words * UNITS_PER_WORD), 3173 arg_words * UNITS_PER_WORD),
2821 gp_left); 3174 gp_left);
2822 gcc_assert (cfun->machine->vararg_a7_copy != 0); 3175 if (cfun->machine->vararg_a7_copy != 0)
2823 emit_insn_before (cfun->machine->vararg_a7_copy, get_insns ()); 3176 emit_insn_before (cfun->machine->vararg_a7_copy, get_insns ());
2824 3177
2825 return XEXP (gp_regs, 0); 3178 return XEXP (gp_regs, 0);
2826 } 3179 }
2827 3180
2828 3181
2857 TREE_SIDE_EFFECTS (t) = 1; 3210 TREE_SIDE_EFFECTS (t) = 1;
2858 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL); 3211 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2859 3212
2860 /* Set the __va_stk member to ($arg_ptr - 32). */ 3213 /* Set the __va_stk member to ($arg_ptr - 32). */
2861 u = make_tree (ptr_type_node, virtual_incoming_args_rtx); 3214 u = make_tree (ptr_type_node, virtual_incoming_args_rtx);
2862 u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u, size_int (-32)); 3215 u = fold_build_pointer_plus_hwi (u, -32);
2863 t = build2 (MODIFY_EXPR, ptr_type_node, stk, u); 3216 t = build2 (MODIFY_EXPR, ptr_type_node, stk, u);
2864 TREE_SIDE_EFFECTS (t) = 1; 3217 TREE_SIDE_EFFECTS (t) = 1;
2865 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL); 3218 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
2866 3219
2867 /* Set the __va_ndx member. If the first variable argument is on 3220 /* Set the __va_ndx member. If the first variable argument is on
2960 3313
2961 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4 3314 if ((AP).__va_ndx <= __MAX_ARGS_IN_REGISTERS * 4
2962 && !must_pass_in_stack (type)) 3315 && !must_pass_in_stack (type))
2963 __array = (AP).__va_reg; */ 3316 __array = (AP).__va_reg; */
2964 3317
2965 array = create_tmp_var (ptr_type_node, NULL); 3318 array = create_tmp_var (ptr_type_node);
2966 3319
2967 lab_over = NULL; 3320 lab_over = NULL;
2968 if (!targetm.calls.must_pass_in_stack (TYPE_MODE (type), type)) 3321 if (!targetm.calls.must_pass_in_stack (TYPE_MODE (type), type))
2969 { 3322 {
2970 lab_false = create_artificial_label (UNKNOWN_LOCATION); 3323 lab_false = create_artificial_label (UNKNOWN_LOCATION);
3046 else 3399 else
3047 size = unshare_expr (va_size); 3400 size = unshare_expr (va_size);
3048 3401
3049 t = fold_convert (sizetype, unshare_expr (ndx)); 3402 t = fold_convert (sizetype, unshare_expr (ndx));
3050 t = build2 (MINUS_EXPR, sizetype, t, size); 3403 t = build2 (MINUS_EXPR, sizetype, t, size);
3051 addr = build2 (POINTER_PLUS_EXPR, ptr_type_node, unshare_expr (array), t); 3404 addr = fold_build_pointer_plus (unshare_expr (array), t);
3052 3405
3053 addr = fold_convert (build_pointer_type (type), addr); 3406 addr = fold_convert (build_pointer_type (type), addr);
3054 if (indirect) 3407 if (indirect)
3055 addr = build_va_arg_indirect_ref (addr); 3408 addr = build_va_arg_indirect_ref (addr);
3056 return build_va_arg_indirect_ref (addr); 3409 return build_va_arg_indirect_ref (addr);
3060 /* Builtins. */ 3413 /* Builtins. */
3061 3414
3062 enum xtensa_builtin 3415 enum xtensa_builtin
3063 { 3416 {
3064 XTENSA_BUILTIN_UMULSIDI3, 3417 XTENSA_BUILTIN_UMULSIDI3,
3065 XTENSA_BUILTIN_THREAD_POINTER,
3066 XTENSA_BUILTIN_SET_THREAD_POINTER,
3067 XTENSA_BUILTIN_max 3418 XTENSA_BUILTIN_max
3068 }; 3419 };
3069 3420
3070 3421
3071 static void 3422 static void
3080 decl = add_builtin_function ("__builtin_umulsidi3", ftype, 3431 decl = add_builtin_function ("__builtin_umulsidi3", ftype,
3081 XTENSA_BUILTIN_UMULSIDI3, BUILT_IN_MD, 3432 XTENSA_BUILTIN_UMULSIDI3, BUILT_IN_MD,
3082 "__umulsidi3", NULL_TREE); 3433 "__umulsidi3", NULL_TREE);
3083 TREE_NOTHROW (decl) = 1; 3434 TREE_NOTHROW (decl) = 1;
3084 TREE_READONLY (decl) = 1; 3435 TREE_READONLY (decl) = 1;
3085
3086 if (TARGET_THREADPTR)
3087 {
3088 ftype = build_function_type (ptr_type_node, void_list_node);
3089 decl = add_builtin_function ("__builtin_thread_pointer", ftype,
3090 XTENSA_BUILTIN_THREAD_POINTER, BUILT_IN_MD,
3091 NULL, NULL_TREE);
3092 TREE_READONLY (decl) = 1;
3093 TREE_NOTHROW (decl) = 1;
3094
3095 ftype = build_function_type_list (void_type_node, ptr_type_node,
3096 NULL_TREE);
3097 decl = add_builtin_function ("__builtin_set_thread_pointer", ftype,
3098 XTENSA_BUILTIN_SET_THREAD_POINTER,
3099 BUILT_IN_MD, NULL, NULL_TREE);
3100 TREE_NOTHROW (decl) = 1;
3101 }
3102 } 3436 }
3103 3437
3104 3438
3105 static tree 3439 static tree
3106 xtensa_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *args, 3440 xtensa_fold_builtin (tree fndecl, int n_args ATTRIBUTE_UNUSED, tree *args,
3119 return fold_build2 (MULT_EXPR, unsigned_intDI_type_node, 3453 return fold_build2 (MULT_EXPR, unsigned_intDI_type_node,
3120 fold_convert (unsigned_intDI_type_node, arg0), 3454 fold_convert (unsigned_intDI_type_node, arg0),
3121 fold_convert (unsigned_intDI_type_node, arg1)); 3455 fold_convert (unsigned_intDI_type_node, arg1));
3122 break; 3456 break;
3123 3457
3124 case XTENSA_BUILTIN_THREAD_POINTER:
3125 case XTENSA_BUILTIN_SET_THREAD_POINTER:
3126 break;
3127
3128 default: 3458 default:
3129 internal_error ("bad builtin code"); 3459 internal_error ("bad builtin code");
3130 break; 3460 break;
3131 } 3461 }
3132 3462
3135 3465
3136 3466
3137 static rtx 3467 static rtx
3138 xtensa_expand_builtin (tree exp, rtx target, 3468 xtensa_expand_builtin (tree exp, rtx target,
3139 rtx subtarget ATTRIBUTE_UNUSED, 3469 rtx subtarget ATTRIBUTE_UNUSED,
3140 enum machine_mode mode ATTRIBUTE_UNUSED, 3470 machine_mode mode ATTRIBUTE_UNUSED,
3141 int ignore) 3471 int ignore)
3142 { 3472 {
3143 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0); 3473 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
3144 unsigned int fcode = DECL_FUNCTION_CODE (fndecl); 3474 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
3145 rtx arg;
3146 3475
3147 switch (fcode) 3476 switch (fcode)
3148 { 3477 {
3149 case XTENSA_BUILTIN_UMULSIDI3: 3478 case XTENSA_BUILTIN_UMULSIDI3:
3150 /* The umulsidi3 builtin is just a mechanism to avoid calling the real 3479 /* The umulsidi3 builtin is just a mechanism to avoid calling the real
3151 __umulsidi3 function when the Xtensa configuration can directly 3480 __umulsidi3 function when the Xtensa configuration can directly
3152 implement it. If not, just call the function. */ 3481 implement it. If not, just call the function. */
3153 return expand_call (exp, target, ignore); 3482 return expand_call (exp, target, ignore);
3154 3483
3155 case XTENSA_BUILTIN_THREAD_POINTER:
3156 if (!target || !register_operand (target, Pmode))
3157 target = gen_reg_rtx (Pmode);
3158 emit_insn (gen_load_tp (target));
3159 return target;
3160
3161 case XTENSA_BUILTIN_SET_THREAD_POINTER:
3162 arg = expand_normal (CALL_EXPR_ARG (exp, 0));
3163 if (!register_operand (arg, Pmode))
3164 arg = copy_to_mode_reg (Pmode, arg);
3165 emit_insn (gen_set_tp (arg));
3166 return const0_rtx;
3167
3168 default: 3484 default:
3169 internal_error ("bad builtin code"); 3485 internal_error ("bad builtin code");
3170 } 3486 }
3171 return NULL_RTX; 3487 return NULL_RTX;
3172 } 3488 }
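A usage sketch for the builtin registered above (hypothetical user code; only __builtin_umulsidi3 comes from the port, the wrapper name is ours):

    /* 32x32 -> 64 bit unsigned multiply.  Expands inline when the Xtensa
       configuration can do it directly, otherwise becomes a call to the
       library routine __umulsidi3.  */
    unsigned long long
    mul32x32 (unsigned int a, unsigned int b)
    {
      return __builtin_umulsidi3 (a, b);
    }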
3211 3527
3212 /* Worker function for TARGET_SECONDARY_RELOAD. */ 3528 /* Worker function for TARGET_SECONDARY_RELOAD. */
3213 3529
3214 static reg_class_t 3530 static reg_class_t
3215 xtensa_secondary_reload (bool in_p, rtx x, reg_class_t rclass, 3531 xtensa_secondary_reload (bool in_p, rtx x, reg_class_t rclass,
3216 enum machine_mode mode, secondary_reload_info *sri) 3532 machine_mode mode, secondary_reload_info *sri)
3217 { 3533 {
3218 int regno; 3534 int regno;
3219 3535
3220 if (in_p && constantpool_mem_p (x)) 3536 if (in_p && constantpool_mem_p (x))
3221 { 3537 {
3241 void 3557 void
3242 order_regs_for_local_alloc (void) 3558 order_regs_for_local_alloc (void)
3243 { 3559 {
3244 if (!leaf_function_p ()) 3560 if (!leaf_function_p ())
3245 { 3561 {
3246 memcpy (reg_alloc_order, reg_nonleaf_alloc_order, 3562 static const int reg_nonleaf_alloc_order[FIRST_PSEUDO_REGISTER] =
3563 REG_ALLOC_ORDER;
3564 static const int reg_nonleaf_alloc_order_call0[FIRST_PSEUDO_REGISTER] =
3565 {
3566 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 12, 13, 14, 15,
3567 18,
3568 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34,
3569 0, 1, 16, 17,
3570 35,
3571 };
3572
3573 memcpy (reg_alloc_order, TARGET_WINDOWED_ABI ?
3574 reg_nonleaf_alloc_order : reg_nonleaf_alloc_order_call0,
3247 FIRST_PSEUDO_REGISTER * sizeof (int)); 3575 FIRST_PSEUDO_REGISTER * sizeof (int));
3248 } 3576 }
3249 else 3577 else
3250 { 3578 {
3251 int i, num_arg_regs; 3579 int i, num_arg_regs;
3305 3633
3306 3634
3307 /* The literal pool stays with the function. */ 3635 /* The literal pool stays with the function. */
3308 3636
3309 static section * 3637 static section *
3310 xtensa_select_rtx_section (enum machine_mode mode ATTRIBUTE_UNUSED, 3638 xtensa_select_rtx_section (machine_mode mode ATTRIBUTE_UNUSED,
3311 rtx x ATTRIBUTE_UNUSED, 3639 rtx x ATTRIBUTE_UNUSED,
3312 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED) 3640 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
3313 { 3641 {
3314 return function_section (current_function_decl); 3642 return function_section (current_function_decl);
3315 } 3643 }
3316 3644
3317 /* Worker function for TARGET_REGISTER_MOVE_COST. */ 3645 /* Worker function for TARGET_REGISTER_MOVE_COST. */
3318 3646
3319 static int 3647 static int
3320 xtensa_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED, 3648 xtensa_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
3321 reg_class_t from, reg_class_t to) 3649 reg_class_t from, reg_class_t to)
3322 { 3650 {
3323 if (from == to && from != BR_REGS && to != BR_REGS) 3651 if (from == to && from != BR_REGS && to != BR_REGS)
3324 return 2; 3652 return 2;
3325 else if (reg_class_subset_p (from, AR_REGS) 3653 else if (reg_class_subset_p (from, AR_REGS)
3334 } 3662 }
3335 3663
3336 /* Worker function for TARGET_MEMORY_MOVE_COST. */ 3664 /* Worker function for TARGET_MEMORY_MOVE_COST. */
3337 3665
3338 static int 3666 static int
3339 xtensa_memory_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED, 3667 xtensa_memory_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
3340 reg_class_t rclass ATTRIBUTE_UNUSED, 3668 reg_class_t rclass ATTRIBUTE_UNUSED,
3341 bool in ATTRIBUTE_UNUSED) 3669 bool in ATTRIBUTE_UNUSED)
3342 { 3670 {
3343 return 4; 3671 return 4;
3344 } 3672 }
3346 /* Compute a (partial) cost for rtx X. Return true if the complete 3674 /* Compute a (partial) cost for rtx X. Return true if the complete
3347 cost has been computed, and false if subexpressions should be 3675 cost has been computed, and false if subexpressions should be
3348 scanned. In either case, *TOTAL contains the cost result. */ 3676 scanned. In either case, *TOTAL contains the cost result. */
3349 3677
3350 static bool 3678 static bool
3351 xtensa_rtx_costs (rtx x, int code, int outer_code, int *total, 3679 xtensa_rtx_costs (rtx x, machine_mode mode, int outer_code,
3352 bool speed ATTRIBUTE_UNUSED) 3680 int opno ATTRIBUTE_UNUSED,
3353 { 3681 int *total, bool speed ATTRIBUTE_UNUSED)
3682 {
3683 int code = GET_CODE (x);
3684
3354 switch (code) 3685 switch (code)
3355 { 3686 {
3356 case CONST_INT: 3687 case CONST_INT:
3357 switch (outer_code) 3688 switch (outer_code)
3358 { 3689 {
3418 return true; 3749 return true;
3419 3750
3420 case MEM: 3751 case MEM:
3421 { 3752 {
3422 int num_words = 3753 int num_words =
3423 (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD) ? 2 : 1; 3754 (GET_MODE_SIZE (mode) > UNITS_PER_WORD) ? 2 : 1;
3424 3755
3425 if (memory_address_p (GET_MODE (x), XEXP ((x), 0))) 3756 if (memory_address_p (mode, XEXP ((x), 0)))
3426 *total = COSTS_N_INSNS (num_words); 3757 *total = COSTS_N_INSNS (num_words);
3427 else 3758 else
3428 *total = COSTS_N_INSNS (2*num_words); 3759 *total = COSTS_N_INSNS (2*num_words);
3429 return true; 3760 return true;
3430 } 3761 }
3437 case CLZ: 3768 case CLZ:
3438 *total = COSTS_N_INSNS (TARGET_NSA ? 1 : 50); 3769 *total = COSTS_N_INSNS (TARGET_NSA ? 1 : 50);
3439 return true; 3770 return true;
3440 3771
3441 case NOT: 3772 case NOT:
3442 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 3 : 2); 3773 *total = COSTS_N_INSNS (mode == DImode ? 3 : 2);
3443 return true; 3774 return true;
3444 3775
3445 case AND: 3776 case AND:
3446 case IOR: 3777 case IOR:
3447 case XOR: 3778 case XOR:
3448 if (GET_MODE (x) == DImode) 3779 if (mode == DImode)
3449 *total = COSTS_N_INSNS (2); 3780 *total = COSTS_N_INSNS (2);
3450 else 3781 else
3451 *total = COSTS_N_INSNS (1); 3782 *total = COSTS_N_INSNS (1);
3452 return true; 3783 return true;
3453 3784
3454 case ASHIFT: 3785 case ASHIFT:
3455 case ASHIFTRT: 3786 case ASHIFTRT:
3456 case LSHIFTRT: 3787 case LSHIFTRT:
3457 if (GET_MODE (x) == DImode) 3788 if (mode == DImode)
3458 *total = COSTS_N_INSNS (50); 3789 *total = COSTS_N_INSNS (50);
3459 else 3790 else
3460 *total = COSTS_N_INSNS (1); 3791 *total = COSTS_N_INSNS (1);
3461 return true; 3792 return true;
3462 3793
3463 case ABS: 3794 case ABS:
3464 { 3795 {
3465 enum machine_mode xmode = GET_MODE (x); 3796 if (mode == SFmode)
3466 if (xmode == SFmode)
3467 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50); 3797 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
3468 else if (xmode == DFmode) 3798 else if (mode == DFmode)
3469 *total = COSTS_N_INSNS (50); 3799 *total = COSTS_N_INSNS (50);
3470 else 3800 else
3471 *total = COSTS_N_INSNS (4); 3801 *total = COSTS_N_INSNS (4);
3472 return true; 3802 return true;
3473 } 3803 }
3474 3804
3475 case PLUS: 3805 case PLUS:
3476 case MINUS: 3806 case MINUS:
3477 { 3807 {
3478 enum machine_mode xmode = GET_MODE (x); 3808 if (mode == SFmode)
3479 if (xmode == SFmode)
3480 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50); 3809 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 1 : 50);
3481 else if (xmode == DFmode || xmode == DImode) 3810 else if (mode == DFmode || mode == DImode)
3482 *total = COSTS_N_INSNS (50); 3811 *total = COSTS_N_INSNS (50);
3483 else 3812 else
3484 *total = COSTS_N_INSNS (1); 3813 *total = COSTS_N_INSNS (1);
3485 return true; 3814 return true;
3486 } 3815 }
3487 3816
3488 case NEG: 3817 case NEG:
3489 *total = COSTS_N_INSNS ((GET_MODE (x) == DImode) ? 4 : 2); 3818 *total = COSTS_N_INSNS (mode == DImode ? 4 : 2);
3490 return true; 3819 return true;
3491 3820
3492 case MULT: 3821 case MULT:
3493 { 3822 {
3494 enum machine_mode xmode = GET_MODE (x); 3823 if (mode == SFmode)
3495 if (xmode == SFmode)
3496 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50); 3824 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT ? 4 : 50);
3497 else if (xmode == DFmode) 3825 else if (mode == DFmode)
3498 *total = COSTS_N_INSNS (50); 3826 *total = COSTS_N_INSNS (50);
3499 else if (xmode == DImode) 3827 else if (mode == DImode)
3500 *total = COSTS_N_INSNS (TARGET_MUL32_HIGH ? 10 : 50); 3828 *total = COSTS_N_INSNS (TARGET_MUL32_HIGH ? 10 : 50);
3501 else if (TARGET_MUL32) 3829 else if (TARGET_MUL32)
3502 *total = COSTS_N_INSNS (4); 3830 *total = COSTS_N_INSNS (4);
3503 else if (TARGET_MAC16) 3831 else if (TARGET_MAC16)
3504 *total = COSTS_N_INSNS (16); 3832 *total = COSTS_N_INSNS (16);
3510 } 3838 }
3511 3839
3512 case DIV: 3840 case DIV:
3513 case MOD: 3841 case MOD:
3514 { 3842 {
3515 enum machine_mode xmode = GET_MODE (x); 3843 if (mode == SFmode)
3516 if (xmode == SFmode)
3517 { 3844 {
3518 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50); 3845 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_DIV ? 8 : 50);
3519 return true; 3846 return true;
3520 } 3847 }
3521 else if (xmode == DFmode) 3848 else if (mode == DFmode)
3522 { 3849 {
3523 *total = COSTS_N_INSNS (50); 3850 *total = COSTS_N_INSNS (50);
3524 return true; 3851 return true;
3525 } 3852 }
3526 } 3853 }
3527 /* Fall through. */ 3854 /* Fall through. */
3528 3855
3529 case UDIV: 3856 case UDIV:
3530 case UMOD: 3857 case UMOD:
3531 { 3858 {
3532 enum machine_mode xmode = GET_MODE (x); 3859 if (mode == DImode)
3533 if (xmode == DImode)
3534 *total = COSTS_N_INSNS (50); 3860 *total = COSTS_N_INSNS (50);
3535 else if (TARGET_DIV32) 3861 else if (TARGET_DIV32)
3536 *total = COSTS_N_INSNS (32); 3862 *total = COSTS_N_INSNS (32);
3537 else 3863 else
3538 *total = COSTS_N_INSNS (50); 3864 *total = COSTS_N_INSNS (50);
3539 return true; 3865 return true;
3540 } 3866 }
3541 3867
3542 case SQRT: 3868 case SQRT:
3543 if (GET_MODE (x) == SFmode) 3869 if (mode == SFmode)
3544 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50); 3870 *total = COSTS_N_INSNS (TARGET_HARD_FLOAT_SQRT ? 8 : 50);
3545 else 3871 else
3546 *total = COSTS_N_INSNS (50); 3872 *total = COSTS_N_INSNS (50);
3547 return true; 3873 return true;
3548 3874
3590 } 3916 }
3591 3917
3592 /* Worker function for TARGET_LIBCALL_VALUE. */ 3918 /* Worker function for TARGET_LIBCALL_VALUE. */
3593 3919
3594 static rtx 3920 static rtx
3595 xtensa_libcall_value (enum machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED) 3921 xtensa_libcall_value (machine_mode mode, const_rtx fun ATTRIBUTE_UNUSED)
3596 { 3922 {
3597 return gen_rtx_REG ((GET_MODE_CLASS (mode) == MODE_INT 3923 return gen_rtx_REG ((GET_MODE_CLASS (mode) == MODE_INT
3598 && GET_MODE_SIZE (mode) < UNITS_PER_WORD) 3924 && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
3599 ? SImode : mode, GP_RETURN); 3925 ? SImode : mode, GP_RETURN);
3600 } 3926 }
3609 3935
3610 /* The static chain is passed in memory. Provide rtx giving 'mem' 3936 /* The static chain is passed in memory. Provide rtx giving 'mem'
3611 expressions that denote where they are stored. */ 3937 expressions that denote where they are stored. */
3612 3938
3613 static rtx 3939 static rtx
3614 xtensa_static_chain (const_tree ARG_UNUSED (fndecl), bool incoming_p) 3940 xtensa_static_chain (const_tree ARG_UNUSED (fndecl_or_type), bool incoming_p)
3615 { 3941 {
3616 rtx base = incoming_p ? arg_pointer_rtx : stack_pointer_rtx; 3942 if (TARGET_WINDOWED_ABI)
3617 return gen_frame_mem (Pmode, plus_constant (base, -5 * UNITS_PER_WORD)); 3943 {
3944 rtx base = incoming_p ? arg_pointer_rtx : stack_pointer_rtx;
3945 return gen_frame_mem (Pmode, plus_constant (Pmode, base,
3946 -5 * UNITS_PER_WORD));
3947 }
3948 else
3949 return gen_rtx_REG (Pmode, A8_REG);
3618 } 3950 }
3619 3951
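In concrete terms (assuming UNITS_PER_WORD == 4):

    /* Windowed ABI: chain in memory at base - 5 * UNITS_PER_WORD == base - 20,
       where base is the incoming arg pointer inside the nested function and
       the stack pointer at the call site.
       call0 ABI:    chain simply passed in register a8.  */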
3620 3952
3621 /* TRAMPOLINE_TEMPLATE: For Xtensa, the trampoline must perform an ENTRY 3953 /* TRAMPOLINE_TEMPLATE: For Xtensa, the trampoline must perform an ENTRY
3622 instruction with a minimal stack frame in order to get some free 3954 instruction with a minimal stack frame in order to get some free
3630 xtensa_asm_trampoline_template (FILE *stream) 3962 xtensa_asm_trampoline_template (FILE *stream)
3631 { 3963 {
3632 bool use_call0 = (TARGET_CONST16 || TARGET_ABSOLUTE_LITERALS); 3964 bool use_call0 = (TARGET_CONST16 || TARGET_ABSOLUTE_LITERALS);
3633 3965
3634 fprintf (stream, "\t.begin no-transform\n"); 3966 fprintf (stream, "\t.begin no-transform\n");
3635 fprintf (stream, "\tentry\tsp, %d\n", MIN_FRAME_SIZE); 3967
3636 3968 if (TARGET_WINDOWED_ABI)
3637 if (use_call0) 3969 {
3638 { 3970 fprintf (stream, "\tentry\tsp, %d\n", MIN_FRAME_SIZE);
3639 /* Save the return address. */ 3971
3640 fprintf (stream, "\tmov\ta10, a0\n"); 3972 if (use_call0)
3641 3973 {
3642 /* Use a CALL0 instruction to skip past the constants and in the 3974 /* Save the return address. */
3643 process get the PC into A0. This allows PC-relative access to 3975 fprintf (stream, "\tmov\ta10, a0\n");
3644 the constants without relying on L32R. */ 3976
3645 fprintf (stream, "\tcall0\t.Lskipconsts\n"); 3977 /* Use a CALL0 instruction to skip past the constants and in the
3978 process get the PC into A0. This allows PC-relative access to
3979 the constants without relying on L32R. */
3980 fprintf (stream, "\tcall0\t.Lskipconsts\n");
3981 }
3982 else
3983 fprintf (stream, "\tj\t.Lskipconsts\n");
3984
3985 fprintf (stream, "\t.align\t4\n");
3986 fprintf (stream, ".Lchainval:%s0\n", integer_asm_op (4, TRUE));
3987 fprintf (stream, ".Lfnaddr:%s0\n", integer_asm_op (4, TRUE));
3988 fprintf (stream, ".Lskipconsts:\n");
3989
3990 /* Load the static chain and function address from the trampoline. */
3991 if (use_call0)
3992 {
3993 fprintf (stream, "\taddi\ta0, a0, 3\n");
3994 fprintf (stream, "\tl32i\ta9, a0, 0\n");
3995 fprintf (stream, "\tl32i\ta8, a0, 4\n");
3996 }
3997 else
3998 {
3999 fprintf (stream, "\tl32r\ta9, .Lchainval\n");
4000 fprintf (stream, "\tl32r\ta8, .Lfnaddr\n");
4001 }
4002
4003 /* Store the static chain. */
4004 fprintf (stream, "\ts32i\ta9, sp, %d\n", MIN_FRAME_SIZE - 20);
4005
4006 /* Set the proper stack pointer value. */
4007 fprintf (stream, "\tl32i\ta9, a8, 0\n");
4008 fprintf (stream, "\textui\ta9, a9, %d, 12\n",
4009 TARGET_BIG_ENDIAN ? 8 : 12);
4010 fprintf (stream, "\tslli\ta9, a9, 3\n");
4011 fprintf (stream, "\taddi\ta9, a9, %d\n", -MIN_FRAME_SIZE);
4012 fprintf (stream, "\tsub\ta9, sp, a9\n");
4013 fprintf (stream, "\tmovsp\tsp, a9\n");
4014
4015 if (use_call0)
4016 /* Restore the return address. */
4017 fprintf (stream, "\tmov\ta0, a10\n");
4018
4019 /* Jump to the instruction following the ENTRY. */
4020 fprintf (stream, "\taddi\ta8, a8, 3\n");
4021 fprintf (stream, "\tjx\ta8\n");
4022
4023 /* Pad size to a multiple of TRAMPOLINE_ALIGNMENT. */
4024 if (use_call0)
4025 fprintf (stream, "\t.byte\t0\n");
4026 else
4027 fprintf (stream, "\tnop\n");
3646 } 4028 }
3647 else 4029 else
3648 fprintf (stream, "\tj\t.Lskipconsts\n"); 4030 {
3649 4031 if (use_call0)
3650 fprintf (stream, "\t.align\t4\n"); 4032 {
3651 fprintf (stream, ".Lchainval:%s0\n", integer_asm_op (4, TRUE)); 4033 /* Save the return address. */
3652 fprintf (stream, ".Lfnaddr:%s0\n", integer_asm_op (4, TRUE)); 4034 fprintf (stream, "\tmov\ta10, a0\n");
3653 fprintf (stream, ".Lskipconsts:\n"); 4035
3654 4036 /* Use a CALL0 instruction to skip past the constants and in the
3655 /* Load the static chain and function address from the trampoline. */ 4037 process get the PC into A0. This allows PC-relative access to
3656 if (use_call0) 4038 the constants without relying on L32R. */
3657 { 4039 fprintf (stream, "\tcall0\t.Lskipconsts\n");
3658 fprintf (stream, "\taddi\ta0, a0, 3\n"); 4040 }
3659 fprintf (stream, "\tl32i\ta9, a0, 0\n"); 4041 else
3660 fprintf (stream, "\tl32i\ta8, a0, 4\n"); 4042 fprintf (stream, "\tj\t.Lskipconsts\n");
3661 } 4043
3662 else 4044 fprintf (stream, "\t.align\t4\n");
3663 { 4045 fprintf (stream, ".Lchainval:%s0\n", integer_asm_op (4, TRUE));
3664 fprintf (stream, "\tl32r\ta9, .Lchainval\n"); 4046 fprintf (stream, ".Lfnaddr:%s0\n", integer_asm_op (4, TRUE));
3665 fprintf (stream, "\tl32r\ta8, .Lfnaddr\n"); 4047 fprintf (stream, ".Lskipconsts:\n");
3666 } 4048
3667 4049 /* Load the static chain and function address from the trampoline. */
3668 /* Store the static chain. */ 4050 if (use_call0)
3669 fprintf (stream, "\ts32i\ta9, sp, %d\n", MIN_FRAME_SIZE - 20); 4051 {
3670 4052 fprintf (stream, "\taddi\ta0, a0, 3\n");
3671 /* Set the proper stack pointer value. */ 4053 fprintf (stream, "\tl32i\ta8, a0, 0\n");
3672 fprintf (stream, "\tl32i\ta9, a8, 0\n"); 4054 fprintf (stream, "\tl32i\ta9, a0, 4\n");
3673 fprintf (stream, "\textui\ta9, a9, %d, 12\n", 4055 fprintf (stream, "\tmov\ta0, a10\n");
3674 TARGET_BIG_ENDIAN ? 8 : 12); 4056 }
3675 fprintf (stream, "\tslli\ta9, a9, 3\n"); 4057 else
3676 fprintf (stream, "\taddi\ta9, a9, %d\n", -MIN_FRAME_SIZE); 4058 {
3677 fprintf (stream, "\tsub\ta9, sp, a9\n"); 4059 fprintf (stream, "\tl32r\ta8, .Lchainval\n");
3678 fprintf (stream, "\tmovsp\tsp, a9\n"); 4060 fprintf (stream, "\tl32r\ta9, .Lfnaddr\n");
3679 4061 }
3680 if (use_call0) 4062 fprintf (stream, "\tjx\ta9\n");
3681 /* Restore the return address. */ 4063
3682 fprintf (stream, "\tmov\ta0, a10\n"); 4064 /* Pad size to a multiple of TRAMPOLINE_ALIGNMENT. */
3683 4065 if (use_call0)
3684 /* Jump to the instruction following the ENTRY. */ 4066 fprintf (stream, "\t.byte\t0\n");
3685 fprintf (stream, "\taddi\ta8, a8, 3\n"); 4067 else
3686 fprintf (stream, "\tjx\ta8\n"); 4068 fprintf (stream, "\tnop\n");
3687 4069 }
3688 /* Pad size to a multiple of TRAMPOLINE_ALIGNMENT. */
3689 if (use_call0)
3690 fprintf (stream, "\t.byte\t0\n");
3691 else
3692 fprintf (stream, "\tnop\n");
3693
3694 fprintf (stream, "\t.end no-transform\n"); 4070 fprintf (stream, "\t.end no-transform\n");
3695 } 4071 }
3696 4072
3697 static void 4073 static void
3698 xtensa_trampoline_init (rtx m_tramp, tree fndecl, rtx chain) 4074 xtensa_trampoline_init (rtx m_tramp, tree fndecl, rtx chain)
3699 { 4075 {
3700 rtx func = XEXP (DECL_RTL (fndecl), 0); 4076 rtx func = XEXP (DECL_RTL (fndecl), 0);
3701 bool use_call0 = (TARGET_CONST16 || TARGET_ABSOLUTE_LITERALS); 4077 bool use_call0 = (TARGET_CONST16 || TARGET_ABSOLUTE_LITERALS);
3702 int chain_off = use_call0 ? 12 : 8; 4078 int chain_off;
3703 int func_off = use_call0 ? 16 : 12; 4079 int func_off;
4080
4081 if (TARGET_WINDOWED_ABI)
4082 {
4083 chain_off = use_call0 ? 12 : 8;
4084 func_off = use_call0 ? 16 : 12;
4085 }
4086 else
4087 {
4088 chain_off = use_call0 ? 8 : 4;
4089 func_off = use_call0 ? 12 : 8;
4090 }
3704 4091
3705 emit_block_move (m_tramp, assemble_trampoline_template (), 4092 emit_block_move (m_tramp, assemble_trampoline_template (),
3706 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL); 4093 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
3707 4094
3708 emit_move_insn (adjust_address (m_tramp, SImode, chain_off), chain); 4095 emit_move_insn (adjust_address (m_tramp, SImode, chain_off), chain);
3709 emit_move_insn (adjust_address (m_tramp, SImode, func_off), func); 4096 emit_move_insn (adjust_address (m_tramp, SImode, func_off), func);
3710 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_sync_caches"), 4097 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__xtensa_sync_caches"),
3711 LCT_NORMAL, VOIDmode, 1, XEXP (m_tramp, 0), Pmode); 4098 LCT_NORMAL, VOIDmode, XEXP (m_tramp, 0), Pmode);
3712 } 4099 }
3713 4100
4101 /* Implement TARGET_LEGITIMATE_CONSTANT_P. */
4102
4103 static bool
4104 xtensa_legitimate_constant_p (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
4105 {
4106 return !xtensa_tls_referenced_p (x);
4107 }
4108
4109 /* Implement TARGET_CAN_USE_DOLOOP_P. */
4110
4111 static bool
4112 xtensa_can_use_doloop_p (const widest_int &, const widest_int &,
4113 unsigned int loop_depth, bool entered_at_top)
4114 {
 4115 /* Given the hardware limitations, only use doloop for innermost 4116 loops that are entered from the top. */
4117 if (loop_depth > 1 || !entered_at_top)
4118 return false;
4119
4120 return true;
4121 }
4122
 4123 /* Return NULL if INSN is valid within a low-overhead loop.
 4124 Otherwise return why doloop cannot be applied. */
4125
4126 static const char *
4127 xtensa_invalid_within_doloop (const rtx_insn *insn)
4128 {
4129 if (CALL_P (insn))
4130 return "Function call in the loop.";
4131
4132 if (JUMP_P (insn) && INSN_CODE (insn) == CODE_FOR_return)
4133 return "Return from a call instruction in the loop.";
4134
4135 return NULL;
4136 }
4137
4138 /* Optimize LOOP. */
4139
4140 static bool
4141 hwloop_optimize (hwloop_info loop)
4142 {
4143 int i;
4144 edge entry_edge;
4145 basic_block entry_bb;
4146 rtx iter_reg;
4147 rtx_insn *insn, *seq, *entry_after;
4148
4149 if (loop->depth > 1)
4150 {
4151 if (dump_file)
4152 fprintf (dump_file, ";; loop %d is not innermost\n",
4153 loop->loop_no);
4154 return false;
4155 }
4156
4157 if (!loop->incoming_dest)
4158 {
4159 if (dump_file)
4160 fprintf (dump_file, ";; loop %d has more than one entry\n",
4161 loop->loop_no);
4162 return false;
4163 }
4164
4165 if (loop->incoming_dest != loop->head)
4166 {
4167 if (dump_file)
4168 fprintf (dump_file, ";; loop %d is not entered from head\n",
4169 loop->loop_no);
4170 return false;
4171 }
4172
4173 if (loop->has_call || loop->has_asm)
4174 {
4175 if (dump_file)
4176 fprintf (dump_file, ";; loop %d has invalid insn\n",
4177 loop->loop_no);
4178 return false;
4179 }
4180
4181 /* Scan all the blocks to make sure they don't use iter_reg. */
4182 if (loop->iter_reg_used || loop->iter_reg_used_outside)
4183 {
4184 if (dump_file)
4185 fprintf (dump_file, ";; loop %d uses iterator\n",
4186 loop->loop_no);
4187 return false;
4188 }
4189
4190 /* Check if start_label appears before doloop_end. */
4191 insn = loop->start_label;
4192 while (insn && insn != loop->loop_end)
4193 insn = NEXT_INSN (insn);
4194
4195 if (!insn)
4196 {
4197 if (dump_file)
4198 fprintf (dump_file, ";; loop %d start_label not before loop_end\n",
4199 loop->loop_no);
4200 return false;
4201 }
4202
4203 /* Get the loop iteration register. */
4204 iter_reg = loop->iter_reg;
4205
4206 gcc_assert (REG_P (iter_reg));
4207
4208 entry_edge = NULL;
4209
4210 FOR_EACH_VEC_SAFE_ELT (loop->incoming, i, entry_edge)
4211 if (entry_edge->flags & EDGE_FALLTHRU)
4212 break;
4213
4214 if (entry_edge == NULL)
4215 return false;
4216
4217 /* Place the zero_cost_loop_start instruction before the loop. */
4218 entry_bb = entry_edge->src;
4219
4220 start_sequence ();
4221
4222 insn = emit_insn (gen_zero_cost_loop_start (loop->iter_reg,
4223 loop->start_label,
4224 loop->iter_reg));
4225
4226 seq = get_insns ();
4227
4228 if (!single_succ_p (entry_bb) || vec_safe_length (loop->incoming) > 1)
4229 {
4230 basic_block new_bb;
4231 edge e;
4232 edge_iterator ei;
4233
4234 emit_insn_before (seq, BB_HEAD (loop->head));
4235 seq = emit_label_before (gen_label_rtx (), seq);
4236 new_bb = create_basic_block (seq, insn, entry_bb);
4237 FOR_EACH_EDGE (e, ei, loop->incoming)
4238 {
4239 if (!(e->flags & EDGE_FALLTHRU))
4240 redirect_edge_and_branch_force (e, new_bb);
4241 else
4242 redirect_edge_succ (e, new_bb);
4243 }
4244
4245 make_edge (new_bb, loop->head, 0);
4246 }
4247 else
4248 {
4249 entry_after = BB_END (entry_bb);
4250 while (DEBUG_INSN_P (entry_after)
4251 || (NOTE_P (entry_after)
4252 && NOTE_KIND (entry_after) != NOTE_INSN_BASIC_BLOCK
4253 /* Make sure we don't split a call and its corresponding
4254 CALL_ARG_LOCATION note. */
4255 && NOTE_KIND (entry_after) != NOTE_INSN_CALL_ARG_LOCATION))
4256 entry_after = PREV_INSN (entry_after);
4257
4258 emit_insn_after (seq, entry_after);
4259 }
4260
4261 end_sequence ();
4262
4263 return true;
4264 }
4265
4266 /* A callback for the hw-doloop pass. Called when a loop we have discovered
4267 turns out not to be optimizable; we have to split the loop_end pattern into
4268 a subtract and a test. */
4269
4270 static void
4271 hwloop_fail (hwloop_info loop)
4272 {
4273 rtx test;
4274 rtx_insn *insn = loop->loop_end;
4275
4276 emit_insn_before (gen_addsi3 (loop->iter_reg,
4277 loop->iter_reg,
4278 constm1_rtx),
4279 loop->loop_end);
4280
4281 test = gen_rtx_NE (VOIDmode, loop->iter_reg, const0_rtx);
4282 insn = emit_jump_insn_before (gen_cbranchsi4 (test,
4283 loop->iter_reg, const0_rtx,
4284 loop->start_label),
4285 loop->loop_end);
4286
4287 JUMP_LABEL (insn) = loop->start_label;
4288 LABEL_NUSES (loop->start_label)++;
4289 delete_insn (loop->loop_end);
4290 }
4291
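Expressed as C, the fallback above turns the would-be zero-overhead loop into an ordinary count-down loop (a sketch of the resulting control flow, not the RTL itself):

    static void
    counted_loop_fallback (unsigned int iter)
    {
      do
        {
          /* ... loop body ... */
          iter -= 1;            /* the addsi3 emitted with constm1_rtx */
        }
      while (iter != 0);        /* the cbranchsi4 back to start_label */
    }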
4292 /* A callback for the hw-doloop pass. This function examines INSN; if
4293 it is a doloop_end pattern we recognize, return the reg rtx for the
4294 loop counter. Otherwise, return NULL_RTX. */
4295
4296 static rtx
4297 hwloop_pattern_reg (rtx_insn *insn)
4298 {
4299 rtx reg;
4300
4301 if (!JUMP_P (insn) || recog_memoized (insn) != CODE_FOR_loop_end)
4302 return NULL_RTX;
4303
4304 reg = SET_DEST (XVECEXP (PATTERN (insn), 0, 1));
4305 if (!REG_P (reg))
4306 return NULL_RTX;
4307
4308 return reg;
4309 }
4310
4311
4312 static struct hw_doloop_hooks xtensa_doloop_hooks =
4313 {
4314 hwloop_pattern_reg,
4315 hwloop_optimize,
4316 hwloop_fail
4317 };
4318
4319 /* Run from machine_dependent_reorg, this pass looks for doloop_end insns
4320 and tries to rewrite the RTL of these loops so that proper Xtensa
4321 hardware loops are generated. */
4322
4323 static void
4324 xtensa_reorg_loops (void)
4325 {
4326 if (TARGET_LOOPS)
4327 reorg_loops (false, &xtensa_doloop_hooks);
4328 }
4329
4330 /* Implement the TARGET_MACHINE_DEPENDENT_REORG pass. */
4331
4332 static void
4333 xtensa_reorg (void)
4334 {
4335 /* We are freeing block_for_insn in the toplev to keep compatibility
4336 with old MDEP_REORGS that are not CFG based. Recompute it now. */
4337 compute_bb_for_insn ();
4338
4339 df_analyze ();
4340
4341 /* Doloop optimization. */
4342 xtensa_reorg_loops ();
4343 }
4344
4345 /* Update register usage after having seen the compiler flags. */
4346
4347 static void
4348 xtensa_conditional_register_usage (void)
4349 {
4350 unsigned i, c_mask;
4351
4352 c_mask = TARGET_WINDOWED_ABI ? (1 << 1) : (1 << 2);
4353
4354 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4355 {
4356 /* Set/reset conditionally defined registers from
4357 CALL_USED_REGISTERS initializer. */
4358 if (call_used_regs[i] > 1)
4359 call_used_regs[i] = !!(call_used_regs[i] & c_mask);
4360 }
4361
4362 /* Remove hard FP register from the preferred reload registers set. */
4363 CLEAR_HARD_REG_BIT (reg_class_contents[(int)RL_REGS],
4364 HARD_FRAME_POINTER_REGNUM);
4365 }
4366
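The encoding consumed above, spelled out (as read from the code; the values are those expected in the CALL_USED_REGISTERS initializer):

    /* 1         -> call-used under both ABIs (left untouched here)
       (1 << 1)  -> call-used only for the windowed ABI
       (1 << 2)  -> call-used only for call0
       Entries greater than 1 collapse to 0 or 1 once the ABI is known.  */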
 4367 /* Map a hard register number to its register class. */
4368
4369 enum reg_class xtensa_regno_to_class (int regno)
4370 {
4371 static const enum reg_class regno_to_class[FIRST_PSEUDO_REGISTER] =
4372 {
4373 RL_REGS, SP_REG, RL_REGS, RL_REGS,
4374 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
4375 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
4376 RL_REGS, RL_REGS, RL_REGS, RL_REGS,
4377 AR_REGS, AR_REGS, BR_REGS,
4378 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
4379 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
4380 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
4381 FP_REGS, FP_REGS, FP_REGS, FP_REGS,
4382 ACC_REG,
4383 };
4384
4385 if (regno == HARD_FRAME_POINTER_REGNUM)
4386 return GR_REGS;
4387 else
4388 return regno_to_class[regno];
4389 }
4390
4391 /* Implement TARGET_CONSTANT_ALIGNMENT. Align string constants and
4392 constructors to at least a word boundary. The typical use of this
4393 macro is to increase alignment for string constants to be word
4394 aligned so that 'strcpy' calls that copy constants can be done
4395 inline. */
4396
4397 static HOST_WIDE_INT
4398 xtensa_constant_alignment (const_tree exp, HOST_WIDE_INT align)
4399 {
4400 if ((TREE_CODE (exp) == STRING_CST || TREE_CODE (exp) == CONSTRUCTOR)
4401 && !optimize_size)
4402 return MAX (align, BITS_PER_WORD);
4403 return align;
4404 }
4405
4406 /* Implement TARGET_STARTING_FRAME_OFFSET. */
4407
4408 static HOST_WIDE_INT
4409 xtensa_starting_frame_offset (void)
4410 {
4411 if (FRAME_GROWS_DOWNWARD)
4412 return 0;
4413 return crtl->outgoing_args_size;
4414 }
3714 4415
3715 #include "gt-xtensa.h" 4416 #include "gt-xtensa.h"