comparison gcc/config/avr/avr.c @ 0:a06113de4d67

first commit
author kent <kent@cr.ie.u-ryukyu.ac.jp>
date Fri, 17 Jul 2009 14:47:48 +0900
parents
children 855418dad1a3
1 /* Subroutines for insn-output.c for ATMEL AVR micro controllers
2 Copyright (C) 1998, 1999, 2000, 2001, 2002, 2004, 2005, 2006, 2007, 2008,
3 2009 Free Software Foundation, Inc.
4 Contributed by Denis Chertykov (denisc@overta.ru)
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify
9 it under the terms of the GNU General Public License as published by
10 the Free Software Foundation; either version 3, or (at your option)
11 any later version.
12
13 GCC is distributed in the hope that it will be useful,
14 but WITHOUT ANY WARRANTY; without even the implied warranty of
15 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 GNU General Public License for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "real.h"
30 #include "insn-config.h"
31 #include "conditions.h"
32 #include "insn-attr.h"
33 #include "flags.h"
34 #include "reload.h"
35 #include "tree.h"
36 #include "output.h"
37 #include "expr.h"
38 #include "toplev.h"
39 #include "obstack.h"
40 #include "function.h"
41 #include "recog.h"
42 #include "ggc.h"
43 #include "tm_p.h"
44 #include "target.h"
45 #include "target-def.h"
46 #include "params.h"
47 #include "df.h"
48
49 /* Maximal allowed offset for an address in the LD command */
50 #define MAX_LD_OFFSET(MODE) (64 - (signed)GET_MODE_SIZE (MODE))
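/* Editorial note (not part of the original source): a quick worked example of
   the macro above, assuming the usual AVR mode sizes.  The LDD instruction
   accepts displacements 0..63, so the largest displacement at which a whole
   object of mode MODE still fits is 64 minus its size:
     MAX_LD_OFFSET (QImode) == 64 - 1 == 63
     MAX_LD_OFFSET (HImode) == 64 - 2 == 62
     MAX_LD_OFFSET (SImode) == 64 - 4 == 60  */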
51
52 static int avr_naked_function_p (tree);
53 static int interrupt_function_p (tree);
54 static int signal_function_p (tree);
55 static int avr_OS_task_function_p (tree);
56 static int avr_OS_main_function_p (tree);
57 static int avr_regs_to_save (HARD_REG_SET *);
58 static int get_sequence_length (rtx insns);
59 static int sequent_regs_live (void);
60 static const char *ptrreg_to_str (int);
61 static const char *cond_string (enum rtx_code);
62 static int avr_num_arg_regs (enum machine_mode, tree);
63
64 static RTX_CODE compare_condition (rtx insn);
65 static int compare_sign_p (rtx insn);
66 static tree avr_handle_progmem_attribute (tree *, tree, tree, int, bool *);
67 static tree avr_handle_fndecl_attribute (tree *, tree, tree, int, bool *);
68 static tree avr_handle_fntype_attribute (tree *, tree, tree, int, bool *);
69 const struct attribute_spec avr_attribute_table[];
70 static bool avr_assemble_integer (rtx, unsigned int, int);
71 static void avr_file_start (void);
72 static void avr_file_end (void);
73 static void avr_asm_function_end_prologue (FILE *);
74 static void avr_asm_function_begin_epilogue (FILE *);
75 static rtx avr_function_value (const_tree, const_tree, bool);
76 static void avr_insert_attributes (tree, tree *);
77 static void avr_asm_init_sections (void);
78 static unsigned int avr_section_type_flags (tree, const char *, int);
79
80 static void avr_reorg (void);
81 static void avr_asm_out_ctor (rtx, int);
82 static void avr_asm_out_dtor (rtx, int);
83 static int avr_operand_rtx_cost (rtx, enum machine_mode, enum rtx_code, bool);
84 static bool avr_rtx_costs (rtx, int, int, int *, bool);
85 static int avr_address_cost (rtx, bool);
86 static bool avr_return_in_memory (const_tree, const_tree);
87 static struct machine_function * avr_init_machine_status (void);
88 static rtx avr_builtin_setjmp_frame_value (void);
89 static bool avr_hard_regno_scratch_ok (unsigned int);
90
91 /* Allocate registers from r25 down to r8 for function call parameters. */
92 #define FIRST_CUM_REG 26
93
94 /* Temporary register RTX (gen_rtx_REG (QImode, TMP_REGNO)) */
95 static GTY(()) rtx tmp_reg_rtx;
96
97 /* Zeroed register RTX (gen_rtx_REG (QImode, ZERO_REGNO)) */
98 static GTY(()) rtx zero_reg_rtx;
99
100 /* AVR register names {"r0", "r1", ..., "r31"} */
101 static const char *const avr_regnames[] = REGISTER_NAMES;
102
103 /* This holds the last insn address. */
104 static int last_insn_address = 0;
105
106 /* Preprocessor macros to define depending on MCU type. */
107 const char *avr_extra_arch_macro;
108
109 /* Current architecture. */
110 const struct base_arch_s *avr_current_arch;
111
112 section *progmem_section;
113
114 static const struct base_arch_s avr_arch_types[] = {
115 { 1, 0, 0, 0, 0, 0, 0, 0, NULL }, /* unknown device specified */
116 { 1, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=1" },
117 { 0, 0, 0, 0, 0, 0, 0, 0, "__AVR_ARCH__=2" },
118 { 0, 0, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=25" },
119 { 0, 0, 1, 0, 0, 0, 0, 0, "__AVR_ARCH__=3" },
120 { 0, 0, 1, 0, 1, 0, 0, 0, "__AVR_ARCH__=31" },
121 { 0, 0, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=35" },
122 { 0, 1, 0, 1, 0, 0, 0, 0, "__AVR_ARCH__=4" },
123 { 0, 1, 1, 1, 0, 0, 0, 0, "__AVR_ARCH__=5" },
124 { 0, 1, 1, 1, 1, 1, 0, 0, "__AVR_ARCH__=51" },
125 { 0, 1, 1, 1, 1, 1, 1, 0, "__AVR_ARCH__=6" }
126 };
127
128 /* These names are used as the index into the avr_arch_types[] table
129 above. */
130
131 enum avr_arch
132 {
133 ARCH_UNKNOWN,
134 ARCH_AVR1,
135 ARCH_AVR2,
136 ARCH_AVR25,
137 ARCH_AVR3,
138 ARCH_AVR31,
139 ARCH_AVR35,
140 ARCH_AVR4,
141 ARCH_AVR5,
142 ARCH_AVR51,
143 ARCH_AVR6
144 };
145
146 struct mcu_type_s {
147 const char *const name;
148 int arch; /* index in avr_arch_types[] */
149 /* Must lie outside user's namespace. NULL == no macro. */
150 const char *const macro;
151 };
152
153 /* List of all known AVR MCU types - if updated, it has to be kept
154 in sync in several places (FIXME: is there a better way?):
155 - here
156 - avr.h (CPP_SPEC, LINK_SPEC, CRT_BINUTILS_SPECS)
157 - t-avr (MULTILIB_MATCHES)
158 - gas/config/tc-avr.c
159 - avr-libc */
160
161 static const struct mcu_type_s avr_mcu_types[] = {
162 /* Classic, <= 8K. */
163 { "avr2", ARCH_AVR2, NULL },
164 { "at90s2313", ARCH_AVR2, "__AVR_AT90S2313__" },
165 { "at90s2323", ARCH_AVR2, "__AVR_AT90S2323__" },
166 { "at90s2333", ARCH_AVR2, "__AVR_AT90S2333__" },
167 { "at90s2343", ARCH_AVR2, "__AVR_AT90S2343__" },
168 { "attiny22", ARCH_AVR2, "__AVR_ATtiny22__" },
169 { "attiny26", ARCH_AVR2, "__AVR_ATtiny26__" },
170 { "at90s4414", ARCH_AVR2, "__AVR_AT90S4414__" },
171 { "at90s4433", ARCH_AVR2, "__AVR_AT90S4433__" },
172 { "at90s4434", ARCH_AVR2, "__AVR_AT90S4434__" },
173 { "at90s8515", ARCH_AVR2, "__AVR_AT90S8515__" },
174 { "at90c8534", ARCH_AVR2, "__AVR_AT90C8534__" },
175 { "at90s8535", ARCH_AVR2, "__AVR_AT90S8535__" },
176 /* Classic + MOVW, <= 8K. */
177 { "avr25", ARCH_AVR25, NULL },
178 { "ata6289", ARCH_AVR25, "__AVR_ATA6289__" },
179 { "attiny13", ARCH_AVR25, "__AVR_ATtiny13__" },
180 { "attiny13a", ARCH_AVR25, "__AVR_ATtiny13A__" },
181 { "attiny2313", ARCH_AVR25, "__AVR_ATtiny2313__" },
182 { "attiny24", ARCH_AVR25, "__AVR_ATtiny24__" },
183 { "attiny44", ARCH_AVR25, "__AVR_ATtiny44__" },
184 { "attiny84", ARCH_AVR25, "__AVR_ATtiny84__" },
185 { "attiny25", ARCH_AVR25, "__AVR_ATtiny25__" },
186 { "attiny45", ARCH_AVR25, "__AVR_ATtiny45__" },
187 { "attiny85", ARCH_AVR25, "__AVR_ATtiny85__" },
188 { "attiny261", ARCH_AVR25, "__AVR_ATtiny261__" },
189 { "attiny461", ARCH_AVR25, "__AVR_ATtiny461__" },
190 { "attiny861", ARCH_AVR25, "__AVR_ATtiny861__" },
191 { "attiny43u", ARCH_AVR25, "__AVR_ATtiny43U__" },
192 { "attiny87", ARCH_AVR25, "__AVR_ATtiny87__" },
193 { "attiny48", ARCH_AVR25, "__AVR_ATtiny48__" },
194 { "attiny88", ARCH_AVR25, "__AVR_ATtiny88__" },
195 { "at86rf401", ARCH_AVR25, "__AVR_AT86RF401__" },
196 /* Classic, > 8K, <= 64K. */
197 { "avr3", ARCH_AVR3, NULL },
198 { "at43usb355", ARCH_AVR3, "__AVR_AT43USB355__" },
199 { "at76c711", ARCH_AVR3, "__AVR_AT76C711__" },
200 /* Classic, == 128K. */
201 { "avr31", ARCH_AVR31, NULL },
202 { "atmega103", ARCH_AVR31, "__AVR_ATmega103__" },
203 { "at43usb320", ARCH_AVR31, "__AVR_AT43USB320__" },
204 /* Classic + MOVW + JMP/CALL. */
205 { "avr35", ARCH_AVR35, NULL },
206 { "at90usb82", ARCH_AVR35, "__AVR_AT90USB82__" },
207 { "at90usb162", ARCH_AVR35, "__AVR_AT90USB162__" },
208 { "attiny167", ARCH_AVR35, "__AVR_ATtiny167__" },
209 { "attiny327", ARCH_AVR35, "__AVR_ATtiny327__" },
210 /* Enhanced, <= 8K. */
211 { "avr4", ARCH_AVR4, NULL },
212 { "atmega8", ARCH_AVR4, "__AVR_ATmega8__" },
213 { "atmega48", ARCH_AVR4, "__AVR_ATmega48__" },
214 { "atmega48p", ARCH_AVR4, "__AVR_ATmega48P__" },
215 { "atmega88", ARCH_AVR4, "__AVR_ATmega88__" },
216 { "atmega88p", ARCH_AVR4, "__AVR_ATmega88P__" },
217 { "atmega8515", ARCH_AVR4, "__AVR_ATmega8515__" },
218 { "atmega8535", ARCH_AVR4, "__AVR_ATmega8535__" },
219 { "atmega8hva", ARCH_AVR4, "__AVR_ATmega8HVA__" },
220 { "atmega4hvd", ARCH_AVR4, "__AVR_ATmega4HVD__" },
221 { "atmega8hvd", ARCH_AVR4, "__AVR_ATmega8HVD__" },
222 { "atmega8c1", ARCH_AVR4, "__AVR_ATmega8C1__" },
223 { "atmega8m1", ARCH_AVR4, "__AVR_ATmega8M1__" },
224 { "at90pwm1", ARCH_AVR4, "__AVR_AT90PWM1__" },
225 { "at90pwm2", ARCH_AVR4, "__AVR_AT90PWM2__" },
226 { "at90pwm2b", ARCH_AVR4, "__AVR_AT90PWM2B__" },
227 { "at90pwm3", ARCH_AVR4, "__AVR_AT90PWM3__" },
228 { "at90pwm3b", ARCH_AVR4, "__AVR_AT90PWM3B__" },
229 { "at90pwm81", ARCH_AVR4, "__AVR_AT90PWM81__" },
230 /* Enhanced, > 8K, <= 64K. */
231 { "avr5", ARCH_AVR5, NULL },
232 { "atmega16", ARCH_AVR5, "__AVR_ATmega16__" },
233 { "atmega161", ARCH_AVR5, "__AVR_ATmega161__" },
234 { "atmega162", ARCH_AVR5, "__AVR_ATmega162__" },
235 { "atmega163", ARCH_AVR5, "__AVR_ATmega163__" },
236 { "atmega164p", ARCH_AVR5, "__AVR_ATmega164P__" },
237 { "atmega165", ARCH_AVR5, "__AVR_ATmega165__" },
238 { "atmega165p", ARCH_AVR5, "__AVR_ATmega165P__" },
239 { "atmega168", ARCH_AVR5, "__AVR_ATmega168__" },
240 { "atmega168p", ARCH_AVR5, "__AVR_ATmega168P__" },
241 { "atmega169", ARCH_AVR5, "__AVR_ATmega169__" },
242 { "atmega169p", ARCH_AVR5, "__AVR_ATmega169P__" },
243 { "atmega32", ARCH_AVR5, "__AVR_ATmega32__" },
244 { "atmega323", ARCH_AVR5, "__AVR_ATmega323__" },
245 { "atmega324p", ARCH_AVR5, "__AVR_ATmega324P__" },
246 { "atmega325", ARCH_AVR5, "__AVR_ATmega325__" },
247 { "atmega325p", ARCH_AVR5, "__AVR_ATmega325P__" },
248 { "atmega3250", ARCH_AVR5, "__AVR_ATmega3250__" },
249 { "atmega3250p", ARCH_AVR5, "__AVR_ATmega3250P__" },
250 { "atmega328p", ARCH_AVR5, "__AVR_ATmega328P__" },
251 { "atmega329", ARCH_AVR5, "__AVR_ATmega329__" },
252 { "atmega329p", ARCH_AVR5, "__AVR_ATmega329P__" },
253 { "atmega3290", ARCH_AVR5, "__AVR_ATmega3290__" },
254 { "atmega3290p", ARCH_AVR5, "__AVR_ATmega3290P__" },
255 { "atmega406", ARCH_AVR5, "__AVR_ATmega406__" },
256 { "atmega64", ARCH_AVR5, "__AVR_ATmega64__" },
257 { "atmega640", ARCH_AVR5, "__AVR_ATmega640__" },
258 { "atmega644", ARCH_AVR5, "__AVR_ATmega644__" },
259 { "atmega644p", ARCH_AVR5, "__AVR_ATmega644P__" },
260 { "atmega645", ARCH_AVR5, "__AVR_ATmega645__" },
261 { "atmega6450", ARCH_AVR5, "__AVR_ATmega6450__" },
262 { "atmega649", ARCH_AVR5, "__AVR_ATmega649__" },
263 { "atmega6490", ARCH_AVR5, "__AVR_ATmega6490__" },
264 { "atmega16hva", ARCH_AVR5, "__AVR_ATmega16HVA__" },
265 { "atmega16hvb", ARCH_AVR5, "__AVR_ATmega16HVB__" },
266 { "atmega32hvb", ARCH_AVR5, "__AVR_ATmega32HVB__" },
267 { "at90can32", ARCH_AVR5, "__AVR_AT90CAN32__" },
268 { "at90can64", ARCH_AVR5, "__AVR_AT90CAN64__" },
269 { "at90pwm216", ARCH_AVR5, "__AVR_AT90PWM216__" },
270 { "at90pwm316", ARCH_AVR5, "__AVR_AT90PWM316__" },
271 { "atmega16c1", ARCH_AVR5, "__AVR_ATmega16C1__" },
272 { "atmega32c1", ARCH_AVR5, "__AVR_ATmega32C1__" },
273 { "atmega64c1", ARCH_AVR5, "__AVR_ATmega64C1__" },
274 { "atmega16m1", ARCH_AVR5, "__AVR_ATmega16M1__" },
275 { "atmega32m1", ARCH_AVR5, "__AVR_ATmega32M1__" },
276 { "atmega64m1", ARCH_AVR5, "__AVR_ATmega64M1__" },
277 { "atmega16u4", ARCH_AVR5, "__AVR_ATmega16U4__" },
278 { "atmega32u4", ARCH_AVR5, "__AVR_ATmega32U4__" },
279 { "atmega32u6", ARCH_AVR5, "__AVR_ATmega32U6__" },
280 { "at90scr100", ARCH_AVR5, "__AVR_AT90SCR100__" },
281 { "at90usb646", ARCH_AVR5, "__AVR_AT90USB646__" },
282 { "at90usb647", ARCH_AVR5, "__AVR_AT90USB647__" },
283 { "at94k", ARCH_AVR5, "__AVR_AT94K__" },
284 /* Enhanced, == 128K. */
285 { "avr51", ARCH_AVR51, NULL },
286 { "atmega128", ARCH_AVR51, "__AVR_ATmega128__" },
287 { "atmega1280", ARCH_AVR51, "__AVR_ATmega1280__" },
288 { "atmega1281", ARCH_AVR51, "__AVR_ATmega1281__" },
289 { "atmega1284p", ARCH_AVR51, "__AVR_ATmega1284P__" },
290 { "atmega128rfa1", ARCH_AVR51, "__AVR_ATmega128RFA1__" },
291 { "at90can128", ARCH_AVR51, "__AVR_AT90CAN128__" },
292 { "at90usb1286", ARCH_AVR51, "__AVR_AT90USB1286__" },
293 { "at90usb1287", ARCH_AVR51, "__AVR_AT90USB1287__" },
294 { "m3000f", ARCH_AVR51, "__AVR_M3000F__" },
295 { "m3000s", ARCH_AVR51, "__AVR_M3000S__" },
296 { "m3001b", ARCH_AVR51, "__AVR_M3001B__" },
297 /* 3-Byte PC. */
298 { "avr6", ARCH_AVR6, NULL },
299 { "atmega2560", ARCH_AVR6, "__AVR_ATmega2560__" },
300 { "atmega2561", ARCH_AVR6, "__AVR_ATmega2561__" },
301 /* Assembler only. */
302 { "avr1", ARCH_AVR1, NULL },
303 { "at90s1200", ARCH_AVR1, "__AVR_AT90S1200__" },
304 { "attiny11", ARCH_AVR1, "__AVR_ATtiny11__" },
305 { "attiny12", ARCH_AVR1, "__AVR_ATtiny12__" },
306 { "attiny15", ARCH_AVR1, "__AVR_ATtiny15__" },
307 { "attiny28", ARCH_AVR1, "__AVR_ATtiny28__" },
308 { NULL, ARCH_UNKNOWN, NULL }
309 };
310
311 int avr_case_values_threshold = 30000;
312
313 /* Initialize the GCC target structure. */
314 #undef TARGET_ASM_ALIGNED_HI_OP
315 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
316 #undef TARGET_ASM_ALIGNED_SI_OP
317 #define TARGET_ASM_ALIGNED_SI_OP "\t.long\t"
318 #undef TARGET_ASM_UNALIGNED_HI_OP
319 #define TARGET_ASM_UNALIGNED_HI_OP "\t.word\t"
320 #undef TARGET_ASM_UNALIGNED_SI_OP
321 #define TARGET_ASM_UNALIGNED_SI_OP "\t.long\t"
322 #undef TARGET_ASM_INTEGER
323 #define TARGET_ASM_INTEGER avr_assemble_integer
324 #undef TARGET_ASM_FILE_START
325 #define TARGET_ASM_FILE_START avr_file_start
326 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
327 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
328 #undef TARGET_ASM_FILE_END
329 #define TARGET_ASM_FILE_END avr_file_end
330
331 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
332 #define TARGET_ASM_FUNCTION_END_PROLOGUE avr_asm_function_end_prologue
333 #undef TARGET_ASM_FUNCTION_BEGIN_EPILOGUE
334 #define TARGET_ASM_FUNCTION_BEGIN_EPILOGUE avr_asm_function_begin_epilogue
335 #undef TARGET_FUNCTION_VALUE
336 #define TARGET_FUNCTION_VALUE avr_function_value
337 #undef TARGET_ATTRIBUTE_TABLE
338 #define TARGET_ATTRIBUTE_TABLE avr_attribute_table
339 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
340 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
341 #undef TARGET_INSERT_ATTRIBUTES
342 #define TARGET_INSERT_ATTRIBUTES avr_insert_attributes
343 #undef TARGET_SECTION_TYPE_FLAGS
344 #define TARGET_SECTION_TYPE_FLAGS avr_section_type_flags
345 #undef TARGET_RTX_COSTS
346 #define TARGET_RTX_COSTS avr_rtx_costs
347 #undef TARGET_ADDRESS_COST
348 #define TARGET_ADDRESS_COST avr_address_cost
349 #undef TARGET_MACHINE_DEPENDENT_REORG
350 #define TARGET_MACHINE_DEPENDENT_REORG avr_reorg
351
352 #undef TARGET_RETURN_IN_MEMORY
353 #define TARGET_RETURN_IN_MEMORY avr_return_in_memory
354
355 #undef TARGET_STRICT_ARGUMENT_NAMING
356 #define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true
357
358 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
359 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE avr_builtin_setjmp_frame_value
360
361 #undef TARGET_HARD_REGNO_SCRATCH_OK
362 #define TARGET_HARD_REGNO_SCRATCH_OK avr_hard_regno_scratch_ok
363
364 struct gcc_target targetm = TARGET_INITIALIZER;
365
366 void
367 avr_override_options (void)
368 {
369 const struct mcu_type_s *t;
370 static bool warned_no_tablejump_deprecated = false;
371
372 flag_delete_null_pointer_checks = 0;
373
374 if (!PARAM_SET_P (PARAM_INLINE_CALL_COST))
375 set_param_value ("inline-call-cost", 5);
376
377 for (t = avr_mcu_types; t->name; t++)
378 if (strcmp (t->name, avr_mcu_name) == 0)
379 break;
380
381 if (!t->name)
382 {
383 fprintf (stderr, "unknown MCU '%s' specified\nKnown MCU names:\n",
384 avr_mcu_name);
385 for (t = avr_mcu_types; t->name; t++)
386 fprintf (stderr," %s\n", t->name);
387 }
388
389 avr_current_arch = &avr_arch_types[t->arch];
390 avr_extra_arch_macro = t->macro;
391
392 if (optimize && !TARGET_NO_TABLEJUMP)
393 avr_case_values_threshold =
394 (!AVR_HAVE_JMP_CALL || TARGET_CALL_PROLOGUES) ? 8 : 17;
395
396 if (TARGET_NO_TABLEJUMP
397 && !warned_no_tablejump_deprecated)
398 {
399 inform (input_location, "the -mno-tablejump switch is deprecated");
400 inform (input_location, "GCC 4.4 is the last release with this switch");
401 inform (input_location, "use the -fno-jump-tables switch instead");
402 warned_no_tablejump_deprecated = true;
403 }
404
405 tmp_reg_rtx = gen_rtx_REG (QImode, TMP_REGNO);
406 zero_reg_rtx = gen_rtx_REG (QImode, ZERO_REGNO);
407
408 init_machine_status = avr_init_machine_status;
409 }
410
411 /* Return register class from register number. */
412
413 static const int reg_class_tab[]={
414 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
415 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
416 GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,GENERAL_REGS,
417 GENERAL_REGS, /* r0 - r15 */
418 LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,LD_REGS,
419 LD_REGS, /* r16 - 23 */
420 ADDW_REGS,ADDW_REGS, /* r24,r25 */
421 POINTER_X_REGS,POINTER_X_REGS, /* r26,27 */
422 POINTER_Y_REGS,POINTER_Y_REGS, /* r28,r29 */
423 POINTER_Z_REGS,POINTER_Z_REGS, /* r30,r31 */
424 STACK_REG,STACK_REG /* SPL,SPH */
425 };
426
427 /* Function to set up the backend function structure. */
428
429 static struct machine_function *
430 avr_init_machine_status (void)
431 {
432 return ((struct machine_function *)
433 ggc_alloc_cleared (sizeof (struct machine_function)));
434 }
435
436 /* Return register class for register R. */
437
438 enum reg_class
439 avr_regno_reg_class (int r)
440 {
441 if (r <= 33)
442 return reg_class_tab[r];
443 return ALL_REGS;
444 }
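/* Editorial note (not part of the original source): illustrative lookups in
   the table above, assuming the register numbering used in this port:
     avr_regno_reg_class (0)  == GENERAL_REGS   (r0)
     avr_regno_reg_class (16) == LD_REGS        (r16)
     avr_regno_reg_class (24) == ADDW_REGS      (r24)
     avr_regno_reg_class (28) == POINTER_Y_REGS (r28)
     avr_regno_reg_class (32) == STACK_REG      (SPL)
   and any register number above 33 falls through to ALL_REGS.  */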
445
446 /* Return nonzero if FUNC is a naked function. */
447
448 static int
449 avr_naked_function_p (tree func)
450 {
451 tree a;
452
453 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
454
455 a = lookup_attribute ("naked", TYPE_ATTRIBUTES (TREE_TYPE (func)));
456 return a != NULL_TREE;
457 }
458
459 /* Return nonzero if FUNC is an interrupt function as specified
460 by the "interrupt" attribute. */
461
462 static int
463 interrupt_function_p (tree func)
464 {
465 tree a;
466
467 if (TREE_CODE (func) != FUNCTION_DECL)
468 return 0;
469
470 a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
471 return a != NULL_TREE;
472 }
473
474 /* Return nonzero if FUNC is a signal function as specified
475 by the "signal" attribute. */
476
477 static int
478 signal_function_p (tree func)
479 {
480 tree a;
481
482 if (TREE_CODE (func) != FUNCTION_DECL)
483 return 0;
484
485 a = lookup_attribute ("signal", DECL_ATTRIBUTES (func));
486 return a != NULL_TREE;
487 }
488
489 /* Return nonzero if FUNC is an OS_task function. */
490
491 static int
492 avr_OS_task_function_p (tree func)
493 {
494 tree a;
495
496 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
497
498 a = lookup_attribute ("OS_task", TYPE_ATTRIBUTES (TREE_TYPE (func)));
499 return a != NULL_TREE;
500 }
501
502 /* Return nonzero if FUNC is an OS_main function. */
503
504 static int
505 avr_OS_main_function_p (tree func)
506 {
507 tree a;
508
509 gcc_assert (TREE_CODE (func) == FUNCTION_DECL);
510
511 a = lookup_attribute ("OS_main", TYPE_ATTRIBUTES (TREE_TYPE (func)));
512 return a != NULL_TREE;
513 }
514
515 /* Return the number of hard registers to push/pop in the prologue/epilogue
516 of the current function, and optionally store these registers in SET. */
517
518 static int
519 avr_regs_to_save (HARD_REG_SET *set)
520 {
521 int reg, count;
522 int int_or_sig_p = (interrupt_function_p (current_function_decl)
523 || signal_function_p (current_function_decl));
524
525 if (!reload_completed)
526 cfun->machine->is_leaf = leaf_function_p ();
527
528 if (set)
529 CLEAR_HARD_REG_SET (*set);
530 count = 0;
531
532 /* No need to save any registers if the function never returns or
533 has the "OS_task" or "OS_main" attribute. */
534 if (TREE_THIS_VOLATILE (current_function_decl)
535 || cfun->machine->is_OS_task
536 || cfun->machine->is_OS_main)
537 return 0;
538
539 for (reg = 0; reg < 32; reg++)
540 {
541 /* Do not push/pop __tmp_reg__, __zero_reg__, as well as
542 any global register variables. */
543 if (fixed_regs[reg])
544 continue;
545
546 if ((int_or_sig_p && !cfun->machine->is_leaf && call_used_regs[reg])
547 || (df_regs_ever_live_p (reg)
548 && (int_or_sig_p || !call_used_regs[reg])
549 && !(frame_pointer_needed
550 && (reg == REG_Y || reg == (REG_Y+1)))))
551 {
552 if (set)
553 SET_HARD_REG_BIT (*set, reg);
554 count++;
555 }
556 }
557 return count;
558 }
559
560 /* Compute offset between arg_pointer and frame_pointer. */
561
562 int
563 initial_elimination_offset (int from, int to)
564 {
565 if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
566 return 0;
567 else
568 {
569 int offset = frame_pointer_needed ? 2 : 0;
570 int avr_pc_size = AVR_HAVE_EIJMP_EICALL ? 3 : 2;
571
572 offset += avr_regs_to_save (NULL);
573 return get_frame_size () + (avr_pc_size) + 1 + offset;
574 }
575 }
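/* Editorial note (not part of the original source): a worked example of the
   offset computation above, with assumed numbers.  On a 2-byte-PC device
   (no EIJMP/EICALL), a function that needs the frame pointer, saves four
   registers and has a 10-byte frame gets:
     offset = 2 (saved frame pointer) + 4 (saved registers) = 6
     return 10 (frame size) + 2 (return address) + 1 + 6 = 19  */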
576
577 /* The actual start of the frame is virtual_stack_vars_rtx, which is offset
578 from the frame pointer by +STARTING_FRAME_OFFSET.
579 Using saved frame = virtual_stack_vars_rtx - STARTING_FRAME_OFFSET
580 avoids creating add/sub of offset in nonlocal goto and setjmp. */
581
582 rtx avr_builtin_setjmp_frame_value (void)
583 {
584 return gen_rtx_MINUS (Pmode, virtual_stack_vars_rtx,
585 gen_int_mode (STARTING_FRAME_OFFSET, Pmode));
586 }
587
588 /* Return 1 if the function epilogue is just a single "ret". */
589
590 int
591 avr_simple_epilogue (void)
592 {
593 return (! frame_pointer_needed
594 && get_frame_size () == 0
595 && avr_regs_to_save (NULL) == 0
596 && ! interrupt_function_p (current_function_decl)
597 && ! signal_function_p (current_function_decl)
598 && ! avr_naked_function_p (current_function_decl)
599 && ! TREE_THIS_VOLATILE (current_function_decl));
600 }
601
602 /* Check the sequence of live registers; return its length if they form one contiguous block, otherwise return 0. */
603
604 static int
605 sequent_regs_live (void)
606 {
607 int reg;
608 int live_seq=0;
609 int cur_seq=0;
610
611 for (reg = 0; reg < 18; ++reg)
612 {
613 if (!call_used_regs[reg])
614 {
615 if (df_regs_ever_live_p (reg))
616 {
617 ++live_seq;
618 ++cur_seq;
619 }
620 else
621 cur_seq = 0;
622 }
623 }
624
625 if (!frame_pointer_needed)
626 {
627 if (df_regs_ever_live_p (REG_Y))
628 {
629 ++live_seq;
630 ++cur_seq;
631 }
632 else
633 cur_seq = 0;
634
635 if (df_regs_ever_live_p (REG_Y+1))
636 {
637 ++live_seq;
638 ++cur_seq;
639 }
640 else
641 cur_seq = 0;
642 }
643 else
644 {
645 cur_seq += 2;
646 live_seq += 2;
647 }
648 return (cur_seq == live_seq) ? live_seq : 0;
649 }
650
651 /* Obtain the length of the sequence of insns INSNS. */
652
653 int
654 get_sequence_length (rtx insns)
655 {
656 rtx insn;
657 int length;
658
659 for (insn = insns, length = 0; insn; insn = NEXT_INSN (insn))
660 length += get_attr_length (insn);
661
662 return length;
663 }
664
665 /* Output function prologue. */
666
667 void
668 expand_prologue (void)
669 {
670 int live_seq;
671 HARD_REG_SET set;
672 int minimize;
673 HOST_WIDE_INT size = get_frame_size();
674 /* Define templates for push instructions. */
675 rtx pushbyte = gen_rtx_MEM (QImode,
676 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
677 rtx pushword = gen_rtx_MEM (HImode,
678 gen_rtx_POST_DEC (HImode, stack_pointer_rtx));
679 rtx insn;
680
681 last_insn_address = 0;
682
683 /* Init cfun->machine. */
684 cfun->machine->is_naked = avr_naked_function_p (current_function_decl);
685 cfun->machine->is_interrupt = interrupt_function_p (current_function_decl);
686 cfun->machine->is_signal = signal_function_p (current_function_decl);
687 cfun->machine->is_OS_task = avr_OS_task_function_p (current_function_decl);
688 cfun->machine->is_OS_main = avr_OS_main_function_p (current_function_decl);
689
690 /* Prologue: naked. */
691 if (cfun->machine->is_naked)
692 {
693 return;
694 }
695
696 avr_regs_to_save (&set);
697 live_seq = sequent_regs_live ();
698 minimize = (TARGET_CALL_PROLOGUES
699 && !cfun->machine->is_interrupt
700 && !cfun->machine->is_signal
701 && !cfun->machine->is_OS_task
702 && !cfun->machine->is_OS_main
703 && live_seq);
704
705 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
706 {
707 if (cfun->machine->is_interrupt)
708 {
709 /* Enable interrupts. */
710 insn = emit_insn (gen_enable_interrupt ());
711 RTX_FRAME_RELATED_P (insn) = 1;
712 }
713
714 /* Push zero reg. */
715 insn = emit_move_insn (pushbyte, zero_reg_rtx);
716 RTX_FRAME_RELATED_P (insn) = 1;
717
718 /* Push tmp reg. */
719 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
720 RTX_FRAME_RELATED_P (insn) = 1;
721
722 /* Push SREG. */
723 insn = emit_move_insn (tmp_reg_rtx,
724 gen_rtx_MEM (QImode, GEN_INT (SREG_ADDR)));
725 RTX_FRAME_RELATED_P (insn) = 1;
726 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
727 RTX_FRAME_RELATED_P (insn) = 1;
728
729 /* Push RAMPZ. */
730 if(AVR_HAVE_RAMPZ
731 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
732 {
733 insn = emit_move_insn (tmp_reg_rtx,
734 gen_rtx_MEM (QImode, GEN_INT (RAMPZ_ADDR)));
735 RTX_FRAME_RELATED_P (insn) = 1;
736 insn = emit_move_insn (pushbyte, tmp_reg_rtx);
737 RTX_FRAME_RELATED_P (insn) = 1;
738 }
739
740 /* Clear zero reg. */
741 insn = emit_move_insn (zero_reg_rtx, const0_rtx);
742 RTX_FRAME_RELATED_P (insn) = 1;
743
744 /* Prevent any attempt to delete the setting of ZERO_REG! */
745 emit_use (zero_reg_rtx);
746 }
747 if (minimize && (frame_pointer_needed
748 || (AVR_2_BYTE_PC && live_seq > 6)
749 || live_seq > 7))
750 {
751 insn = emit_move_insn (gen_rtx_REG (HImode, REG_X),
752 gen_int_mode (size, HImode));
753 RTX_FRAME_RELATED_P (insn) = 1;
754
755 insn =
756 emit_insn (gen_call_prologue_saves (gen_int_mode (live_seq, HImode),
757 gen_int_mode (size + live_seq, HImode)));
758 RTX_FRAME_RELATED_P (insn) = 1;
759 }
760 else
761 {
762 int reg;
763 for (reg = 0; reg < 32; ++reg)
764 {
765 if (TEST_HARD_REG_BIT (set, reg))
766 {
767 /* Emit push of register to save. */
768 insn=emit_move_insn (pushbyte, gen_rtx_REG (QImode, reg));
769 RTX_FRAME_RELATED_P (insn) = 1;
770 }
771 }
772 if (frame_pointer_needed)
773 {
774 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
775 {
776 /* Push frame pointer. */
777 insn = emit_move_insn (pushword, frame_pointer_rtx);
778 RTX_FRAME_RELATED_P (insn) = 1;
779 }
780
781 if (!size)
782 {
783 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
784 RTX_FRAME_RELATED_P (insn) = 1;
785 }
786 else
787 {
788 /* Creating a frame can be done by direct manipulation of the
789 stack or via the frame pointer. These two methods are:
790 fp=sp
791 fp-=size
792 sp=fp
793 OR
794 sp-=size
795 fp=sp
796 the optimum method depends on function type, stack and frame size.
797 To avoid a complex logic, both methods are tested and shortest
798 is selected. */
799 rtx myfp;
800 rtx fp_plus_insns;
801 rtx sp_plus_insns = NULL_RTX;
802
803 if (TARGET_TINY_STACK)
804 {
805 /* The high byte (r29) doesn't change - prefer 'subi' (1 cycle)
806 over 'sbiw' (2 cycles, same size). */
807 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
808 }
809 else
810 {
811 /* Normal sized addition. */
812 myfp = frame_pointer_rtx;
813 }
814
815 /* Method 1-Adjust frame pointer. */
816 start_sequence ();
817
818 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
819 RTX_FRAME_RELATED_P (insn) = 1;
820
821 insn =
822 emit_move_insn (myfp,
823 gen_rtx_PLUS (GET_MODE(myfp), myfp,
824 gen_int_mode (-size,
825 GET_MODE(myfp))));
826 RTX_FRAME_RELATED_P (insn) = 1;
827
828 /* Copy to stack pointer. */
829 if (TARGET_TINY_STACK)
830 {
831 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
832 RTX_FRAME_RELATED_P (insn) = 1;
833 }
834 else if (TARGET_NO_INTERRUPTS
835 || cfun->machine->is_signal
836 || cfun->machine->is_OS_main)
837 {
838 insn =
839 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
840 frame_pointer_rtx));
841 RTX_FRAME_RELATED_P (insn) = 1;
842 }
843 else if (cfun->machine->is_interrupt)
844 {
845 insn = emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
846 frame_pointer_rtx));
847 RTX_FRAME_RELATED_P (insn) = 1;
848 }
849 else
850 {
851 insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
852 RTX_FRAME_RELATED_P (insn) = 1;
853 }
854
855 fp_plus_insns = get_insns ();
856 end_sequence ();
857
858 /* Method 2-Adjust Stack pointer. */
859 if (size <= 6)
860 {
861 start_sequence ();
862
863 insn =
864 emit_move_insn (stack_pointer_rtx,
865 gen_rtx_PLUS (HImode,
866 stack_pointer_rtx,
867 gen_int_mode (-size,
868 HImode)));
869 RTX_FRAME_RELATED_P (insn) = 1;
870
871 insn =
872 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
873 RTX_FRAME_RELATED_P (insn) = 1;
874
875 sp_plus_insns = get_insns ();
876 end_sequence ();
877 }
878
879 /* Use shortest method. */
880 if (size <= 6 && (get_sequence_length (sp_plus_insns)
881 < get_sequence_length (fp_plus_insns)))
882 emit_insn (sp_plus_insns);
883 else
884 emit_insn (fp_plus_insns);
885 }
886 }
887 }
888 }
889
890 /* Output summary at end of function prologue. */
891
892 static void
893 avr_asm_function_end_prologue (FILE *file)
894 {
895 if (cfun->machine->is_naked)
896 {
897 fputs ("/* prologue: naked */\n", file);
898 }
899 else
900 {
901 if (cfun->machine->is_interrupt)
902 {
903 fputs ("/* prologue: Interrupt */\n", file);
904 }
905 else if (cfun->machine->is_signal)
906 {
907 fputs ("/* prologue: Signal */\n", file);
908 }
909 else
910 fputs ("/* prologue: function */\n", file);
911 }
912 fprintf (file, "/* frame size = " HOST_WIDE_INT_PRINT_DEC " */\n",
913 get_frame_size());
914 }
915
916
917 /* Implement EPILOGUE_USES. */
918
919 int
920 avr_epilogue_uses (int regno ATTRIBUTE_UNUSED)
921 {
922 if (reload_completed
923 && cfun->machine
924 && (cfun->machine->is_interrupt || cfun->machine->is_signal))
925 return 1;
926 return 0;
927 }
928
929 /* Output RTL epilogue. */
930
931 void
932 expand_epilogue (void)
933 {
934 int reg;
935 int live_seq;
936 HARD_REG_SET set;
937 int minimize;
938 HOST_WIDE_INT size = get_frame_size();
939
940 /* epilogue: naked */
941 if (cfun->machine->is_naked)
942 {
943 emit_jump_insn (gen_return ());
944 return;
945 }
946
947 avr_regs_to_save (&set);
948 live_seq = sequent_regs_live ();
949 minimize = (TARGET_CALL_PROLOGUES
950 && !cfun->machine->is_interrupt
951 && !cfun->machine->is_signal
952 && !cfun->machine->is_OS_task
953 && !cfun->machine->is_OS_main
954 && live_seq);
955
956 if (minimize && (frame_pointer_needed || live_seq > 4))
957 {
958 if (frame_pointer_needed)
959 {
960 /* Get rid of frame. */
961 emit_move_insn(frame_pointer_rtx,
962 gen_rtx_PLUS (HImode, frame_pointer_rtx,
963 gen_int_mode (size, HImode)));
964 }
965 else
966 {
967 emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
968 }
969
970 emit_insn (gen_epilogue_restores (gen_int_mode (live_seq, HImode)));
971 }
972 else
973 {
974 if (frame_pointer_needed)
975 {
976 if (size)
977 {
978 /* Try two methods to adjust stack and select shortest. */
979 rtx myfp;
980 rtx fp_plus_insns;
981 rtx sp_plus_insns = NULL_RTX;
982
983 if (TARGET_TINY_STACK)
984 {
985 /* The high byte (r29) doesn't change - prefer 'subi'
986 (1 cycle) over 'sbiw' (2 cycles, same size). */
987 myfp = gen_rtx_REG (QImode, REGNO (frame_pointer_rtx));
988 }
989 else
990 {
991 /* Normal sized addition. */
992 myfp = frame_pointer_rtx;
993 }
994
995 /* Method 1-Adjust frame pointer. */
996 start_sequence ();
997
998 emit_move_insn (myfp,
999 gen_rtx_PLUS (HImode, myfp,
1000 gen_int_mode (size,
1001 GET_MODE(myfp))));
1002
1003 /* Copy to stack pointer. */
1004 if (TARGET_TINY_STACK)
1005 {
1006 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1007 }
1008 else if (TARGET_NO_INTERRUPTS
1009 || cfun->machine->is_signal)
1010 {
1011 emit_insn (gen_movhi_sp_r_irq_off (stack_pointer_rtx,
1012 frame_pointer_rtx));
1013 }
1014 else if (cfun->machine->is_interrupt)
1015 {
1016 emit_insn (gen_movhi_sp_r_irq_on (stack_pointer_rtx,
1017 frame_pointer_rtx));
1018 }
1019 else
1020 {
1021 emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);
1022 }
1023
1024 fp_plus_insns = get_insns ();
1025 end_sequence ();
1026
1027 /* Method 2-Adjust Stack pointer. */
1028 if (size <= 5)
1029 {
1030 start_sequence ();
1031
1032 emit_move_insn (stack_pointer_rtx,
1033 gen_rtx_PLUS (HImode, stack_pointer_rtx,
1034 gen_int_mode (size,
1035 HImode)));
1036
1037 sp_plus_insns = get_insns ();
1038 end_sequence ();
1039 }
1040
1041 /* Use shortest method. */
1042 if (size <= 5 && (get_sequence_length (sp_plus_insns)
1043 < get_sequence_length (fp_plus_insns)))
1044 emit_insn (sp_plus_insns);
1045 else
1046 emit_insn (fp_plus_insns);
1047 }
1048 if (!(cfun->machine->is_OS_task || cfun->machine->is_OS_main))
1049 {
1050 /* Restore previous frame_pointer. */
1051 emit_insn (gen_pophi (frame_pointer_rtx));
1052 }
1053 }
1054 /* Restore used registers. */
1055 for (reg = 31; reg >= 0; --reg)
1056 {
1057 if (TEST_HARD_REG_BIT (set, reg))
1058 emit_insn (gen_popqi (gen_rtx_REG (QImode, reg)));
1059 }
1060 if (cfun->machine->is_interrupt || cfun->machine->is_signal)
1061 {
1062 /* Restore RAMPZ using tmp reg as scratch. */
1063 if(AVR_HAVE_RAMPZ
1064 && (TEST_HARD_REG_BIT (set, REG_Z) && TEST_HARD_REG_BIT (set, REG_Z + 1)))
1065 {
1066 emit_insn (gen_popqi (tmp_reg_rtx));
1067 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(RAMPZ_ADDR)),
1068 tmp_reg_rtx);
1069 }
1070
1071 /* Restore SREG using tmp reg as scratch. */
1072 emit_insn (gen_popqi (tmp_reg_rtx));
1073
1074 emit_move_insn (gen_rtx_MEM(QImode, GEN_INT(SREG_ADDR)),
1075 tmp_reg_rtx);
1076
1077 /* Restore tmp REG. */
1078 emit_insn (gen_popqi (tmp_reg_rtx));
1079
1080 /* Restore zero REG. */
1081 emit_insn (gen_popqi (zero_reg_rtx));
1082 }
1083
1084 emit_jump_insn (gen_return ());
1085 }
1086 }
1087
1088 /* Output summary messages at beginning of function epilogue. */
1089
1090 static void
1091 avr_asm_function_begin_epilogue (FILE *file)
1092 {
1093 fprintf (file, "/* epilogue start */\n");
1094 }
1095
1096 /* Return nonzero if X (an RTX) is a legitimate memory address on the target
1097 machine for a memory operand of mode MODE. */
1098
1099 int
1100 legitimate_address_p (enum machine_mode mode, rtx x, int strict)
1101 {
1102 enum reg_class r = NO_REGS;
1103
1104 if (TARGET_ALL_DEBUG)
1105 {
1106 fprintf (stderr, "mode: (%s) %s %s %s %s:",
1107 GET_MODE_NAME(mode),
1108 strict ? "(strict)": "",
1109 reload_completed ? "(reload_completed)": "",
1110 reload_in_progress ? "(reload_in_progress)": "",
1111 reg_renumber ? "(reg_renumber)" : "");
1112 if (GET_CODE (x) == PLUS
1113 && REG_P (XEXP (x, 0))
1114 && GET_CODE (XEXP (x, 1)) == CONST_INT
1115 && INTVAL (XEXP (x, 1)) >= 0
1116 && INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode)
1117 && reg_renumber
1118 )
1119 fprintf (stderr, "(r%d ---> r%d)", REGNO (XEXP (x, 0)),
1120 true_regnum (XEXP (x, 0)));
1121 debug_rtx (x);
1122 }
1123 if (!strict && GET_CODE (x) == SUBREG)
1124 x = SUBREG_REG (x);
1125 if (REG_P (x) && (strict ? REG_OK_FOR_BASE_STRICT_P (x)
1126 : REG_OK_FOR_BASE_NOSTRICT_P (x)))
1127 r = POINTER_REGS;
1128 else if (CONSTANT_ADDRESS_P (x))
1129 r = ALL_REGS;
1130 else if (GET_CODE (x) == PLUS
1131 && REG_P (XEXP (x, 0))
1132 && GET_CODE (XEXP (x, 1)) == CONST_INT
1133 && INTVAL (XEXP (x, 1)) >= 0)
1134 {
1135 int fit = INTVAL (XEXP (x, 1)) <= MAX_LD_OFFSET (mode);
1136 if (fit)
1137 {
1138 if (! strict
1139 || REGNO (XEXP (x,0)) == REG_X
1140 || REGNO (XEXP (x,0)) == REG_Y
1141 || REGNO (XEXP (x,0)) == REG_Z)
1142 r = BASE_POINTER_REGS;
1143 if (XEXP (x,0) == frame_pointer_rtx
1144 || XEXP (x,0) == arg_pointer_rtx)
1145 r = BASE_POINTER_REGS;
1146 }
1147 else if (frame_pointer_needed && XEXP (x,0) == frame_pointer_rtx)
1148 r = POINTER_Y_REGS;
1149 }
1150 else if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1151 && REG_P (XEXP (x, 0))
1152 && (strict ? REG_OK_FOR_BASE_STRICT_P (XEXP (x, 0))
1153 : REG_OK_FOR_BASE_NOSTRICT_P (XEXP (x, 0))))
1154 {
1155 r = POINTER_REGS;
1156 }
1157 if (TARGET_ALL_DEBUG)
1158 {
1159 fprintf (stderr, " ret = %c\n", r + '0');
1160 }
1161 return r == NO_REGS ? 0 : (int)r;
1162 }
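/* Editorial note (not part of the original source): examples of addresses the
   function above accepts, assuming HImode (MAX_LD_OFFSET == 62):
     (reg Y)             -> POINTER_REGS
     (plus (reg Y) 40)   -> BASE_POINTER_REGS   (40 <= 62)
     (post_inc (reg X))  -> POINTER_REGS
     (symbol_ref "x")    -> ALL_REGS            (constant address)
   Anything else yields NO_REGS, i.e. the address is rejected.  */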
1163
1164 /* Attempts to replace X with a valid
1165 memory address for an operand of mode MODE */
1166
1167 rtx
1168 legitimize_address (rtx x, rtx oldx, enum machine_mode mode)
1169 {
1170 x = oldx;
1171 if (TARGET_ALL_DEBUG)
1172 {
1173 fprintf (stderr, "legitimize_address mode: %s", GET_MODE_NAME(mode));
1174 debug_rtx (oldx);
1175 }
1176
1177 if (GET_CODE (oldx) == PLUS
1178 && REG_P (XEXP (oldx,0)))
1179 {
1180 if (REG_P (XEXP (oldx,1)))
1181 x = force_reg (GET_MODE (oldx), oldx);
1182 else if (GET_CODE (XEXP (oldx, 1)) == CONST_INT)
1183 {
1184 int offs = INTVAL (XEXP (oldx,1));
1185 if (frame_pointer_rtx != XEXP (oldx,0))
1186 if (offs > MAX_LD_OFFSET (mode))
1187 {
1188 if (TARGET_ALL_DEBUG)
1189 fprintf (stderr, "force_reg (big offset)\n");
1190 x = force_reg (GET_MODE (oldx), oldx);
1191 }
1192 }
1193 }
1194 return x;
1195 }
1196
1197
1198 /* Return a pointer register name as a string. */
1199
1200 static const char *
1201 ptrreg_to_str (int regno)
1202 {
1203 switch (regno)
1204 {
1205 case REG_X: return "X";
1206 case REG_Y: return "Y";
1207 case REG_Z: return "Z";
1208 default:
1209 output_operand_lossage ("address operand requires constraint for X, Y, or Z register");
1210 }
1211 return NULL;
1212 }
1213
1214 /* Return the condition name as a string.
1215 Used when constructing conditional jumps. */
1216
1217 static const char *
1218 cond_string (enum rtx_code code)
1219 {
1220 switch (code)
1221 {
1222 case NE:
1223 return "ne";
1224 case EQ:
1225 return "eq";
1226 case GE:
1227 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1228 return "pl";
1229 else
1230 return "ge";
1231 case LT:
1232 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1233 return "mi";
1234 else
1235 return "lt";
1236 case GEU:
1237 return "sh";
1238 case LTU:
1239 return "lo";
1240 default:
1241 gcc_unreachable ();
1242 }
1243 }
1244
1245 /* Output ADDR to FILE as address. */
1246
1247 void
1248 print_operand_address (FILE *file, rtx addr)
1249 {
1250 switch (GET_CODE (addr))
1251 {
1252 case REG:
1253 fprintf (file, ptrreg_to_str (REGNO (addr)));
1254 break;
1255
1256 case PRE_DEC:
1257 fprintf (file, "-%s", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1258 break;
1259
1260 case POST_INC:
1261 fprintf (file, "%s+", ptrreg_to_str (REGNO (XEXP (addr, 0))));
1262 break;
1263
1264 default:
1265 if (CONSTANT_ADDRESS_P (addr)
1266 && ((GET_CODE (addr) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (addr))
1267 || GET_CODE (addr) == LABEL_REF))
1268 {
1269 fprintf (file, "gs(");
1270 output_addr_const (file,addr);
1271 fprintf (file ,")");
1272 }
1273 else
1274 output_addr_const (file, addr);
1275 }
1276 }
1277
1278
1279 /* Output X as assembler operand to file FILE. */
1280
1281 void
1282 print_operand (FILE *file, rtx x, int code)
1283 {
1284 int abcd = 0;
1285
1286 if (code >= 'A' && code <= 'D')
1287 abcd = code - 'A';
1288
1289 if (code == '~')
1290 {
1291 if (!AVR_HAVE_JMP_CALL)
1292 fputc ('r', file);
1293 }
1294 else if (code == '!')
1295 {
1296 if (AVR_HAVE_EIJMP_EICALL)
1297 fputc ('e', file);
1298 }
1299 else if (REG_P (x))
1300 {
1301 if (x == zero_reg_rtx)
1302 fprintf (file, "__zero_reg__");
1303 else
1304 fprintf (file, reg_names[true_regnum (x) + abcd]);
1305 }
1306 else if (GET_CODE (x) == CONST_INT)
1307 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + abcd);
1308 else if (GET_CODE (x) == MEM)
1309 {
1310 rtx addr = XEXP (x,0);
1311
1312 if (CONSTANT_P (addr) && abcd)
1313 {
1314 fputc ('(', file);
1315 output_address (addr);
1316 fprintf (file, ")+%d", abcd);
1317 }
1318 else if (code == 'o')
1319 {
1320 if (GET_CODE (addr) != PLUS)
1321 fatal_insn ("bad address, not (reg+disp):", addr);
1322
1323 print_operand (file, XEXP (addr, 1), 0);
1324 }
1325 else if (code == 'p' || code == 'r')
1326 {
1327 if (GET_CODE (addr) != POST_INC && GET_CODE (addr) != PRE_DEC)
1328 fatal_insn ("bad address, not post_inc or pre_dec:", addr);
1329
1330 if (code == 'p')
1331 print_operand_address (file, XEXP (addr, 0)); /* X, Y, Z */
1332 else
1333 print_operand (file, XEXP (addr, 0), 0); /* r26, r28, r30 */
1334 }
1335 else if (GET_CODE (addr) == PLUS)
1336 {
1337 print_operand_address (file, XEXP (addr,0));
1338 if (REGNO (XEXP (addr, 0)) == REG_X)
1339 fatal_insn ("internal compiler error. Bad address:"
1340 ,addr);
1341 fputc ('+', file);
1342 print_operand (file, XEXP (addr,1), code);
1343 }
1344 else
1345 print_operand_address (file, addr);
1346 }
1347 else if (GET_CODE (x) == CONST_DOUBLE)
1348 {
1349 long val;
1350 REAL_VALUE_TYPE rv;
1351 if (GET_MODE (x) != SFmode)
1352 fatal_insn ("internal compiler error. Unknown mode:", x);
1353 REAL_VALUE_FROM_CONST_DOUBLE (rv, x);
1354 REAL_VALUE_TO_TARGET_SINGLE (rv, val);
1355 fprintf (file, "0x%lx", val);
1356 }
1357 else if (code == 'j')
1358 fputs (cond_string (GET_CODE (x)), file);
1359 else if (code == 'k')
1360 fputs (cond_string (reverse_condition (GET_CODE (x))), file);
1361 else
1362 print_operand_address (file, x);
1363 }
1364
1365 /* Update the condition code in the INSN. */
1366
1367 void
1368 notice_update_cc (rtx body ATTRIBUTE_UNUSED, rtx insn)
1369 {
1370 rtx set;
1371
1372 switch (get_attr_cc (insn))
1373 {
1374 case CC_NONE:
1375 /* Insn does not affect CC at all. */
1376 break;
1377
1378 case CC_SET_N:
1379 CC_STATUS_INIT;
1380 break;
1381
1382 case CC_SET_ZN:
1383 set = single_set (insn);
1384 CC_STATUS_INIT;
1385 if (set)
1386 {
1387 cc_status.flags |= CC_NO_OVERFLOW;
1388 cc_status.value1 = SET_DEST (set);
1389 }
1390 break;
1391
1392 case CC_SET_CZN:
1393 /* Insn sets the Z,N,C flags of CC to recog_operand[0].
1394 The V flag may or may not be known but that's ok because
1395 alter_cond will change tests to use EQ/NE. */
1396 set = single_set (insn);
1397 CC_STATUS_INIT;
1398 if (set)
1399 {
1400 cc_status.value1 = SET_DEST (set);
1401 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1402 }
1403 break;
1404
1405 case CC_COMPARE:
1406 set = single_set (insn);
1407 CC_STATUS_INIT;
1408 if (set)
1409 cc_status.value1 = SET_SRC (set);
1410 break;
1411
1412 case CC_CLOBBER:
1413 /* Insn doesn't leave CC in a usable state. */
1414 CC_STATUS_INIT;
1415
1416 /* Correct CC for the ashrqi3 when the shift count is a CONST_INT != 6. */
1417 set = single_set (insn);
1418 if (set)
1419 {
1420 rtx src = SET_SRC (set);
1421
1422 if (GET_CODE (src) == ASHIFTRT
1423 && GET_MODE (src) == QImode)
1424 {
1425 rtx x = XEXP (src, 1);
1426
1427 if (GET_CODE (x) == CONST_INT
1428 && INTVAL (x) > 0
1429 && INTVAL (x) != 6)
1430 {
1431 cc_status.value1 = SET_DEST (set);
1432 cc_status.flags |= CC_OVERFLOW_UNUSABLE;
1433 }
1434 }
1435 }
1436 break;
1437 }
1438 }
1439
1440 /* Return maximum number of consecutive registers of
1441 class CLASS needed to hold a value of mode MODE. */
1442
1443 int
1444 class_max_nregs (enum reg_class rclass ATTRIBUTE_UNUSED,enum machine_mode mode)
1445 {
1446 return ((GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD);
1447 }
1448
1449 /* Choose mode for jump insn:
1450 1 - relative jump in range -63 <= x <= 62 ;
1451 2 - relative jump in range -2046 <= x <= 2045 ;
1452 3 - absolute jump (only for ATmega[16]03). */
1453
1454 int
1455 avr_jump_mode (rtx x, rtx insn)
1456 {
1457 int dest_addr = INSN_ADDRESSES (INSN_UID (GET_MODE (x) == LABEL_REF
1458 ? XEXP (x, 0) : x));
1459 int cur_addr = INSN_ADDRESSES (INSN_UID (insn));
1460 int jump_distance = cur_addr - dest_addr;
1461
1462 if (-63 <= jump_distance && jump_distance <= 62)
1463 return 1;
1464 else if (-2046 <= jump_distance && jump_distance <= 2045)
1465 return 2;
1466 else if (AVR_HAVE_JMP_CALL)
1467 return 3;
1468
1469 return 2;
1470 }
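/* Editorial note (not part of the original source): an illustrative mapping of
   jump distances to the modes returned above, with assumed insn addresses:
     jump_distance  -10  -> 1  (short relative branch)
     jump_distance  500  -> 2  (relative jump)
     jump_distance 5000  -> 3  on devices with JMP/CALL, otherwise 2  */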
1471
1472 /* Return AVR conditional jump commands.
1473 X is a comparison RTX.
1474 LEN is a number returned by the avr_jump_mode function.
1475 If REVERSE is nonzero, the condition code in X must be reversed. */
1476
1477 const char *
1478 ret_cond_branch (rtx x, int len, int reverse)
1479 {
1480 RTX_CODE cond = reverse ? reverse_condition (GET_CODE (x)) : GET_CODE (x);
1481
1482 switch (cond)
1483 {
1484 case GT:
1485 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1486 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1487 AS1 (brpl,%0)) :
1488 len == 2 ? (AS1 (breq,.+4) CR_TAB
1489 AS1 (brmi,.+2) CR_TAB
1490 AS1 (rjmp,%0)) :
1491 (AS1 (breq,.+6) CR_TAB
1492 AS1 (brmi,.+4) CR_TAB
1493 AS1 (jmp,%0)));
1494
1495 else
1496 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1497 AS1 (brge,%0)) :
1498 len == 2 ? (AS1 (breq,.+4) CR_TAB
1499 AS1 (brlt,.+2) CR_TAB
1500 AS1 (rjmp,%0)) :
1501 (AS1 (breq,.+6) CR_TAB
1502 AS1 (brlt,.+4) CR_TAB
1503 AS1 (jmp,%0)));
1504 case GTU:
1505 return (len == 1 ? (AS1 (breq,.+2) CR_TAB
1506 AS1 (brsh,%0)) :
1507 len == 2 ? (AS1 (breq,.+4) CR_TAB
1508 AS1 (brlo,.+2) CR_TAB
1509 AS1 (rjmp,%0)) :
1510 (AS1 (breq,.+6) CR_TAB
1511 AS1 (brlo,.+4) CR_TAB
1512 AS1 (jmp,%0)));
1513 case LE:
1514 if (cc_prev_status.flags & CC_OVERFLOW_UNUSABLE)
1515 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1516 AS1 (brmi,%0)) :
1517 len == 2 ? (AS1 (breq,.+2) CR_TAB
1518 AS1 (brpl,.+2) CR_TAB
1519 AS1 (rjmp,%0)) :
1520 (AS1 (breq,.+2) CR_TAB
1521 AS1 (brpl,.+4) CR_TAB
1522 AS1 (jmp,%0)));
1523 else
1524 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1525 AS1 (brlt,%0)) :
1526 len == 2 ? (AS1 (breq,.+2) CR_TAB
1527 AS1 (brge,.+2) CR_TAB
1528 AS1 (rjmp,%0)) :
1529 (AS1 (breq,.+2) CR_TAB
1530 AS1 (brge,.+4) CR_TAB
1531 AS1 (jmp,%0)));
1532 case LEU:
1533 return (len == 1 ? (AS1 (breq,%0) CR_TAB
1534 AS1 (brlo,%0)) :
1535 len == 2 ? (AS1 (breq,.+2) CR_TAB
1536 AS1 (brsh,.+2) CR_TAB
1537 AS1 (rjmp,%0)) :
1538 (AS1 (breq,.+2) CR_TAB
1539 AS1 (brsh,.+4) CR_TAB
1540 AS1 (jmp,%0)));
1541 default:
1542 if (reverse)
1543 {
1544 switch (len)
1545 {
1546 case 1:
1547 return AS1 (br%k1,%0);
1548 case 2:
1549 return (AS1 (br%j1,.+2) CR_TAB
1550 AS1 (rjmp,%0));
1551 default:
1552 return (AS1 (br%j1,.+4) CR_TAB
1553 AS1 (jmp,%0));
1554 }
1555 }
1556 else
1557 {
1558 switch (len)
1559 {
1560 case 1:
1561 return AS1 (br%j1,%0);
1562 case 2:
1563 return (AS1 (br%k1,.+2) CR_TAB
1564 AS1 (rjmp,%0));
1565 default:
1566 return (AS1 (br%k1,.+4) CR_TAB
1567 AS1 (jmp,%0));
1568 }
1569 }
1570 }
1571 return "";
1572 }
1573
1574 /* Predicate function for an immediate operand that fits in a byte (8 bits). */
1575
1576 int
1577 byte_immediate_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
1578 {
1579 return (GET_CODE (op) == CONST_INT
1580 && INTVAL (op) <= 0xff && INTVAL (op) >= 0);
1581 }
1582
1583 /* Output all insn addresses and their sizes into the assembly language
1584 output file. This is helpful for debugging whether the length attributes
1585 in the md file are correct.
1586 Output insn cost for next insn. */
1587
1588 void
1589 final_prescan_insn (rtx insn, rtx *operand ATTRIBUTE_UNUSED,
1590 int num_operands ATTRIBUTE_UNUSED)
1591 {
1592 int uid = INSN_UID (insn);
1593
1594 if (TARGET_INSN_SIZE_DUMP || TARGET_ALL_DEBUG)
1595 {
1596 fprintf (asm_out_file, "/*DEBUG: 0x%x\t\t%d\t%d */\n",
1597 INSN_ADDRESSES (uid),
1598 INSN_ADDRESSES (uid) - last_insn_address,
1599 rtx_cost (PATTERN (insn), INSN, !optimize_size));
1600 }
1601 last_insn_address = INSN_ADDRESSES (uid);
1602 }
1603
1604 /* Return 0 if undefined, 1 if always true or always false. */
1605
1606 int
1607 avr_simplify_comparison_p (enum machine_mode mode, RTX_CODE op, rtx x)
1608 {
1609 unsigned int max = (mode == QImode ? 0xff :
1610 mode == HImode ? 0xffff :
1611 mode == SImode ? 0xffffffff : 0);
1612 if (max && op && GET_CODE (x) == CONST_INT)
1613 {
1614 if (unsigned_condition (op) != op)
1615 max >>= 1;
1616
1617 if (max != (INTVAL (x) & max)
1618 && INTVAL (x) != 0xff)
1619 return 1;
1620 }
1621 return 0;
1622 }
1623
1624
1625 /* Returns nonzero if REGNO is the number of a hard
1626 register in which function arguments are sometimes passed. */
1627
1628 int
1629 function_arg_regno_p(int r)
1630 {
1631 return (r >= 8 && r <= 25);
1632 }
1633
1634 /* Initialize the variable CUM for the state at the beginning
1635 of the argument list. */
1636
1637 void
1638 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, rtx libname,
1639 tree fndecl ATTRIBUTE_UNUSED)
1640 {
1641 cum->nregs = 18;
1642 cum->regno = FIRST_CUM_REG;
1643 if (!libname && fntype)
1644 {
1645 int stdarg = (TYPE_ARG_TYPES (fntype) != 0
1646 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1647 != void_type_node));
1648 if (stdarg)
1649 cum->nregs = 0;
1650 }
1651 }
1652
1653 /* Returns the number of registers to allocate for a function argument. */
1654
1655 static int
1656 avr_num_arg_regs (enum machine_mode mode, tree type)
1657 {
1658 int size;
1659
1660 if (mode == BLKmode)
1661 size = int_size_in_bytes (type);
1662 else
1663 size = GET_MODE_SIZE (mode);
1664
1665 /* Align all function arguments to start in even-numbered registers.
1666 Odd-sized arguments leave holes above them. */
1667
1668 return (size + 1) & ~1;
1669 }
1670
1671 /* Controls whether a function argument is passed
1672 in a register, and which register. */
1673
1674 rtx
1675 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1676 int named ATTRIBUTE_UNUSED)
1677 {
1678 int bytes = avr_num_arg_regs (mode, type);
1679
1680 if (cum->nregs && bytes <= cum->nregs)
1681 return gen_rtx_REG (mode, cum->regno - bytes);
1682
1683 return NULL_RTX;
1684 }
1685
1686 /* Update the summarizer variable CUM to advance past an argument
1687 in the argument list. */
1688
1689 void
1690 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type,
1691 int named ATTRIBUTE_UNUSED)
1692 {
1693 int bytes = avr_num_arg_regs (mode, type);
1694
1695 cum->nregs -= bytes;
1696 cum->regno -= bytes;
1697
1698 if (cum->nregs <= 0)
1699 {
1700 cum->nregs = 0;
1701 cum->regno = FIRST_CUM_REG;
1702 }
1703 }
1704
1705 /***********************************************************************
1706 Functions for outputting various mov's for various modes
1707 ************************************************************************/
1708 const char *
1709 output_movqi (rtx insn, rtx operands[], int *l)
1710 {
1711 int dummy;
1712 rtx dest = operands[0];
1713 rtx src = operands[1];
1714 int *real_l = l;
1715
1716 if (!l)
1717 l = &dummy;
1718
1719 *l = 1;
1720
1721 if (register_operand (dest, QImode))
1722 {
1723 if (register_operand (src, QImode)) /* mov r,r */
1724 {
1725 if (test_hard_reg_class (STACK_REG, dest))
1726 return AS2 (out,%0,%1);
1727 else if (test_hard_reg_class (STACK_REG, src))
1728 return AS2 (in,%0,%1);
1729
1730 return AS2 (mov,%0,%1);
1731 }
1732 else if (CONSTANT_P (src))
1733 {
1734 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1735 return AS2 (ldi,%0,lo8(%1));
1736
1737 if (GET_CODE (src) == CONST_INT)
1738 {
1739 if (src == const0_rtx) /* mov r,L */
1740 return AS1 (clr,%0);
1741 else if (src == const1_rtx)
1742 {
1743 *l = 2;
1744 return (AS1 (clr,%0) CR_TAB
1745 AS1 (inc,%0));
1746 }
1747 else if (src == constm1_rtx)
1748 {
1749 /* Load the immediate constant -1 into any register. */
1750 *l = 2;
1751 return (AS1 (clr,%0) CR_TAB
1752 AS1 (dec,%0));
1753 }
1754 else
1755 {
1756 int bit_nr = exact_log2 (INTVAL (src));
1757
1758 if (bit_nr >= 0)
1759 {
1760 *l = 3;
1761 if (!real_l)
1762 output_asm_insn ((AS1 (clr,%0) CR_TAB
1763 "set"), operands);
1764 if (!real_l)
1765 avr_output_bld (operands, bit_nr);
1766
1767 return "";
1768 }
1769 }
1770 }
1771
1772 /* Last resort, larger than loading from memory. */
1773 *l = 4;
1774 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1775 AS2 (ldi,r31,lo8(%1)) CR_TAB
1776 AS2 (mov,%0,r31) CR_TAB
1777 AS2 (mov,r31,__tmp_reg__));
1778 }
1779 else if (GET_CODE (src) == MEM)
1780 return out_movqi_r_mr (insn, operands, real_l); /* mov r,m */
1781 }
1782 else if (GET_CODE (dest) == MEM)
1783 {
1784 const char *templ;
1785
1786 if (src == const0_rtx)
1787 operands[1] = zero_reg_rtx;
1788
1789 templ = out_movqi_mr_r (insn, operands, real_l);
1790
1791 if (!real_l)
1792 output_asm_insn (templ, operands);
1793
1794 operands[1] = src;
1795 }
1796 return "";
1797 }
1798
1799
1800 const char *
1801 output_movhi (rtx insn, rtx operands[], int *l)
1802 {
1803 int dummy;
1804 rtx dest = operands[0];
1805 rtx src = operands[1];
1806 int *real_l = l;
1807
1808 if (!l)
1809 l = &dummy;
1810
1811 if (register_operand (dest, HImode))
1812 {
1813 if (register_operand (src, HImode)) /* mov r,r */
1814 {
1815 if (test_hard_reg_class (STACK_REG, dest))
1816 {
1817 if (TARGET_TINY_STACK)
1818 return *l = 1, AS2 (out,__SP_L__,%A1);
1819 /* Use simple load of stack pointer if no interrupts are
1820 used. */
1821 else if (TARGET_NO_INTERRUPTS)
1822 return *l = 2, (AS2 (out,__SP_H__,%B1) CR_TAB
1823 AS2 (out,__SP_L__,%A1));
1824 *l = 5;
1825 return (AS2 (in,__tmp_reg__,__SREG__) CR_TAB
1826 "cli" CR_TAB
1827 AS2 (out,__SP_H__,%B1) CR_TAB
1828 AS2 (out,__SREG__,__tmp_reg__) CR_TAB
1829 AS2 (out,__SP_L__,%A1));
1830 }
1831 else if (test_hard_reg_class (STACK_REG, src))
1832 {
1833 *l = 2;
1834 return (AS2 (in,%A0,__SP_L__) CR_TAB
1835 AS2 (in,%B0,__SP_H__));
1836 }
1837
1838 if (AVR_HAVE_MOVW)
1839 {
1840 *l = 1;
1841 return (AS2 (movw,%0,%1));
1842 }
1843 else
1844 {
1845 *l = 2;
1846 return (AS2 (mov,%A0,%A1) CR_TAB
1847 AS2 (mov,%B0,%B1));
1848 }
1849 }
1850 else if (CONSTANT_P (src))
1851 {
1852 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
1853 {
1854 *l = 2;
1855 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
1856 AS2 (ldi,%B0,hi8(%1)));
1857 }
1858
1859 if (GET_CODE (src) == CONST_INT)
1860 {
1861 if (src == const0_rtx) /* mov r,L */
1862 {
1863 *l = 2;
1864 return (AS1 (clr,%A0) CR_TAB
1865 AS1 (clr,%B0));
1866 }
1867 else if (src == const1_rtx)
1868 {
1869 *l = 3;
1870 return (AS1 (clr,%A0) CR_TAB
1871 AS1 (clr,%B0) CR_TAB
1872 AS1 (inc,%A0));
1873 }
1874 else if (src == constm1_rtx)
1875 {
1876 /* Load the immediate constant -1 into any register. */
1877 *l = 3;
1878 return (AS1 (clr,%0) CR_TAB
1879 AS1 (dec,%A0) CR_TAB
1880 AS2 (mov,%B0,%A0));
1881 }
1882 else
1883 {
1884 int bit_nr = exact_log2 (INTVAL (src));
1885
1886 if (bit_nr >= 0)
1887 {
1888 *l = 4;
1889 if (!real_l)
1890 output_asm_insn ((AS1 (clr,%A0) CR_TAB
1891 AS1 (clr,%B0) CR_TAB
1892 "set"), operands);
1893 if (!real_l)
1894 avr_output_bld (operands, bit_nr);
1895
1896 return "";
1897 }
1898 }
1899
1900 if ((INTVAL (src) & 0xff) == 0)
1901 {
1902 *l = 5;
1903 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1904 AS1 (clr,%A0) CR_TAB
1905 AS2 (ldi,r31,hi8(%1)) CR_TAB
1906 AS2 (mov,%B0,r31) CR_TAB
1907 AS2 (mov,r31,__tmp_reg__));
1908 }
1909 else if ((INTVAL (src) & 0xff00) == 0)
1910 {
1911 *l = 5;
1912 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1913 AS2 (ldi,r31,lo8(%1)) CR_TAB
1914 AS2 (mov,%A0,r31) CR_TAB
1915 AS1 (clr,%B0) CR_TAB
1916 AS2 (mov,r31,__tmp_reg__));
1917 }
1918 }
1919
1920 /* Last resort, equal to loading from memory. */
1921 *l = 6;
1922 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
1923 AS2 (ldi,r31,lo8(%1)) CR_TAB
1924 AS2 (mov,%A0,r31) CR_TAB
1925 AS2 (ldi,r31,hi8(%1)) CR_TAB
1926 AS2 (mov,%B0,r31) CR_TAB
1927 AS2 (mov,r31,__tmp_reg__));
1928 }
1929 else if (GET_CODE (src) == MEM)
1930 return out_movhi_r_mr (insn, operands, real_l); /* mov r,m */
1931 }
1932 else if (GET_CODE (dest) == MEM)
1933 {
1934 const char *templ;
1935
1936 if (src == const0_rtx)
1937 operands[1] = zero_reg_rtx;
1938
1939 templ = out_movhi_mr_r (insn, operands, real_l);
1940
1941 if (!real_l)
1942 output_asm_insn (templ, operands);
1943
1944 operands[1] = src;
1945 return "";
1946 }
1947 fatal_insn ("invalid insn:", insn);
1948 return "";
1949 }
1950
1951 const char *
1952 out_movqi_r_mr (rtx insn, rtx op[], int *l)
1953 {
1954 rtx dest = op[0];
1955 rtx src = op[1];
1956 rtx x = XEXP (src, 0);
1957 int dummy;
1958
1959 if (!l)
1960 l = &dummy;
1961
1962 if (CONSTANT_ADDRESS_P (x))
1963 {
1964 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
1965 {
1966 *l = 1;
1967 return AS2 (in,%0,__SREG__);
1968 }
1969 if (optimize > 0 && io_address_operand (x, QImode))
1970 {
1971 *l = 1;
1972 return AS2 (in,%0,%1-0x20);
1973 }
1974 *l = 2;
1975 return AS2 (lds,%0,%1);
1976 }
1977 /* memory access by reg+disp */
1978 else if (GET_CODE (x) == PLUS
1979 && REG_P (XEXP (x,0))
1980 && GET_CODE (XEXP (x,1)) == CONST_INT)
1981 {
1982 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (src))) >= 63)
1983 {
1984 int disp = INTVAL (XEXP (x,1));
1985 if (REGNO (XEXP (x,0)) != REG_Y)
1986 fatal_insn ("incorrect insn:",insn);
1987
1988 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
1989 return *l = 3, (AS2 (adiw,r28,%o1-63) CR_TAB
1990 AS2 (ldd,%0,Y+63) CR_TAB
1991 AS2 (sbiw,r28,%o1-63));
1992
1993 return *l = 5, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
1994 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
1995 AS2 (ld,%0,Y) CR_TAB
1996 AS2 (subi,r28,lo8(%o1)) CR_TAB
1997 AS2 (sbci,r29,hi8(%o1)));
1998 }
1999 else if (REGNO (XEXP (x,0)) == REG_X)
2000 {
2001 /* This is a paranoid case.  LEGITIMIZE_RELOAD_ADDRESS must exclude
2002 it, but I have seen this situation with extreme optimization options. */
2003 if (reg_overlap_mentioned_p (dest, XEXP (x,0))
2004 || reg_unused_after (insn, XEXP (x,0)))
2005 return *l = 2, (AS2 (adiw,r26,%o1) CR_TAB
2006 AS2 (ld,%0,X));
2007
2008 return *l = 3, (AS2 (adiw,r26,%o1) CR_TAB
2009 AS2 (ld,%0,X) CR_TAB
2010 AS2 (sbiw,r26,%o1));
2011 }
2012 *l = 1;
2013 return AS2 (ldd,%0,%1);
2014 }
2015 *l = 1;
2016 return AS2 (ld,%0,%1);
2017 }
2018
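/* Output asm for loading a HImode value from memory.  Handles plain
   register bases, the X register (which has no displacement addressing),
   reg+displacement, pre-decrement, post-increment and constant addresses.
   Volatile accesses read the low byte first so that 16-bit I/O registers
   are accessed in the order the hardware requires. */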
2019 const char *
2020 out_movhi_r_mr (rtx insn, rtx op[], int *l)
2021 {
2022 rtx dest = op[0];
2023 rtx src = op[1];
2024 rtx base = XEXP (src, 0);
2025 int reg_dest = true_regnum (dest);
2026 int reg_base = true_regnum (base);
2027 /* "volatile" forces reading low byte first, even if less efficient,
2028 for correct operation with 16-bit I/O registers. */
2029 int mem_volatile_p = MEM_VOLATILE_P (src);
2030 int tmp;
2031
2032 if (!l)
2033 l = &tmp;
2034
2035 if (reg_base > 0)
2036 {
2037 if (reg_dest == reg_base) /* R = (R) */
2038 {
2039 *l = 3;
2040 return (AS2 (ld,__tmp_reg__,%1+) CR_TAB
2041 AS2 (ld,%B0,%1) CR_TAB
2042 AS2 (mov,%A0,__tmp_reg__));
2043 }
2044 else if (reg_base == REG_X) /* (R26) */
2045 {
2046 if (reg_unused_after (insn, base))
2047 {
2048 *l = 2;
2049 return (AS2 (ld,%A0,X+) CR_TAB
2050 AS2 (ld,%B0,X));
2051 }
2052 *l = 3;
2053 return (AS2 (ld,%A0,X+) CR_TAB
2054 AS2 (ld,%B0,X) CR_TAB
2055 AS2 (sbiw,r26,1));
2056 }
2057 else /* (R) */
2058 {
2059 *l = 2;
2060 return (AS2 (ld,%A0,%1) CR_TAB
2061 AS2 (ldd,%B0,%1+1));
2062 }
2063 }
2064 else if (GET_CODE (base) == PLUS) /* (R + i) */
2065 {
2066 int disp = INTVAL (XEXP (base, 1));
2067 int reg_base = true_regnum (XEXP (base, 0));
2068
2069 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2070 {
2071 if (REGNO (XEXP (base, 0)) != REG_Y)
2072 fatal_insn ("incorrect insn:",insn);
2073
2074 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2075 return *l = 4, (AS2 (adiw,r28,%o1-62) CR_TAB
2076 AS2 (ldd,%A0,Y+62) CR_TAB
2077 AS2 (ldd,%B0,Y+63) CR_TAB
2078 AS2 (sbiw,r28,%o1-62));
2079
2080 return *l = 6, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2081 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2082 AS2 (ld,%A0,Y) CR_TAB
2083 AS2 (ldd,%B0,Y+1) CR_TAB
2084 AS2 (subi,r28,lo8(%o1)) CR_TAB
2085 AS2 (sbci,r29,hi8(%o1)));
2086 }
2087 if (reg_base == REG_X)
2088 {
2089 /* This is a paranoid case. LEGITIMIZE_RELOAD_ADDRESS must exclude
2090 it, but I have seen this situation with extreme
2091 optimization options. */
2092
2093 *l = 4;
2094 if (reg_base == reg_dest)
2095 return (AS2 (adiw,r26,%o1) CR_TAB
2096 AS2 (ld,__tmp_reg__,X+) CR_TAB
2097 AS2 (ld,%B0,X) CR_TAB
2098 AS2 (mov,%A0,__tmp_reg__));
2099
2100 return (AS2 (adiw,r26,%o1) CR_TAB
2101 AS2 (ld,%A0,X+) CR_TAB
2102 AS2 (ld,%B0,X) CR_TAB
2103 AS2 (sbiw,r26,%o1+1));
2104 }
2105
2106 if (reg_base == reg_dest)
2107 {
2108 *l = 3;
2109 return (AS2 (ldd,__tmp_reg__,%A1) CR_TAB
2110 AS2 (ldd,%B0,%B1) CR_TAB
2111 AS2 (mov,%A0,__tmp_reg__));
2112 }
2113
2114 *l = 2;
2115 return (AS2 (ldd,%A0,%A1) CR_TAB
2116 AS2 (ldd,%B0,%B1));
2117 }
2118 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2119 {
2120 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2121 fatal_insn ("incorrect insn:", insn);
2122
2123 if (mem_volatile_p)
2124 {
2125 if (REGNO (XEXP (base, 0)) == REG_X)
2126 {
2127 *l = 4;
2128 return (AS2 (sbiw,r26,2) CR_TAB
2129 AS2 (ld,%A0,X+) CR_TAB
2130 AS2 (ld,%B0,X) CR_TAB
2131 AS2 (sbiw,r26,1));
2132 }
2133 else
2134 {
2135 *l = 3;
2136 return (AS2 (sbiw,%r1,2) CR_TAB
2137 AS2 (ld,%A0,%p1) CR_TAB
2138 AS2 (ldd,%B0,%p1+1));
2139 }
2140 }
2141
2142 *l = 2;
2143 return (AS2 (ld,%B0,%1) CR_TAB
2144 AS2 (ld,%A0,%1));
2145 }
2146 else if (GET_CODE (base) == POST_INC) /* (R++) */
2147 {
2148 if (reg_overlap_mentioned_p (dest, XEXP (base, 0)))
2149 fatal_insn ("incorrect insn:", insn);
2150
2151 *l = 2;
2152 return (AS2 (ld,%A0,%1) CR_TAB
2153 AS2 (ld,%B0,%1));
2154 }
2155 else if (CONSTANT_ADDRESS_P (base))
2156 {
2157 if (optimize > 0 && io_address_operand (base, HImode))
2158 {
2159 *l = 2;
2160 return (AS2 (in,%A0,%A1-0x20) CR_TAB
2161 AS2 (in,%B0,%B1-0x20));
2162 }
2163 *l = 4;
2164 return (AS2 (lds,%A0,%A1) CR_TAB
2165 AS2 (lds,%B0,%B1));
2166 }
2167
2168 fatal_insn ("unknown move insn:",insn);
2169 return "";
2170 }
2171
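/* Output asm for loading an SImode (4-byte) value from memory.  The main
   complications are overlap between the destination and the base register,
   which is resolved by going through __tmp_reg__, and the X register,
   which lacks a displacement mode ("ld r26,-X" is undefined, so r26/r27
   are loaded last through __tmp_reg__). */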
2172 const char *
2173 out_movsi_r_mr (rtx insn, rtx op[], int *l)
2174 {
2175 rtx dest = op[0];
2176 rtx src = op[1];
2177 rtx base = XEXP (src, 0);
2178 int reg_dest = true_regnum (dest);
2179 int reg_base = true_regnum (base);
2180 int tmp;
2181
2182 if (!l)
2183 l = &tmp;
2184
2185 if (reg_base > 0)
2186 {
2187 if (reg_base == REG_X) /* (R26) */
2188 {
2189 if (reg_dest == REG_X)
2190 /* "ld r26,-X" is undefined */
2191 return *l=7, (AS2 (adiw,r26,3) CR_TAB
2192 AS2 (ld,r29,X) CR_TAB
2193 AS2 (ld,r28,-X) CR_TAB
2194 AS2 (ld,__tmp_reg__,-X) CR_TAB
2195 AS2 (sbiw,r26,1) CR_TAB
2196 AS2 (ld,r26,X) CR_TAB
2197 AS2 (mov,r27,__tmp_reg__));
2198 else if (reg_dest == REG_X - 2)
2199 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2200 AS2 (ld,%B0,X+) CR_TAB
2201 AS2 (ld,__tmp_reg__,X+) CR_TAB
2202 AS2 (ld,%D0,X) CR_TAB
2203 AS2 (mov,%C0,__tmp_reg__));
2204 else if (reg_unused_after (insn, base))
2205 return *l=4, (AS2 (ld,%A0,X+) CR_TAB
2206 AS2 (ld,%B0,X+) CR_TAB
2207 AS2 (ld,%C0,X+) CR_TAB
2208 AS2 (ld,%D0,X));
2209 else
2210 return *l=5, (AS2 (ld,%A0,X+) CR_TAB
2211 AS2 (ld,%B0,X+) CR_TAB
2212 AS2 (ld,%C0,X+) CR_TAB
2213 AS2 (ld,%D0,X) CR_TAB
2214 AS2 (sbiw,r26,3));
2215 }
2216 else
2217 {
2218 if (reg_dest == reg_base)
2219 return *l=5, (AS2 (ldd,%D0,%1+3) CR_TAB
2220 AS2 (ldd,%C0,%1+2) CR_TAB
2221 AS2 (ldd,__tmp_reg__,%1+1) CR_TAB
2222 AS2 (ld,%A0,%1) CR_TAB
2223 AS2 (mov,%B0,__tmp_reg__));
2224 else if (reg_base == reg_dest + 2)
2225 return *l=5, (AS2 (ld ,%A0,%1) CR_TAB
2226 AS2 (ldd,%B0,%1+1) CR_TAB
2227 AS2 (ldd,__tmp_reg__,%1+2) CR_TAB
2228 AS2 (ldd,%D0,%1+3) CR_TAB
2229 AS2 (mov,%C0,__tmp_reg__));
2230 else
2231 return *l=4, (AS2 (ld ,%A0,%1) CR_TAB
2232 AS2 (ldd,%B0,%1+1) CR_TAB
2233 AS2 (ldd,%C0,%1+2) CR_TAB
2234 AS2 (ldd,%D0,%1+3));
2235 }
2236 }
2237 else if (GET_CODE (base) == PLUS) /* (R + i) */
2238 {
2239 int disp = INTVAL (XEXP (base, 1));
2240
2241 if (disp > MAX_LD_OFFSET (GET_MODE (src)))
2242 {
2243 if (REGNO (XEXP (base, 0)) != REG_Y)
2244 fatal_insn ("incorrect insn:",insn);
2245
2246 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (src)))
2247 return *l = 6, (AS2 (adiw,r28,%o1-60) CR_TAB
2248 AS2 (ldd,%A0,Y+60) CR_TAB
2249 AS2 (ldd,%B0,Y+61) CR_TAB
2250 AS2 (ldd,%C0,Y+62) CR_TAB
2251 AS2 (ldd,%D0,Y+63) CR_TAB
2252 AS2 (sbiw,r28,%o1-60));
2253
2254 return *l = 8, (AS2 (subi,r28,lo8(-%o1)) CR_TAB
2255 AS2 (sbci,r29,hi8(-%o1)) CR_TAB
2256 AS2 (ld,%A0,Y) CR_TAB
2257 AS2 (ldd,%B0,Y+1) CR_TAB
2258 AS2 (ldd,%C0,Y+2) CR_TAB
2259 AS2 (ldd,%D0,Y+3) CR_TAB
2260 AS2 (subi,r28,lo8(%o1)) CR_TAB
2261 AS2 (sbci,r29,hi8(%o1)));
2262 }
2263
2264 reg_base = true_regnum (XEXP (base, 0));
2265 if (reg_base == REG_X)
2266 {
2267 /* R = (X + d) */
2268 if (reg_dest == REG_X)
2269 {
2270 *l = 7;
2271 /* "ld r26,-X" is undefined */
2272 return (AS2 (adiw,r26,%o1+3) CR_TAB
2273 AS2 (ld,r29,X) CR_TAB
2274 AS2 (ld,r28,-X) CR_TAB
2275 AS2 (ld,__tmp_reg__,-X) CR_TAB
2276 AS2 (sbiw,r26,1) CR_TAB
2277 AS2 (ld,r26,X) CR_TAB
2278 AS2 (mov,r27,__tmp_reg__));
2279 }
2280 *l = 6;
2281 if (reg_dest == REG_X - 2)
2282 return (AS2 (adiw,r26,%o1) CR_TAB
2283 AS2 (ld,r24,X+) CR_TAB
2284 AS2 (ld,r25,X+) CR_TAB
2285 AS2 (ld,__tmp_reg__,X+) CR_TAB
2286 AS2 (ld,r27,X) CR_TAB
2287 AS2 (mov,r26,__tmp_reg__));
2288
2289 return (AS2 (adiw,r26,%o1) CR_TAB
2290 AS2 (ld,%A0,X+) CR_TAB
2291 AS2 (ld,%B0,X+) CR_TAB
2292 AS2 (ld,%C0,X+) CR_TAB
2293 AS2 (ld,%D0,X) CR_TAB
2294 AS2 (sbiw,r26,%o1+3));
2295 }
2296 if (reg_dest == reg_base)
2297 return *l=5, (AS2 (ldd,%D0,%D1) CR_TAB
2298 AS2 (ldd,%C0,%C1) CR_TAB
2299 AS2 (ldd,__tmp_reg__,%B1) CR_TAB
2300 AS2 (ldd,%A0,%A1) CR_TAB
2301 AS2 (mov,%B0,__tmp_reg__));
2302 else if (reg_dest == reg_base - 2)
2303 return *l=5, (AS2 (ldd,%A0,%A1) CR_TAB
2304 AS2 (ldd,%B0,%B1) CR_TAB
2305 AS2 (ldd,__tmp_reg__,%C1) CR_TAB
2306 AS2 (ldd,%D0,%D1) CR_TAB
2307 AS2 (mov,%C0,__tmp_reg__));
2308 return *l=4, (AS2 (ldd,%A0,%A1) CR_TAB
2309 AS2 (ldd,%B0,%B1) CR_TAB
2310 AS2 (ldd,%C0,%C1) CR_TAB
2311 AS2 (ldd,%D0,%D1));
2312 }
2313 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2314 return *l=4, (AS2 (ld,%D0,%1) CR_TAB
2315 AS2 (ld,%C0,%1) CR_TAB
2316 AS2 (ld,%B0,%1) CR_TAB
2317 AS2 (ld,%A0,%1));
2318 else if (GET_CODE (base) == POST_INC) /* (R++) */
2319 return *l=4, (AS2 (ld,%A0,%1) CR_TAB
2320 AS2 (ld,%B0,%1) CR_TAB
2321 AS2 (ld,%C0,%1) CR_TAB
2322 AS2 (ld,%D0,%1));
2323 else if (CONSTANT_ADDRESS_P (base))
2324 return *l=8, (AS2 (lds,%A0,%A1) CR_TAB
2325 AS2 (lds,%B0,%B1) CR_TAB
2326 AS2 (lds,%C0,%C1) CR_TAB
2327 AS2 (lds,%D0,%D1));
2328
2329 fatal_insn ("unknown move insn:",insn);
2330 return "";
2331 }
2332
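/* Output asm for storing an SImode value to memory.  Constant addresses
   use four STS instructions; X-based and overlapping cases go through
   __tmp_reg__ and __zero_reg__ (clearing __zero_reg__ again afterwards);
   displacements beyond the STD range temporarily adjust Y. */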
2333 const char *
2334 out_movsi_mr_r (rtx insn, rtx op[], int *l)
2335 {
2336 rtx dest = op[0];
2337 rtx src = op[1];
2338 rtx base = XEXP (dest, 0);
2339 int reg_base = true_regnum (base);
2340 int reg_src = true_regnum (src);
2341 int tmp;
2342
2343 if (!l)
2344 l = &tmp;
2345
2346 if (CONSTANT_ADDRESS_P (base))
2347 return *l=8,(AS2 (sts,%A0,%A1) CR_TAB
2348 AS2 (sts,%B0,%B1) CR_TAB
2349 AS2 (sts,%C0,%C1) CR_TAB
2350 AS2 (sts,%D0,%D1));
2351 if (reg_base > 0) /* (r) */
2352 {
2353 if (reg_base == REG_X) /* (R26) */
2354 {
2355 if (reg_src == REG_X)
2356 {
2357 /* "st X+,r26" is undefined */
2358 if (reg_unused_after (insn, base))
2359 return *l=6, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2360 AS2 (st,X,r26) CR_TAB
2361 AS2 (adiw,r26,1) CR_TAB
2362 AS2 (st,X+,__tmp_reg__) CR_TAB
2363 AS2 (st,X+,r28) CR_TAB
2364 AS2 (st,X,r29));
2365 else
2366 return *l=7, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2367 AS2 (st,X,r26) CR_TAB
2368 AS2 (adiw,r26,1) CR_TAB
2369 AS2 (st,X+,__tmp_reg__) CR_TAB
2370 AS2 (st,X+,r28) CR_TAB
2371 AS2 (st,X,r29) CR_TAB
2372 AS2 (sbiw,r26,3));
2373 }
2374 else if (reg_base == reg_src + 2)
2375 {
2376 if (reg_unused_after (insn, base))
2377 return *l=7, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2378 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2379 AS2 (st,%0+,%A1) CR_TAB
2380 AS2 (st,%0+,%B1) CR_TAB
2381 AS2 (st,%0+,__zero_reg__) CR_TAB
2382 AS2 (st,%0,__tmp_reg__) CR_TAB
2383 AS1 (clr,__zero_reg__));
2384 else
2385 return *l=8, (AS2 (mov,__zero_reg__,%C1) CR_TAB
2386 AS2 (mov,__tmp_reg__,%D1) CR_TAB
2387 AS2 (st,%0+,%A1) CR_TAB
2388 AS2 (st,%0+,%B1) CR_TAB
2389 AS2 (st,%0+,__zero_reg__) CR_TAB
2390 AS2 (st,%0,__tmp_reg__) CR_TAB
2391 AS1 (clr,__zero_reg__) CR_TAB
2392 AS2 (sbiw,r26,3));
2393 }
2394 return *l=5, (AS2 (st,%0+,%A1) CR_TAB
2395 AS2 (st,%0+,%B1) CR_TAB
2396 AS2 (st,%0+,%C1) CR_TAB
2397 AS2 (st,%0,%D1) CR_TAB
2398 AS2 (sbiw,r26,3));
2399 }
2400 else
2401 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2402 AS2 (std,%0+1,%B1) CR_TAB
2403 AS2 (std,%0+2,%C1) CR_TAB
2404 AS2 (std,%0+3,%D1));
2405 }
2406 else if (GET_CODE (base) == PLUS) /* (R + i) */
2407 {
2408 int disp = INTVAL (XEXP (base, 1));
2409 reg_base = REGNO (XEXP (base, 0));
2410 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2411 {
2412 if (reg_base != REG_Y)
2413 fatal_insn ("incorrect insn:",insn);
2414
2415 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2416 return *l = 6, (AS2 (adiw,r28,%o0-60) CR_TAB
2417 AS2 (std,Y+60,%A1) CR_TAB
2418 AS2 (std,Y+61,%B1) CR_TAB
2419 AS2 (std,Y+62,%C1) CR_TAB
2420 AS2 (std,Y+63,%D1) CR_TAB
2421 AS2 (sbiw,r28,%o0-60));
2422
2423 return *l = 8, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2424 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2425 AS2 (st,Y,%A1) CR_TAB
2426 AS2 (std,Y+1,%B1) CR_TAB
2427 AS2 (std,Y+2,%C1) CR_TAB
2428 AS2 (std,Y+3,%D1) CR_TAB
2429 AS2 (subi,r28,lo8(%o0)) CR_TAB
2430 AS2 (sbci,r29,hi8(%o0)));
2431 }
2432 if (reg_base == REG_X)
2433 {
2434 /* (X + d) = R */
2435 if (reg_src == REG_X)
2436 {
2437 *l = 9;
2438 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2439 AS2 (mov,__zero_reg__,r27) CR_TAB
2440 AS2 (adiw,r26,%o0) CR_TAB
2441 AS2 (st,X+,__tmp_reg__) CR_TAB
2442 AS2 (st,X+,__zero_reg__) CR_TAB
2443 AS2 (st,X+,r28) CR_TAB
2444 AS2 (st,X,r29) CR_TAB
2445 AS1 (clr,__zero_reg__) CR_TAB
2446 AS2 (sbiw,r26,%o0+3));
2447 }
2448 else if (reg_src == REG_X - 2)
2449 {
2450 *l = 9;
2451 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2452 AS2 (mov,__zero_reg__,r27) CR_TAB
2453 AS2 (adiw,r26,%o0) CR_TAB
2454 AS2 (st,X+,r24) CR_TAB
2455 AS2 (st,X+,r25) CR_TAB
2456 AS2 (st,X+,__tmp_reg__) CR_TAB
2457 AS2 (st,X,__zero_reg__) CR_TAB
2458 AS1 (clr,__zero_reg__) CR_TAB
2459 AS2 (sbiw,r26,%o0+3));
2460 }
2461 *l = 6;
2462 return (AS2 (adiw,r26,%o0) CR_TAB
2463 AS2 (st,X+,%A1) CR_TAB
2464 AS2 (st,X+,%B1) CR_TAB
2465 AS2 (st,X+,%C1) CR_TAB
2466 AS2 (st,X,%D1) CR_TAB
2467 AS2 (sbiw,r26,%o0+3));
2468 }
2469 return *l=4, (AS2 (std,%A0,%A1) CR_TAB
2470 AS2 (std,%B0,%B1) CR_TAB
2471 AS2 (std,%C0,%C1) CR_TAB
2472 AS2 (std,%D0,%D1));
2473 }
2474 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2475 return *l=4, (AS2 (st,%0,%D1) CR_TAB
2476 AS2 (st,%0,%C1) CR_TAB
2477 AS2 (st,%0,%B1) CR_TAB
2478 AS2 (st,%0,%A1));
2479 else if (GET_CODE (base) == POST_INC) /* (R++) */
2480 return *l=4, (AS2 (st,%0,%A1) CR_TAB
2481 AS2 (st,%0,%B1) CR_TAB
2482 AS2 (st,%0,%C1) CR_TAB
2483 AS2 (st,%0,%D1));
2484 fatal_insn ("unknown move insn:",insn);
2485 return "";
2486 }
2487
2488 const char *
2489 output_movsisf(rtx insn, rtx operands[], int *l)
2490 {
2491 int dummy;
2492 rtx dest = operands[0];
2493 rtx src = operands[1];
2494 int *real_l = l;
2495
2496 if (!l)
2497 l = &dummy;
2498
2499 if (register_operand (dest, VOIDmode))
2500 {
2501 if (register_operand (src, VOIDmode)) /* mov r,r */
2502 {
2503 if (true_regnum (dest) > true_regnum (src))
2504 {
2505 if (AVR_HAVE_MOVW)
2506 {
2507 *l = 2;
2508 return (AS2 (movw,%C0,%C1) CR_TAB
2509 AS2 (movw,%A0,%A1));
2510 }
2511 *l = 4;
2512 return (AS2 (mov,%D0,%D1) CR_TAB
2513 AS2 (mov,%C0,%C1) CR_TAB
2514 AS2 (mov,%B0,%B1) CR_TAB
2515 AS2 (mov,%A0,%A1));
2516 }
2517 else
2518 {
2519 if (AVR_HAVE_MOVW)
2520 {
2521 *l = 2;
2522 return (AS2 (movw,%A0,%A1) CR_TAB
2523 AS2 (movw,%C0,%C1));
2524 }
2525 *l = 4;
2526 return (AS2 (mov,%A0,%A1) CR_TAB
2527 AS2 (mov,%B0,%B1) CR_TAB
2528 AS2 (mov,%C0,%C1) CR_TAB
2529 AS2 (mov,%D0,%D1));
2530 }
2531 }
2532 else if (CONSTANT_P (src))
2533 {
2534 if (test_hard_reg_class (LD_REGS, dest)) /* ldi d,i */
2535 {
2536 *l = 4;
2537 return (AS2 (ldi,%A0,lo8(%1)) CR_TAB
2538 AS2 (ldi,%B0,hi8(%1)) CR_TAB
2539 AS2 (ldi,%C0,hlo8(%1)) CR_TAB
2540 AS2 (ldi,%D0,hhi8(%1)));
2541 }
2542
2543 if (GET_CODE (src) == CONST_INT)
2544 {
2545 const char *const clr_op0 =
2546 AVR_HAVE_MOVW ? (AS1 (clr,%A0) CR_TAB
2547 AS1 (clr,%B0) CR_TAB
2548 AS2 (movw,%C0,%A0))
2549 : (AS1 (clr,%A0) CR_TAB
2550 AS1 (clr,%B0) CR_TAB
2551 AS1 (clr,%C0) CR_TAB
2552 AS1 (clr,%D0));
2553
2554 if (src == const0_rtx) /* mov r,L */
2555 {
2556 *l = AVR_HAVE_MOVW ? 3 : 4;
2557 return clr_op0;
2558 }
2559 else if (src == const1_rtx)
2560 {
2561 if (!real_l)
2562 output_asm_insn (clr_op0, operands);
2563 *l = AVR_HAVE_MOVW ? 4 : 5;
2564 return AS1 (inc,%A0);
2565 }
2566 else if (src == constm1_rtx)
2567 {
2568 /* Immediate constant -1 to any register. */
2569 if (AVR_HAVE_MOVW)
2570 {
2571 *l = 4;
2572 return (AS1 (clr,%A0) CR_TAB
2573 AS1 (dec,%A0) CR_TAB
2574 AS2 (mov,%B0,%A0) CR_TAB
2575 AS2 (movw,%C0,%A0));
2576 }
2577 *l = 5;
2578 return (AS1 (clr,%A0) CR_TAB
2579 AS1 (dec,%A0) CR_TAB
2580 AS2 (mov,%B0,%A0) CR_TAB
2581 AS2 (mov,%C0,%A0) CR_TAB
2582 AS2 (mov,%D0,%A0));
2583 }
2584 else
2585 {
2586 int bit_nr = exact_log2 (INTVAL (src));
2587
2588 if (bit_nr >= 0)
2589 {
2590 *l = AVR_HAVE_MOVW ? 5 : 6;
2591 if (!real_l)
2592 {
2593 output_asm_insn (clr_op0, operands);
2594 output_asm_insn ("set", operands);
2595 }
2596 if (!real_l)
2597 avr_output_bld (operands, bit_nr);
2598
2599 return "";
2600 }
2601 }
2602 }
2603
2604 /* Last resort, better than loading from memory. */
2605 *l = 10;
2606 return (AS2 (mov,__tmp_reg__,r31) CR_TAB
2607 AS2 (ldi,r31,lo8(%1)) CR_TAB
2608 AS2 (mov,%A0,r31) CR_TAB
2609 AS2 (ldi,r31,hi8(%1)) CR_TAB
2610 AS2 (mov,%B0,r31) CR_TAB
2611 AS2 (ldi,r31,hlo8(%1)) CR_TAB
2612 AS2 (mov,%C0,r31) CR_TAB
2613 AS2 (ldi,r31,hhi8(%1)) CR_TAB
2614 AS2 (mov,%D0,r31) CR_TAB
2615 AS2 (mov,r31,__tmp_reg__));
2616 }
2617 else if (GET_CODE (src) == MEM)
2618 return out_movsi_r_mr (insn, operands, real_l); /* mov r,m */
2619 }
2620 else if (GET_CODE (dest) == MEM)
2621 {
2622 const char *templ;
2623
2624 if (src == const0_rtx)
2625 operands[1] = zero_reg_rtx;
2626
2627 templ = out_movsi_mr_r (insn, operands, real_l);
2628
2629 if (!real_l)
2630 output_asm_insn (templ, operands);
2631
2632 operands[1] = src;
2633 return "";
2634 }
2635 fatal_insn ("invalid insn:", insn);
2636 return "";
2637 }
2638
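/* Output asm for storing a QImode value to memory: OUT for SREG and other
   I/O addresses, STS for other constant addresses, STD for reg+displacement,
   with the usual Y/X adjustment when the displacement is out of range. */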
2639 const char *
2640 out_movqi_mr_r (rtx insn, rtx op[], int *l)
2641 {
2642 rtx dest = op[0];
2643 rtx src = op[1];
2644 rtx x = XEXP (dest, 0);
2645 int dummy;
2646
2647 if (!l)
2648 l = &dummy;
2649
2650 if (CONSTANT_ADDRESS_P (x))
2651 {
2652 if (CONST_INT_P (x) && INTVAL (x) == SREG_ADDR)
2653 {
2654 *l = 1;
2655 return AS2 (out,__SREG__,%1);
2656 }
2657 if (optimize > 0 && io_address_operand (x, QImode))
2658 {
2659 *l = 1;
2660 return AS2 (out,%0-0x20,%1);
2661 }
2662 *l = 2;
2663 return AS2 (sts,%0,%1);
2664 }
2665 /* memory access by reg+disp */
2666 else if (GET_CODE (x) == PLUS
2667 && REG_P (XEXP (x,0))
2668 && GET_CODE (XEXP (x,1)) == CONST_INT)
2669 {
2670 if ((INTVAL (XEXP (x,1)) - GET_MODE_SIZE (GET_MODE (dest))) >= 63)
2671 {
2672 int disp = INTVAL (XEXP (x,1));
2673 if (REGNO (XEXP (x,0)) != REG_Y)
2674 fatal_insn ("incorrect insn:",insn);
2675
2676 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2677 return *l = 3, (AS2 (adiw,r28,%o0-63) CR_TAB
2678 AS2 (std,Y+63,%1) CR_TAB
2679 AS2 (sbiw,r28,%o0-63));
2680
2681 return *l = 5, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2682 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2683 AS2 (st,Y,%1) CR_TAB
2684 AS2 (subi,r28,lo8(%o0)) CR_TAB
2685 AS2 (sbci,r29,hi8(%o0)));
2686 }
2687 else if (REGNO (XEXP (x,0)) == REG_X)
2688 {
2689 if (reg_overlap_mentioned_p (src, XEXP (x, 0)))
2690 {
2691 if (reg_unused_after (insn, XEXP (x,0)))
2692 return *l = 3, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2693 AS2 (adiw,r26,%o0) CR_TAB
2694 AS2 (st,X,__tmp_reg__));
2695
2696 return *l = 4, (AS2 (mov,__tmp_reg__,%1) CR_TAB
2697 AS2 (adiw,r26,%o0) CR_TAB
2698 AS2 (st,X,__tmp_reg__) CR_TAB
2699 AS2 (sbiw,r26,%o0));
2700 }
2701 else
2702 {
2703 if (reg_unused_after (insn, XEXP (x,0)))
2704 return *l = 2, (AS2 (adiw,r26,%o0) CR_TAB
2705 AS2 (st,X,%1));
2706
2707 return *l = 3, (AS2 (adiw,r26,%o0) CR_TAB
2708 AS2 (st,X,%1) CR_TAB
2709 AS2 (sbiw,r26,%o0));
2710 }
2711 }
2712 *l = 1;
2713 return AS2 (std,%0,%1);
2714 }
2715 *l = 1;
2716 return AS2 (st,%0,%1);
2717 }
2718
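/* Output asm for storing a HImode value to memory.  Mirrors out_movhi_r_mr,
   except that volatile accesses write the high byte first, again for correct
   operation with 16-bit I/O registers. */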
2719 const char *
2720 out_movhi_mr_r (rtx insn, rtx op[], int *l)
2721 {
2722 rtx dest = op[0];
2723 rtx src = op[1];
2724 rtx base = XEXP (dest, 0);
2725 int reg_base = true_regnum (base);
2726 int reg_src = true_regnum (src);
2727 /* "volatile" forces writing high byte first, even if less efficient,
2728 for correct operation with 16-bit I/O registers. */
2729 int mem_volatile_p = MEM_VOLATILE_P (dest);
2730 int tmp;
2731
2732 if (!l)
2733 l = &tmp;
2734 if (CONSTANT_ADDRESS_P (base))
2735 {
2736 if (optimize > 0 && io_address_operand (base, HImode))
2737 {
2738 *l = 2;
2739 return (AS2 (out,%B0-0x20,%B1) CR_TAB
2740 AS2 (out,%A0-0x20,%A1));
2741 }
2742 return *l = 4, (AS2 (sts,%B0,%B1) CR_TAB
2743 AS2 (sts,%A0,%A1));
2744 }
2745 if (reg_base > 0)
2746 {
2747 if (reg_base == REG_X)
2748 {
2749 if (reg_src == REG_X)
2750 {
2751 /* "st X+,r26" and "st -X,r26" are undefined. */
2752 if (!mem_volatile_p && reg_unused_after (insn, src))
2753 return *l=4, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2754 AS2 (st,X,r26) CR_TAB
2755 AS2 (adiw,r26,1) CR_TAB
2756 AS2 (st,X,__tmp_reg__));
2757 else
2758 return *l=5, (AS2 (mov,__tmp_reg__,r27) CR_TAB
2759 AS2 (adiw,r26,1) CR_TAB
2760 AS2 (st,X,__tmp_reg__) CR_TAB
2761 AS2 (sbiw,r26,1) CR_TAB
2762 AS2 (st,X,r26));
2763 }
2764 else
2765 {
2766 if (!mem_volatile_p && reg_unused_after (insn, base))
2767 return *l=2, (AS2 (st,X+,%A1) CR_TAB
2768 AS2 (st,X,%B1));
2769 else
2770 return *l=3, (AS2 (adiw,r26,1) CR_TAB
2771 AS2 (st,X,%B1) CR_TAB
2772 AS2 (st,-X,%A1));
2773 }
2774 }
2775 else
2776 return *l=2, (AS2 (std,%0+1,%B1) CR_TAB
2777 AS2 (st,%0,%A1));
2778 }
2779 else if (GET_CODE (base) == PLUS)
2780 {
2781 int disp = INTVAL (XEXP (base, 1));
2782 reg_base = REGNO (XEXP (base, 0));
2783 if (disp > MAX_LD_OFFSET (GET_MODE (dest)))
2784 {
2785 if (reg_base != REG_Y)
2786 fatal_insn ("incorrect insn:",insn);
2787
2788 if (disp <= 63 + MAX_LD_OFFSET (GET_MODE (dest)))
2789 return *l = 4, (AS2 (adiw,r28,%o0-62) CR_TAB
2790 AS2 (std,Y+63,%B1) CR_TAB
2791 AS2 (std,Y+62,%A1) CR_TAB
2792 AS2 (sbiw,r28,%o0-62));
2793
2794 return *l = 6, (AS2 (subi,r28,lo8(-%o0)) CR_TAB
2795 AS2 (sbci,r29,hi8(-%o0)) CR_TAB
2796 AS2 (std,Y+1,%B1) CR_TAB
2797 AS2 (st,Y,%A1) CR_TAB
2798 AS2 (subi,r28,lo8(%o0)) CR_TAB
2799 AS2 (sbci,r29,hi8(%o0)));
2800 }
2801 if (reg_base == REG_X)
2802 {
2803 /* (X + d) = R */
2804 if (reg_src == REG_X)
2805 {
2806 *l = 7;
2807 return (AS2 (mov,__tmp_reg__,r26) CR_TAB
2808 AS2 (mov,__zero_reg__,r27) CR_TAB
2809 AS2 (adiw,r26,%o0+1) CR_TAB
2810 AS2 (st,X,__zero_reg__) CR_TAB
2811 AS2 (st,-X,__tmp_reg__) CR_TAB
2812 AS1 (clr,__zero_reg__) CR_TAB
2813 AS2 (sbiw,r26,%o0));
2814 }
2815 *l = 4;
2816 return (AS2 (adiw,r26,%o0+1) CR_TAB
2817 AS2 (st,X,%B1) CR_TAB
2818 AS2 (st,-X,%A1) CR_TAB
2819 AS2 (sbiw,r26,%o0));
2820 }
2821 return *l=2, (AS2 (std,%B0,%B1) CR_TAB
2822 AS2 (std,%A0,%A1));
2823 }
2824 else if (GET_CODE (base) == PRE_DEC) /* (--R) */
2825 return *l=2, (AS2 (st,%0,%B1) CR_TAB
2826 AS2 (st,%0,%A1));
2827 else if (GET_CODE (base) == POST_INC) /* (R++) */
2828 {
2829 if (mem_volatile_p)
2830 {
2831 if (REGNO (XEXP (base, 0)) == REG_X)
2832 {
2833 *l = 4;
2834 return (AS2 (adiw,r26,1) CR_TAB
2835 AS2 (st,X,%B1) CR_TAB
2836 AS2 (st,-X,%A1) CR_TAB
2837 AS2 (adiw,r26,2));
2838 }
2839 else
2840 {
2841 *l = 3;
2842 return (AS2 (std,%p0+1,%B1) CR_TAB
2843 AS2 (st,%p0,%A1) CR_TAB
2844 AS2 (adiw,%r0,2));
2845 }
2846 }
2847
2848 *l = 2;
2849 return (AS2 (st,%0,%A1) CR_TAB
2850 AS2 (st,%0,%B1));
2851 }
2852 fatal_insn ("unknown move insn:",insn);
2853 return "";
2854 }
2855
2856 /* Return 1 if the frame pointer is required for the current function. */
2857
2858 int
2859 frame_pointer_required_p (void)
2860 {
2861 return (cfun->calls_alloca
2862 || crtl->args.info.nregs == 0
2863 || get_frame_size () > 0);
2864 }
2865
2866 /* Returns the condition of compare insn INSN, or UNKNOWN. */
2867
2868 static RTX_CODE
2869 compare_condition (rtx insn)
2870 {
2871 rtx next = next_real_insn (insn);
2872 RTX_CODE cond = UNKNOWN;
2873 if (next && GET_CODE (next) == JUMP_INSN)
2874 {
2875 rtx pat = PATTERN (next);
2876 rtx src = SET_SRC (pat);
2877 rtx t = XEXP (src, 0);
2878 cond = GET_CODE (t);
2879 }
2880 return cond;
2881 }
2882
2883 /* Returns nonzero if INSN is a tst insn that only tests the sign. */
2884
2885 static int
2886 compare_sign_p (rtx insn)
2887 {
2888 RTX_CODE cond = compare_condition (insn);
2889 return (cond == GE || cond == LT);
2890 }
2891
2892 /* Returns nonzero if the next insn is a JUMP_INSN with a condition
2893 that needs to be swapped (GT, GTU, LE, LEU). */
2894
2895 int
2896 compare_diff_p (rtx insn)
2897 {
2898 RTX_CODE cond = compare_condition (insn);
2899 return (cond == GT || cond == GTU || cond == LE || cond == LEU) ? cond : 0;
2900 }
2901
2902 /* Returns nonzero if INSN is a compare insn with the EQ or NE condition. */
2903
2904 int
2905 compare_eq_p (rtx insn)
2906 {
2907 RTX_CODE cond = compare_condition (insn);
2908 return (cond == EQ || cond == NE);
2909 }
2910
2911
2912 /* Output test instruction for HImode. */
2913
2914 const char *
2915 out_tsthi (rtx insn, int *l)
2916 {
2917 if (compare_sign_p (insn))
2918 {
2919 if (l) *l = 1;
2920 return AS1 (tst,%B0);
2921 }
2922 if (reg_unused_after (insn, SET_SRC (PATTERN (insn)))
2923 && compare_eq_p (insn))
2924 {
2925 /* Faster than sbiw if we can clobber the operand. */
2926 if (l) *l = 1;
2927 return AS2 (or,%A0,%B0);
2928 }
2929 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2930 {
2931 if (l) *l = 1;
2932 return AS2 (sbiw,%0,0);
2933 }
2934 if (l) *l = 2;
2935 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2936 AS2 (cpc,%B0,__zero_reg__));
2937 }
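/* Note on out_tsthi: "or %A0,%B0" sets Z exactly when the 16-bit value is
   zero, so it is valid only for EQ/NE tests and only when the operand may
   be clobbered; "sbiw %0,0" needs an ADDW_REGS register pair; otherwise a
   cp/cpc sequence against __zero_reg__ is used. */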
2938
2939
2940 /* Output test instruction for SImode. */
2941
2942 const char *
2943 out_tstsi (rtx insn, int *l)
2944 {
2945 if (compare_sign_p (insn))
2946 {
2947 if (l) *l = 1;
2948 return AS1 (tst,%D0);
2949 }
2950 if (test_hard_reg_class (ADDW_REGS, SET_SRC (PATTERN (insn))))
2951 {
2952 if (l) *l = 3;
2953 return (AS2 (sbiw,%A0,0) CR_TAB
2954 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2955 AS2 (cpc,%D0,__zero_reg__));
2956 }
2957 if (l) *l = 4;
2958 return (AS2 (cp,%A0,__zero_reg__) CR_TAB
2959 AS2 (cpc,%B0,__zero_reg__) CR_TAB
2960 AS2 (cpc,%C0,__zero_reg__) CR_TAB
2961 AS2 (cpc,%D0,__zero_reg__));
2962 }
2963
2964
2965 /* Generate asm equivalent for various shifts.
2966 Shift count is a CONST_INT, MEM or REG.
2967 This only handles cases that are not already
2968 carefully hand-optimized in ?sh??i3_out. */
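/* For a constant count the shift is expanded inline when the resulting
   code fits (T_LEN words per step, MAX_LEN in total, smaller when
   optimizing for size); otherwise a counted loop is emitted.  The loop
   counter lives in the pattern's scratch register when one is available,
   else in __zero_reg__ (a single bit shifted right until it reaches zero),
   else in an LD_REGS register saved around the loop in __tmp_reg__. */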
2969
2970 void
2971 out_shift_with_cnt (const char *templ, rtx insn, rtx operands[],
2972 int *len, int t_len)
2973 {
2974 rtx op[10];
2975 char str[500];
2976 int second_label = 1;
2977 int saved_in_tmp = 0;
2978 int use_zero_reg = 0;
2979
2980 op[0] = operands[0];
2981 op[1] = operands[1];
2982 op[2] = operands[2];
2983 op[3] = operands[3];
2984 str[0] = 0;
2985
2986 if (len)
2987 *len = 1;
2988
2989 if (GET_CODE (operands[2]) == CONST_INT)
2990 {
2991 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
2992 int count = INTVAL (operands[2]);
2993 int max_len = 10; /* If larger than this, always use a loop. */
2994
2995 if (count <= 0)
2996 {
2997 if (len)
2998 *len = 0;
2999 return;
3000 }
3001
3002 if (count < 8 && !scratch)
3003 use_zero_reg = 1;
3004
3005 if (optimize_size)
3006 max_len = t_len + (scratch ? 3 : (use_zero_reg ? 4 : 5));
3007
3008 if (t_len * count <= max_len)
3009 {
3010 /* Output shifts inline with no loop - faster. */
3011 if (len)
3012 *len = t_len * count;
3013 else
3014 {
3015 while (count-- > 0)
3016 output_asm_insn (templ, op);
3017 }
3018
3019 return;
3020 }
3021
3022 if (scratch)
3023 {
3024 if (!len)
3025 strcat (str, AS2 (ldi,%3,%2));
3026 }
3027 else if (use_zero_reg)
3028 {
3029 /* Hack to save one word: use __zero_reg__ as loop counter.
3030 Set one bit, then shift in a loop until it is 0 again. */
3031
3032 op[3] = zero_reg_rtx;
3033 if (len)
3034 *len = 2;
3035 else
3036 strcat (str, ("set" CR_TAB
3037 AS2 (bld,%3,%2-1)));
3038 }
3039 else
3040 {
3041 /* No scratch register available, use one from LD_REGS (saved in
3042 __tmp_reg__) that doesn't overlap with registers to shift. */
3043
3044 op[3] = gen_rtx_REG (QImode,
3045 ((true_regnum (operands[0]) - 1) & 15) + 16);
3046 op[4] = tmp_reg_rtx;
3047 saved_in_tmp = 1;
3048
3049 if (len)
3050 *len = 3; /* Includes "mov %3,%4" after the loop. */
3051 else
3052 strcat (str, (AS2 (mov,%4,%3) CR_TAB
3053 AS2 (ldi,%3,%2)));
3054 }
3055
3056 second_label = 0;
3057 }
3058 else if (GET_CODE (operands[2]) == MEM)
3059 {
3060 rtx op_mov[10];
3061
3062 op[3] = op_mov[0] = tmp_reg_rtx;
3063 op_mov[1] = op[2];
3064
3065 if (len)
3066 out_movqi_r_mr (insn, op_mov, len);
3067 else
3068 output_asm_insn (out_movqi_r_mr (insn, op_mov, NULL), op_mov);
3069 }
3070 else if (register_operand (operands[2], QImode))
3071 {
3072 if (reg_unused_after (insn, operands[2]))
3073 op[3] = op[2];
3074 else
3075 {
3076 op[3] = tmp_reg_rtx;
3077 if (!len)
3078 strcat (str, (AS2 (mov,%3,%2) CR_TAB));
3079 }
3080 }
3081 else
3082 fatal_insn ("bad shift insn:", insn);
3083
3084 if (second_label)
3085 {
3086 if (len)
3087 ++*len;
3088 else
3089 strcat (str, AS1 (rjmp,2f));
3090 }
3091
3092 if (len)
3093 *len += t_len + 2; /* template + dec + brXX */
3094 else
3095 {
3096 strcat (str, "\n1:\t");
3097 strcat (str, templ);
3098 strcat (str, second_label ? "\n2:\t" : "\n\t");
3099 strcat (str, use_zero_reg ? AS1 (lsr,%3) : AS1 (dec,%3));
3100 strcat (str, CR_TAB);
3101 strcat (str, second_label ? AS1 (brpl,1b) : AS1 (brne,1b));
3102 if (saved_in_tmp)
3103 strcat (str, (CR_TAB AS2 (mov,%3,%4)));
3104 output_asm_insn (str, op);
3105 }
3106 }
3107
3108
3109 /* 8bit shift left ((char)x << i) */
3110
3111 const char *
3112 ashlqi3_out (rtx insn, rtx operands[], int *len)
3113 {
3114 if (GET_CODE (operands[2]) == CONST_INT)
3115 {
3116 int k;
3117
3118 if (!len)
3119 len = &k;
3120
3121 switch (INTVAL (operands[2]))
3122 {
3123 default:
3124 if (INTVAL (operands[2]) < 8)
3125 break;
3126
3127 *len = 1;
3128 return AS1 (clr,%0);
3129
3130 case 1:
3131 *len = 1;
3132 return AS1 (lsl,%0);
3133
3134 case 2:
3135 *len = 2;
3136 return (AS1 (lsl,%0) CR_TAB
3137 AS1 (lsl,%0));
3138
3139 case 3:
3140 *len = 3;
3141 return (AS1 (lsl,%0) CR_TAB
3142 AS1 (lsl,%0) CR_TAB
3143 AS1 (lsl,%0));
3144
3145 case 4:
3146 if (test_hard_reg_class (LD_REGS, operands[0]))
3147 {
3148 *len = 2;
3149 return (AS1 (swap,%0) CR_TAB
3150 AS2 (andi,%0,0xf0));
3151 }
3152 *len = 4;
3153 return (AS1 (lsl,%0) CR_TAB
3154 AS1 (lsl,%0) CR_TAB
3155 AS1 (lsl,%0) CR_TAB
3156 AS1 (lsl,%0));
3157
3158 case 5:
3159 if (test_hard_reg_class (LD_REGS, operands[0]))
3160 {
3161 *len = 3;
3162 return (AS1 (swap,%0) CR_TAB
3163 AS1 (lsl,%0) CR_TAB
3164 AS2 (andi,%0,0xe0));
3165 }
3166 *len = 5;
3167 return (AS1 (lsl,%0) CR_TAB
3168 AS1 (lsl,%0) CR_TAB
3169 AS1 (lsl,%0) CR_TAB
3170 AS1 (lsl,%0) CR_TAB
3171 AS1 (lsl,%0));
3172
3173 case 6:
3174 if (test_hard_reg_class (LD_REGS, operands[0]))
3175 {
3176 *len = 4;
3177 return (AS1 (swap,%0) CR_TAB
3178 AS1 (lsl,%0) CR_TAB
3179 AS1 (lsl,%0) CR_TAB
3180 AS2 (andi,%0,0xc0));
3181 }
3182 *len = 6;
3183 return (AS1 (lsl,%0) CR_TAB
3184 AS1 (lsl,%0) CR_TAB
3185 AS1 (lsl,%0) CR_TAB
3186 AS1 (lsl,%0) CR_TAB
3187 AS1 (lsl,%0) CR_TAB
3188 AS1 (lsl,%0));
3189
3190 case 7:
3191 *len = 3;
3192 return (AS1 (ror,%0) CR_TAB
3193 AS1 (clr,%0) CR_TAB
3194 AS1 (ror,%0));
3195 }
3196 }
3197 else if (CONSTANT_P (operands[2]))
3198 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3199
3200 out_shift_with_cnt (AS1 (lsl,%0),
3201 insn, operands, len, 1);
3202 return "";
3203 }
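/* The shift-by-4 case above relies on the SWAP/ANDI identity: swapping the
   nibbles and masking off the low nibble is the same as an 8-bit left shift
   by four, e.g. (uint8_t)(x << 4) == (uint8_t)(((x >> 4) | (x << 4)) & 0xf0),
   but ANDI only works on the upper registers (LD_REGS). */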
3204
3205
3206 /* 16bit shift left ((short)x << i) */
3207
3208 const char *
3209 ashlhi3_out (rtx insn, rtx operands[], int *len)
3210 {
3211 if (GET_CODE (operands[2]) == CONST_INT)
3212 {
3213 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3214 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3215 int k;
3216 int *t = len;
3217
3218 if (!len)
3219 len = &k;
3220
3221 switch (INTVAL (operands[2]))
3222 {
3223 default:
3224 if (INTVAL (operands[2]) < 16)
3225 break;
3226
3227 *len = 2;
3228 return (AS1 (clr,%B0) CR_TAB
3229 AS1 (clr,%A0));
3230
3231 case 4:
3232 if (optimize_size && scratch)
3233 break; /* 5 */
3234 if (ldi_ok)
3235 {
3236 *len = 6;
3237 return (AS1 (swap,%A0) CR_TAB
3238 AS1 (swap,%B0) CR_TAB
3239 AS2 (andi,%B0,0xf0) CR_TAB
3240 AS2 (eor,%B0,%A0) CR_TAB
3241 AS2 (andi,%A0,0xf0) CR_TAB
3242 AS2 (eor,%B0,%A0));
3243 }
3244 if (scratch)
3245 {
3246 *len = 7;
3247 return (AS1 (swap,%A0) CR_TAB
3248 AS1 (swap,%B0) CR_TAB
3249 AS2 (ldi,%3,0xf0) CR_TAB
3250 AS2 (and,%B0,%3) CR_TAB
3251 AS2 (eor,%B0,%A0) CR_TAB
3252 AS2 (and,%A0,%3) CR_TAB
3253 AS2 (eor,%B0,%A0));
3254 }
3255 break; /* optimize_size ? 6 : 8 */
3256
3257 case 5:
3258 if (optimize_size)
3259 break; /* scratch ? 5 : 6 */
3260 if (ldi_ok)
3261 {
3262 *len = 8;
3263 return (AS1 (lsl,%A0) CR_TAB
3264 AS1 (rol,%B0) CR_TAB
3265 AS1 (swap,%A0) CR_TAB
3266 AS1 (swap,%B0) CR_TAB
3267 AS2 (andi,%B0,0xf0) CR_TAB
3268 AS2 (eor,%B0,%A0) CR_TAB
3269 AS2 (andi,%A0,0xf0) CR_TAB
3270 AS2 (eor,%B0,%A0));
3271 }
3272 if (scratch)
3273 {
3274 *len = 9;
3275 return (AS1 (lsl,%A0) CR_TAB
3276 AS1 (rol,%B0) CR_TAB
3277 AS1 (swap,%A0) CR_TAB
3278 AS1 (swap,%B0) CR_TAB
3279 AS2 (ldi,%3,0xf0) CR_TAB
3280 AS2 (and,%B0,%3) CR_TAB
3281 AS2 (eor,%B0,%A0) CR_TAB
3282 AS2 (and,%A0,%3) CR_TAB
3283 AS2 (eor,%B0,%A0));
3284 }
3285 break; /* 10 */
3286
3287 case 6:
3288 if (optimize_size)
3289 break; /* scratch ? 5 : 6 */
3290 *len = 9;
3291 return (AS1 (clr,__tmp_reg__) CR_TAB
3292 AS1 (lsr,%B0) CR_TAB
3293 AS1 (ror,%A0) CR_TAB
3294 AS1 (ror,__tmp_reg__) CR_TAB
3295 AS1 (lsr,%B0) CR_TAB
3296 AS1 (ror,%A0) CR_TAB
3297 AS1 (ror,__tmp_reg__) CR_TAB
3298 AS2 (mov,%B0,%A0) CR_TAB
3299 AS2 (mov,%A0,__tmp_reg__));
3300
3301 case 7:
3302 *len = 5;
3303 return (AS1 (lsr,%B0) CR_TAB
3304 AS2 (mov,%B0,%A0) CR_TAB
3305 AS1 (clr,%A0) CR_TAB
3306 AS1 (ror,%B0) CR_TAB
3307 AS1 (ror,%A0));
3308
3309 case 8:
3310 return *len = 2, (AS2 (mov,%B0,%A1) CR_TAB
3311 AS1 (clr,%A0));
3312
3313 case 9:
3314 *len = 3;
3315 return (AS2 (mov,%B0,%A0) CR_TAB
3316 AS1 (clr,%A0) CR_TAB
3317 AS1 (lsl,%B0));
3318
3319 case 10:
3320 *len = 4;
3321 return (AS2 (mov,%B0,%A0) CR_TAB
3322 AS1 (clr,%A0) CR_TAB
3323 AS1 (lsl,%B0) CR_TAB
3324 AS1 (lsl,%B0));
3325
3326 case 11:
3327 *len = 5;
3328 return (AS2 (mov,%B0,%A0) CR_TAB
3329 AS1 (clr,%A0) CR_TAB
3330 AS1 (lsl,%B0) CR_TAB
3331 AS1 (lsl,%B0) CR_TAB
3332 AS1 (lsl,%B0));
3333
3334 case 12:
3335 if (ldi_ok)
3336 {
3337 *len = 4;
3338 return (AS2 (mov,%B0,%A0) CR_TAB
3339 AS1 (clr,%A0) CR_TAB
3340 AS1 (swap,%B0) CR_TAB
3341 AS2 (andi,%B0,0xf0));
3342 }
3343 if (scratch)
3344 {
3345 *len = 5;
3346 return (AS2 (mov,%B0,%A0) CR_TAB
3347 AS1 (clr,%A0) CR_TAB
3348 AS1 (swap,%B0) CR_TAB
3349 AS2 (ldi,%3,0xf0) CR_TAB
3350 AS2 (and,%B0,%3));
3351 }
3352 *len = 6;
3353 return (AS2 (mov,%B0,%A0) CR_TAB
3354 AS1 (clr,%A0) CR_TAB
3355 AS1 (lsl,%B0) CR_TAB
3356 AS1 (lsl,%B0) CR_TAB
3357 AS1 (lsl,%B0) CR_TAB
3358 AS1 (lsl,%B0));
3359
3360 case 13:
3361 if (ldi_ok)
3362 {
3363 *len = 5;
3364 return (AS2 (mov,%B0,%A0) CR_TAB
3365 AS1 (clr,%A0) CR_TAB
3366 AS1 (swap,%B0) CR_TAB
3367 AS1 (lsl,%B0) CR_TAB
3368 AS2 (andi,%B0,0xe0));
3369 }
3370 if (AVR_HAVE_MUL && scratch)
3371 {
3372 *len = 5;
3373 return (AS2 (ldi,%3,0x20) CR_TAB
3374 AS2 (mul,%A0,%3) CR_TAB
3375 AS2 (mov,%B0,r0) CR_TAB
3376 AS1 (clr,%A0) CR_TAB
3377 AS1 (clr,__zero_reg__));
3378 }
3379 if (optimize_size && scratch)
3380 break; /* 5 */
3381 if (scratch)
3382 {
3383 *len = 6;
3384 return (AS2 (mov,%B0,%A0) CR_TAB
3385 AS1 (clr,%A0) CR_TAB
3386 AS1 (swap,%B0) CR_TAB
3387 AS1 (lsl,%B0) CR_TAB
3388 AS2 (ldi,%3,0xe0) CR_TAB
3389 AS2 (and,%B0,%3));
3390 }
3391 if (AVR_HAVE_MUL)
3392 {
3393 *len = 6;
3394 return ("set" CR_TAB
3395 AS2 (bld,r1,5) CR_TAB
3396 AS2 (mul,%A0,r1) CR_TAB
3397 AS2 (mov,%B0,r0) CR_TAB
3398 AS1 (clr,%A0) CR_TAB
3399 AS1 (clr,__zero_reg__));
3400 }
3401 *len = 7;
3402 return (AS2 (mov,%B0,%A0) CR_TAB
3403 AS1 (clr,%A0) CR_TAB
3404 AS1 (lsl,%B0) CR_TAB
3405 AS1 (lsl,%B0) CR_TAB
3406 AS1 (lsl,%B0) CR_TAB
3407 AS1 (lsl,%B0) CR_TAB
3408 AS1 (lsl,%B0));
3409
3410 case 14:
3411 if (AVR_HAVE_MUL && ldi_ok)
3412 {
3413 *len = 5;
3414 return (AS2 (ldi,%B0,0x40) CR_TAB
3415 AS2 (mul,%A0,%B0) CR_TAB
3416 AS2 (mov,%B0,r0) CR_TAB
3417 AS1 (clr,%A0) CR_TAB
3418 AS1 (clr,__zero_reg__));
3419 }
3420 if (AVR_HAVE_MUL && scratch)
3421 {
3422 *len = 5;
3423 return (AS2 (ldi,%3,0x40) CR_TAB
3424 AS2 (mul,%A0,%3) CR_TAB
3425 AS2 (mov,%B0,r0) CR_TAB
3426 AS1 (clr,%A0) CR_TAB
3427 AS1 (clr,__zero_reg__));
3428 }
3429 if (optimize_size && ldi_ok)
3430 {
3431 *len = 5;
3432 return (AS2 (mov,%B0,%A0) CR_TAB
3433 AS2 (ldi,%A0,6) "\n1:\t"
3434 AS1 (lsl,%B0) CR_TAB
3435 AS1 (dec,%A0) CR_TAB
3436 AS1 (brne,1b));
3437 }
3438 if (optimize_size && scratch)
3439 break; /* 5 */
3440 *len = 6;
3441 return (AS1 (clr,%B0) CR_TAB
3442 AS1 (lsr,%A0) CR_TAB
3443 AS1 (ror,%B0) CR_TAB
3444 AS1 (lsr,%A0) CR_TAB
3445 AS1 (ror,%B0) CR_TAB
3446 AS1 (clr,%A0));
3447
3448 case 15:
3449 *len = 4;
3450 return (AS1 (clr,%B0) CR_TAB
3451 AS1 (lsr,%A0) CR_TAB
3452 AS1 (ror,%B0) CR_TAB
3453 AS1 (clr,%A0));
3454 }
3455 len = t;
3456 }
3457 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3458 AS1 (rol,%B0)),
3459 insn, operands, len, 2);
3460 return "";
3461 }
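/* Several of the HImode shift counts above (13 and 14) use the hardware
   multiplier when it is available: multiplying by the matching power of
   two leaves the shifted byte in r0, and __zero_reg__ (r1) is cleared
   again afterwards because MUL clobbers it with the high product byte. */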
3462
3463
3464 /* 32bit shift left ((long)x << i) */
3465
3466 const char *
3467 ashlsi3_out (rtx insn, rtx operands[], int *len)
3468 {
3469 if (GET_CODE (operands[2]) == CONST_INT)
3470 {
3471 int k;
3472 int *t = len;
3473
3474 if (!len)
3475 len = &k;
3476
3477 switch (INTVAL (operands[2]))
3478 {
3479 default:
3480 if (INTVAL (operands[2]) < 32)
3481 break;
3482
3483 if (AVR_HAVE_MOVW)
3484 return *len = 3, (AS1 (clr,%D0) CR_TAB
3485 AS1 (clr,%C0) CR_TAB
3486 AS2 (movw,%A0,%C0));
3487 *len = 4;
3488 return (AS1 (clr,%D0) CR_TAB
3489 AS1 (clr,%C0) CR_TAB
3490 AS1 (clr,%B0) CR_TAB
3491 AS1 (clr,%A0));
3492
3493 case 8:
3494 {
3495 int reg0 = true_regnum (operands[0]);
3496 int reg1 = true_regnum (operands[1]);
3497 *len = 4;
3498 if (reg0 >= reg1)
3499 return (AS2 (mov,%D0,%C1) CR_TAB
3500 AS2 (mov,%C0,%B1) CR_TAB
3501 AS2 (mov,%B0,%A1) CR_TAB
3502 AS1 (clr,%A0));
3503 else
3504 return (AS1 (clr,%A0) CR_TAB
3505 AS2 (mov,%B0,%A1) CR_TAB
3506 AS2 (mov,%C0,%B1) CR_TAB
3507 AS2 (mov,%D0,%C1));
3508 }
3509
3510 case 16:
3511 {
3512 int reg0 = true_regnum (operands[0]);
3513 int reg1 = true_regnum (operands[1]);
3514 if (reg0 + 2 == reg1)
3515 return *len = 2, (AS1 (clr,%B0) CR_TAB
3516 AS1 (clr,%A0));
3517 if (AVR_HAVE_MOVW)
3518 return *len = 3, (AS2 (movw,%C0,%A1) CR_TAB
3519 AS1 (clr,%B0) CR_TAB
3520 AS1 (clr,%A0));
3521 else
3522 return *len = 4, (AS2 (mov,%C0,%A1) CR_TAB
3523 AS2 (mov,%D0,%B1) CR_TAB
3524 AS1 (clr,%B0) CR_TAB
3525 AS1 (clr,%A0));
3526 }
3527
3528 case 24:
3529 *len = 4;
3530 return (AS2 (mov,%D0,%A1) CR_TAB
3531 AS1 (clr,%C0) CR_TAB
3532 AS1 (clr,%B0) CR_TAB
3533 AS1 (clr,%A0));
3534
3535 case 31:
3536 *len = 6;
3537 return (AS1 (clr,%D0) CR_TAB
3538 AS1 (lsr,%A0) CR_TAB
3539 AS1 (ror,%D0) CR_TAB
3540 AS1 (clr,%C0) CR_TAB
3541 AS1 (clr,%B0) CR_TAB
3542 AS1 (clr,%A0));
3543 }
3544 len = t;
3545 }
3546 out_shift_with_cnt ((AS1 (lsl,%A0) CR_TAB
3547 AS1 (rol,%B0) CR_TAB
3548 AS1 (rol,%C0) CR_TAB
3549 AS1 (rol,%D0)),
3550 insn, operands, len, 4);
3551 return "";
3552 }
3553
3554 /* 8bit arithmetic shift right ((signed char)x >> i) */
3555
3556 const char *
3557 ashrqi3_out (rtx insn, rtx operands[], int *len)
3558 {
3559 if (GET_CODE (operands[2]) == CONST_INT)
3560 {
3561 int k;
3562
3563 if (!len)
3564 len = &k;
3565
3566 switch (INTVAL (operands[2]))
3567 {
3568 case 1:
3569 *len = 1;
3570 return AS1 (asr,%0);
3571
3572 case 2:
3573 *len = 2;
3574 return (AS1 (asr,%0) CR_TAB
3575 AS1 (asr,%0));
3576
3577 case 3:
3578 *len = 3;
3579 return (AS1 (asr,%0) CR_TAB
3580 AS1 (asr,%0) CR_TAB
3581 AS1 (asr,%0));
3582
3583 case 4:
3584 *len = 4;
3585 return (AS1 (asr,%0) CR_TAB
3586 AS1 (asr,%0) CR_TAB
3587 AS1 (asr,%0) CR_TAB
3588 AS1 (asr,%0));
3589
3590 case 5:
3591 *len = 5;
3592 return (AS1 (asr,%0) CR_TAB
3593 AS1 (asr,%0) CR_TAB
3594 AS1 (asr,%0) CR_TAB
3595 AS1 (asr,%0) CR_TAB
3596 AS1 (asr,%0));
3597
3598 case 6:
3599 *len = 4;
3600 return (AS2 (bst,%0,6) CR_TAB
3601 AS1 (lsl,%0) CR_TAB
3602 AS2 (sbc,%0,%0) CR_TAB
3603 AS2 (bld,%0,0));
3604
3605 default:
3606 if (INTVAL (operands[2]) < 8)
3607 break;
3608
3609 /* fall through */
3610
3611 case 7:
3612 *len = 2;
3613 return (AS1 (lsl,%0) CR_TAB
3614 AS2 (sbc,%0,%0));
3615 }
3616 }
3617 else if (CONSTANT_P (operands[2]))
3618 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3619
3620 out_shift_with_cnt (AS1 (asr,%0),
3621 insn, operands, len, 1);
3622 return "";
3623 }
3624
3625
3626 /* 16bit arithmetic shift right ((signed short)x >> i) */
3627
3628 const char *
3629 ashrhi3_out (rtx insn, rtx operands[], int *len)
3630 {
3631 if (GET_CODE (operands[2]) == CONST_INT)
3632 {
3633 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3634 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3635 int k;
3636 int *t = len;
3637
3638 if (!len)
3639 len = &k;
3640
3641 switch (INTVAL (operands[2]))
3642 {
3643 case 4:
3644 case 5:
3645 /* XXX try to optimize this too? */
3646 break;
3647
3648 case 6:
3649 if (optimize_size)
3650 break; /* scratch ? 5 : 6 */
3651 *len = 8;
3652 return (AS2 (mov,__tmp_reg__,%A0) CR_TAB
3653 AS2 (mov,%A0,%B0) CR_TAB
3654 AS1 (lsl,__tmp_reg__) CR_TAB
3655 AS1 (rol,%A0) CR_TAB
3656 AS2 (sbc,%B0,%B0) CR_TAB
3657 AS1 (lsl,__tmp_reg__) CR_TAB
3658 AS1 (rol,%A0) CR_TAB
3659 AS1 (rol,%B0));
3660
3661 case 7:
3662 *len = 4;
3663 return (AS1 (lsl,%A0) CR_TAB
3664 AS2 (mov,%A0,%B0) CR_TAB
3665 AS1 (rol,%A0) CR_TAB
3666 AS2 (sbc,%B0,%B0));
3667
3668 case 8:
3669 {
3670 int reg0 = true_regnum (operands[0]);
3671 int reg1 = true_regnum (operands[1]);
3672
3673 if (reg0 == reg1)
3674 return *len = 3, (AS2 (mov,%A0,%B0) CR_TAB
3675 AS1 (lsl,%B0) CR_TAB
3676 AS2 (sbc,%B0,%B0));
3677 else
3678 return *len = 4, (AS2 (mov,%A0,%B1) CR_TAB
3679 AS1 (clr,%B0) CR_TAB
3680 AS2 (sbrc,%A0,7) CR_TAB
3681 AS1 (dec,%B0));
3682 }
3683
3684 case 9:
3685 *len = 4;
3686 return (AS2 (mov,%A0,%B0) CR_TAB
3687 AS1 (lsl,%B0) CR_TAB
3688 AS2 (sbc,%B0,%B0) CR_TAB
3689 AS1 (asr,%A0));
3690
3691 case 10:
3692 *len = 5;
3693 return (AS2 (mov,%A0,%B0) CR_TAB
3694 AS1 (lsl,%B0) CR_TAB
3695 AS2 (sbc,%B0,%B0) CR_TAB
3696 AS1 (asr,%A0) CR_TAB
3697 AS1 (asr,%A0));
3698
3699 case 11:
3700 if (AVR_HAVE_MUL && ldi_ok)
3701 {
3702 *len = 5;
3703 return (AS2 (ldi,%A0,0x20) CR_TAB
3704 AS2 (muls,%B0,%A0) CR_TAB
3705 AS2 (mov,%A0,r1) CR_TAB
3706 AS2 (sbc,%B0,%B0) CR_TAB
3707 AS1 (clr,__zero_reg__));
3708 }
3709 if (optimize_size && scratch)
3710 break; /* 5 */
3711 *len = 6;
3712 return (AS2 (mov,%A0,%B0) CR_TAB
3713 AS1 (lsl,%B0) CR_TAB
3714 AS2 (sbc,%B0,%B0) CR_TAB
3715 AS1 (asr,%A0) CR_TAB
3716 AS1 (asr,%A0) CR_TAB
3717 AS1 (asr,%A0));
3718
3719 case 12:
3720 if (AVR_HAVE_MUL && ldi_ok)
3721 {
3722 *len = 5;
3723 return (AS2 (ldi,%A0,0x10) CR_TAB
3724 AS2 (muls,%B0,%A0) CR_TAB
3725 AS2 (mov,%A0,r1) CR_TAB
3726 AS2 (sbc,%B0,%B0) CR_TAB
3727 AS1 (clr,__zero_reg__));
3728 }
3729 if (optimize_size && scratch)
3730 break; /* 5 */
3731 *len = 7;
3732 return (AS2 (mov,%A0,%B0) CR_TAB
3733 AS1 (lsl,%B0) CR_TAB
3734 AS2 (sbc,%B0,%B0) CR_TAB
3735 AS1 (asr,%A0) CR_TAB
3736 AS1 (asr,%A0) CR_TAB
3737 AS1 (asr,%A0) CR_TAB
3738 AS1 (asr,%A0));
3739
3740 case 13:
3741 if (AVR_HAVE_MUL && ldi_ok)
3742 {
3743 *len = 5;
3744 return (AS2 (ldi,%A0,0x08) CR_TAB
3745 AS2 (muls,%B0,%A0) CR_TAB
3746 AS2 (mov,%A0,r1) CR_TAB
3747 AS2 (sbc,%B0,%B0) CR_TAB
3748 AS1 (clr,__zero_reg__));
3749 }
3750 if (optimize_size)
3751 break; /* scratch ? 5 : 7 */
3752 *len = 8;
3753 return (AS2 (mov,%A0,%B0) CR_TAB
3754 AS1 (lsl,%B0) CR_TAB
3755 AS2 (sbc,%B0,%B0) CR_TAB
3756 AS1 (asr,%A0) CR_TAB
3757 AS1 (asr,%A0) CR_TAB
3758 AS1 (asr,%A0) CR_TAB
3759 AS1 (asr,%A0) CR_TAB
3760 AS1 (asr,%A0));
3761
3762 case 14:
3763 *len = 5;
3764 return (AS1 (lsl,%B0) CR_TAB
3765 AS2 (sbc,%A0,%A0) CR_TAB
3766 AS1 (lsl,%B0) CR_TAB
3767 AS2 (mov,%B0,%A0) CR_TAB
3768 AS1 (rol,%A0));
3769
3770 default:
3771 if (INTVAL (operands[2]) < 16)
3772 break;
3773
3774 /* fall through */
3775
3776 case 15:
3777 return *len = 3, (AS1 (lsl,%B0) CR_TAB
3778 AS2 (sbc,%A0,%A0) CR_TAB
3779 AS2 (mov,%B0,%A0));
3780 }
3781 len = t;
3782 }
3783 out_shift_with_cnt ((AS1 (asr,%B0) CR_TAB
3784 AS1 (ror,%A0)),
3785 insn, operands, len, 2);
3786 return "";
3787 }
3788
3789
3790 /* 32bit arithmetic shift right ((signed long)x >> i) */
3791
3792 const char *
3793 ashrsi3_out (rtx insn, rtx operands[], int *len)
3794 {
3795 if (GET_CODE (operands[2]) == CONST_INT)
3796 {
3797 int k;
3798 int *t = len;
3799
3800 if (!len)
3801 len = &k;
3802
3803 switch (INTVAL (operands[2]))
3804 {
3805 case 8:
3806 {
3807 int reg0 = true_regnum (operands[0]);
3808 int reg1 = true_regnum (operands[1]);
3809 *len=6;
3810 if (reg0 <= reg1)
3811 return (AS2 (mov,%A0,%B1) CR_TAB
3812 AS2 (mov,%B0,%C1) CR_TAB
3813 AS2 (mov,%C0,%D1) CR_TAB
3814 AS1 (clr,%D0) CR_TAB
3815 AS2 (sbrc,%C0,7) CR_TAB
3816 AS1 (dec,%D0));
3817 else
3818 return (AS1 (clr,%D0) CR_TAB
3819 AS2 (sbrc,%D1,7) CR_TAB
3820 AS1 (dec,%D0) CR_TAB
3821 AS2 (mov,%C0,%D1) CR_TAB
3822 AS2 (mov,%B0,%C1) CR_TAB
3823 AS2 (mov,%A0,%B1));
3824 }
3825
3826 case 16:
3827 {
3828 int reg0 = true_regnum (operands[0]);
3829 int reg1 = true_regnum (operands[1]);
3830
3831 if (reg0 == reg1 + 2)
3832 return *len = 4, (AS1 (clr,%D0) CR_TAB
3833 AS2 (sbrc,%B0,7) CR_TAB
3834 AS1 (com,%D0) CR_TAB
3835 AS2 (mov,%C0,%D0));
3836 if (AVR_HAVE_MOVW)
3837 return *len = 5, (AS2 (movw,%A0,%C1) CR_TAB
3838 AS1 (clr,%D0) CR_TAB
3839 AS2 (sbrc,%B0,7) CR_TAB
3840 AS1 (com,%D0) CR_TAB
3841 AS2 (mov,%C0,%D0));
3842 else
3843 return *len = 6, (AS2 (mov,%B0,%D1) CR_TAB
3844 AS2 (mov,%A0,%C1) CR_TAB
3845 AS1 (clr,%D0) CR_TAB
3846 AS2 (sbrc,%B0,7) CR_TAB
3847 AS1 (com,%D0) CR_TAB
3848 AS2 (mov,%C0,%D0));
3849 }
3850
3851 case 24:
3852 return *len = 6, (AS2 (mov,%A0,%D1) CR_TAB
3853 AS1 (clr,%D0) CR_TAB
3854 AS2 (sbrc,%A0,7) CR_TAB
3855 AS1 (com,%D0) CR_TAB
3856 AS2 (mov,%B0,%D0) CR_TAB
3857 AS2 (mov,%C0,%D0));
3858
3859 default:
3860 if (INTVAL (operands[2]) < 32)
3861 break;
3862
3863 /* fall through */
3864
3865 case 31:
3866 if (AVR_HAVE_MOVW)
3867 return *len = 4, (AS1 (lsl,%D0) CR_TAB
3868 AS2 (sbc,%A0,%A0) CR_TAB
3869 AS2 (mov,%B0,%A0) CR_TAB
3870 AS2 (movw,%C0,%A0));
3871 else
3872 return *len = 5, (AS1 (lsl,%D0) CR_TAB
3873 AS2 (sbc,%A0,%A0) CR_TAB
3874 AS2 (mov,%B0,%A0) CR_TAB
3875 AS2 (mov,%C0,%A0) CR_TAB
3876 AS2 (mov,%D0,%A0));
3877 }
3878 len = t;
3879 }
3880 out_shift_with_cnt ((AS1 (asr,%D0) CR_TAB
3881 AS1 (ror,%C0) CR_TAB
3882 AS1 (ror,%B0) CR_TAB
3883 AS1 (ror,%A0)),
3884 insn, operands, len, 4);
3885 return "";
3886 }
3887
3888 /* 8bit logical shift right ((unsigned char)x >> i) */
3889
3890 const char *
3891 lshrqi3_out (rtx insn, rtx operands[], int *len)
3892 {
3893 if (GET_CODE (operands[2]) == CONST_INT)
3894 {
3895 int k;
3896
3897 if (!len)
3898 len = &k;
3899
3900 switch (INTVAL (operands[2]))
3901 {
3902 default:
3903 if (INTVAL (operands[2]) < 8)
3904 break;
3905
3906 *len = 1;
3907 return AS1 (clr,%0);
3908
3909 case 1:
3910 *len = 1;
3911 return AS1 (lsr,%0);
3912
3913 case 2:
3914 *len = 2;
3915 return (AS1 (lsr,%0) CR_TAB
3916 AS1 (lsr,%0));
3917 case 3:
3918 *len = 3;
3919 return (AS1 (lsr,%0) CR_TAB
3920 AS1 (lsr,%0) CR_TAB
3921 AS1 (lsr,%0));
3922
3923 case 4:
3924 if (test_hard_reg_class (LD_REGS, operands[0]))
3925 {
3926 *len=2;
3927 return (AS1 (swap,%0) CR_TAB
3928 AS2 (andi,%0,0x0f));
3929 }
3930 *len = 4;
3931 return (AS1 (lsr,%0) CR_TAB
3932 AS1 (lsr,%0) CR_TAB
3933 AS1 (lsr,%0) CR_TAB
3934 AS1 (lsr,%0));
3935
3936 case 5:
3937 if (test_hard_reg_class (LD_REGS, operands[0]))
3938 {
3939 *len = 3;
3940 return (AS1 (swap,%0) CR_TAB
3941 AS1 (lsr,%0) CR_TAB
3942 AS2 (andi,%0,0x7));
3943 }
3944 *len = 5;
3945 return (AS1 (lsr,%0) CR_TAB
3946 AS1 (lsr,%0) CR_TAB
3947 AS1 (lsr,%0) CR_TAB
3948 AS1 (lsr,%0) CR_TAB
3949 AS1 (lsr,%0));
3950
3951 case 6:
3952 if (test_hard_reg_class (LD_REGS, operands[0]))
3953 {
3954 *len = 4;
3955 return (AS1 (swap,%0) CR_TAB
3956 AS1 (lsr,%0) CR_TAB
3957 AS1 (lsr,%0) CR_TAB
3958 AS2 (andi,%0,0x3));
3959 }
3960 *len = 6;
3961 return (AS1 (lsr,%0) CR_TAB
3962 AS1 (lsr,%0) CR_TAB
3963 AS1 (lsr,%0) CR_TAB
3964 AS1 (lsr,%0) CR_TAB
3965 AS1 (lsr,%0) CR_TAB
3966 AS1 (lsr,%0));
3967
3968 case 7:
3969 *len = 3;
3970 return (AS1 (rol,%0) CR_TAB
3971 AS1 (clr,%0) CR_TAB
3972 AS1 (rol,%0));
3973 }
3974 }
3975 else if (CONSTANT_P (operands[2]))
3976 fatal_insn ("internal compiler error. Incorrect shift:", insn);
3977
3978 out_shift_with_cnt (AS1 (lsr,%0),
3979 insn, operands, len, 1);
3980 return "";
3981 }
3982
3983 /* 16bit logical shift right ((unsigned short)x >> i) */
3984
3985 const char *
3986 lshrhi3_out (rtx insn, rtx operands[], int *len)
3987 {
3988 if (GET_CODE (operands[2]) == CONST_INT)
3989 {
3990 int scratch = (GET_CODE (PATTERN (insn)) == PARALLEL);
3991 int ldi_ok = test_hard_reg_class (LD_REGS, operands[0]);
3992 int k;
3993 int *t = len;
3994
3995 if (!len)
3996 len = &k;
3997
3998 switch (INTVAL (operands[2]))
3999 {
4000 default:
4001 if (INTVAL (operands[2]) < 16)
4002 break;
4003
4004 *len = 2;
4005 return (AS1 (clr,%B0) CR_TAB
4006 AS1 (clr,%A0));
4007
4008 case 4:
4009 if (optimize_size && scratch)
4010 break; /* 5 */
4011 if (ldi_ok)
4012 {
4013 *len = 6;
4014 return (AS1 (swap,%B0) CR_TAB
4015 AS1 (swap,%A0) CR_TAB
4016 AS2 (andi,%A0,0x0f) CR_TAB
4017 AS2 (eor,%A0,%B0) CR_TAB
4018 AS2 (andi,%B0,0x0f) CR_TAB
4019 AS2 (eor,%A0,%B0));
4020 }
4021 if (scratch)
4022 {
4023 *len = 7;
4024 return (AS1 (swap,%B0) CR_TAB
4025 AS1 (swap,%A0) CR_TAB
4026 AS2 (ldi,%3,0x0f) CR_TAB
4027 AS2 (and,%A0,%3) CR_TAB
4028 AS2 (eor,%A0,%B0) CR_TAB
4029 AS2 (and,%B0,%3) CR_TAB
4030 AS2 (eor,%A0,%B0));
4031 }
4032 break; /* optimize_size ? 6 : 8 */
4033
4034 case 5:
4035 if (optimize_size)
4036 break; /* scratch ? 5 : 6 */
4037 if (ldi_ok)
4038 {
4039 *len = 8;
4040 return (AS1 (lsr,%B0) CR_TAB
4041 AS1 (ror,%A0) CR_TAB
4042 AS1 (swap,%B0) CR_TAB
4043 AS1 (swap,%A0) CR_TAB
4044 AS2 (andi,%A0,0x0f) CR_TAB
4045 AS2 (eor,%A0,%B0) CR_TAB
4046 AS2 (andi,%B0,0x0f) CR_TAB
4047 AS2 (eor,%A0,%B0));
4048 }
4049 if (scratch)
4050 {
4051 *len = 9;
4052 return (AS1 (lsr,%B0) CR_TAB
4053 AS1 (ror,%A0) CR_TAB
4054 AS1 (swap,%B0) CR_TAB
4055 AS1 (swap,%A0) CR_TAB
4056 AS2 (ldi,%3,0x0f) CR_TAB
4057 AS2 (and,%A0,%3) CR_TAB
4058 AS2 (eor,%A0,%B0) CR_TAB
4059 AS2 (and,%B0,%3) CR_TAB
4060 AS2 (eor,%A0,%B0));
4061 }
4062 break; /* 10 */
4063
4064 case 6:
4065 if (optimize_size)
4066 break; /* scratch ? 5 : 6 */
4067 *len = 9;
4068 return (AS1 (clr,__tmp_reg__) CR_TAB
4069 AS1 (lsl,%A0) CR_TAB
4070 AS1 (rol,%B0) CR_TAB
4071 AS1 (rol,__tmp_reg__) CR_TAB
4072 AS1 (lsl,%A0) CR_TAB
4073 AS1 (rol,%B0) CR_TAB
4074 AS1 (rol,__tmp_reg__) CR_TAB
4075 AS2 (mov,%A0,%B0) CR_TAB
4076 AS2 (mov,%B0,__tmp_reg__));
4077
4078 case 7:
4079 *len = 5;
4080 return (AS1 (lsl,%A0) CR_TAB
4081 AS2 (mov,%A0,%B0) CR_TAB
4082 AS1 (rol,%A0) CR_TAB
4083 AS2 (sbc,%B0,%B0) CR_TAB
4084 AS1 (neg,%B0));
4085
4086 case 8:
4087 return *len = 2, (AS2 (mov,%A0,%B1) CR_TAB
4088 AS1 (clr,%B0));
4089
4090 case 9:
4091 *len = 3;
4092 return (AS2 (mov,%A0,%B0) CR_TAB
4093 AS1 (clr,%B0) CR_TAB
4094 AS1 (lsr,%A0));
4095
4096 case 10:
4097 *len = 4;
4098 return (AS2 (mov,%A0,%B0) CR_TAB
4099 AS1 (clr,%B0) CR_TAB
4100 AS1 (lsr,%A0) CR_TAB
4101 AS1 (lsr,%A0));
4102
4103 case 11:
4104 *len = 5;
4105 return (AS2 (mov,%A0,%B0) CR_TAB
4106 AS1 (clr,%B0) CR_TAB
4107 AS1 (lsr,%A0) CR_TAB
4108 AS1 (lsr,%A0) CR_TAB
4109 AS1 (lsr,%A0));
4110
4111 case 12:
4112 if (ldi_ok)
4113 {
4114 *len = 4;
4115 return (AS2 (mov,%A0,%B0) CR_TAB
4116 AS1 (clr,%B0) CR_TAB
4117 AS1 (swap,%A0) CR_TAB
4118 AS2 (andi,%A0,0x0f));
4119 }
4120 if (scratch)
4121 {
4122 *len = 5;
4123 return (AS2 (mov,%A0,%B0) CR_TAB
4124 AS1 (clr,%B0) CR_TAB
4125 AS1 (swap,%A0) CR_TAB
4126 AS2 (ldi,%3,0x0f) CR_TAB
4127 AS2 (and,%A0,%3));
4128 }
4129 *len = 6;
4130 return (AS2 (mov,%A0,%B0) CR_TAB
4131 AS1 (clr,%B0) CR_TAB
4132 AS1 (lsr,%A0) CR_TAB
4133 AS1 (lsr,%A0) CR_TAB
4134 AS1 (lsr,%A0) CR_TAB
4135 AS1 (lsr,%A0));
4136
4137 case 13:
4138 if (ldi_ok)
4139 {
4140 *len = 5;
4141 return (AS2 (mov,%A0,%B0) CR_TAB
4142 AS1 (clr,%B0) CR_TAB
4143 AS1 (swap,%A0) CR_TAB
4144 AS1 (lsr,%A0) CR_TAB
4145 AS2 (andi,%A0,0x07));
4146 }
4147 if (AVR_HAVE_MUL && scratch)
4148 {
4149 *len = 5;
4150 return (AS2 (ldi,%3,0x08) CR_TAB
4151 AS2 (mul,%B0,%3) CR_TAB
4152 AS2 (mov,%A0,r1) CR_TAB
4153 AS1 (clr,%B0) CR_TAB
4154 AS1 (clr,__zero_reg__));
4155 }
4156 if (optimize_size && scratch)
4157 break; /* 5 */
4158 if (scratch)
4159 {
4160 *len = 6;
4161 return (AS2 (mov,%A0,%B0) CR_TAB
4162 AS1 (clr,%B0) CR_TAB
4163 AS1 (swap,%A0) CR_TAB
4164 AS1 (lsr,%A0) CR_TAB
4165 AS2 (ldi,%3,0x07) CR_TAB
4166 AS2 (and,%A0,%3));
4167 }
4168 if (AVR_HAVE_MUL)
4169 {
4170 *len = 6;
4171 return ("set" CR_TAB
4172 AS2 (bld,r1,3) CR_TAB
4173 AS2 (mul,%B0,r1) CR_TAB
4174 AS2 (mov,%A0,r1) CR_TAB
4175 AS1 (clr,%B0) CR_TAB
4176 AS1 (clr,__zero_reg__));
4177 }
4178 *len = 7;
4179 return (AS2 (mov,%A0,%B0) CR_TAB
4180 AS1 (clr,%B0) CR_TAB
4181 AS1 (lsr,%A0) CR_TAB
4182 AS1 (lsr,%A0) CR_TAB
4183 AS1 (lsr,%A0) CR_TAB
4184 AS1 (lsr,%A0) CR_TAB
4185 AS1 (lsr,%A0));
4186
4187 case 14:
4188 if (AVR_HAVE_MUL && ldi_ok)
4189 {
4190 *len = 5;
4191 return (AS2 (ldi,%A0,0x04) CR_TAB
4192 AS2 (mul,%B0,%A0) CR_TAB
4193 AS2 (mov,%A0,r1) CR_TAB
4194 AS1 (clr,%B0) CR_TAB
4195 AS1 (clr,__zero_reg__));
4196 }
4197 if (AVR_HAVE_MUL && scratch)
4198 {
4199 *len = 5;
4200 return (AS2 (ldi,%3,0x04) CR_TAB
4201 AS2 (mul,%B0,%3) CR_TAB
4202 AS2 (mov,%A0,r1) CR_TAB
4203 AS1 (clr,%B0) CR_TAB
4204 AS1 (clr,__zero_reg__));
4205 }
4206 if (optimize_size && ldi_ok)
4207 {
4208 *len = 5;
4209 return (AS2 (mov,%A0,%B0) CR_TAB
4210 AS2 (ldi,%B0,6) "\n1:\t"
4211 AS1 (lsr,%A0) CR_TAB
4212 AS1 (dec,%B0) CR_TAB
4213 AS1 (brne,1b));
4214 }
4215 if (optimize_size && scratch)
4216 break; /* 5 */
4217 *len = 6;
4218 return (AS1 (clr,%A0) CR_TAB
4219 AS1 (lsl,%B0) CR_TAB
4220 AS1 (rol,%A0) CR_TAB
4221 AS1 (lsl,%B0) CR_TAB
4222 AS1 (rol,%A0) CR_TAB
4223 AS1 (clr,%B0));
4224
4225 case 15:
4226 *len = 4;
4227 return (AS1 (clr,%A0) CR_TAB
4228 AS1 (lsl,%B0) CR_TAB
4229 AS1 (rol,%A0) CR_TAB
4230 AS1 (clr,%B0));
4231 }
4232 len = t;
4233 }
4234 out_shift_with_cnt ((AS1 (lsr,%B0) CR_TAB
4235 AS1 (ror,%A0)),
4236 insn, operands, len, 2);
4237 return "";
4238 }
4239
4240 /* 32bit logical shift right ((unsigned long)x >> i) */
4241
4242 const char *
4243 lshrsi3_out (rtx insn, rtx operands[], int *len)
4244 {
4245 if (GET_CODE (operands[2]) == CONST_INT)
4246 {
4247 int k;
4248 int *t = len;
4249
4250 if (!len)
4251 len = &k;
4252
4253 switch (INTVAL (operands[2]))
4254 {
4255 default:
4256 if (INTVAL (operands[2]) < 32)
4257 break;
4258
4259 if (AVR_HAVE_MOVW)
4260 return *len = 3, (AS1 (clr,%D0) CR_TAB
4261 AS1 (clr,%C0) CR_TAB
4262 AS2 (movw,%A0,%C0));
4263 *len = 4;
4264 return (AS1 (clr,%D0) CR_TAB
4265 AS1 (clr,%C0) CR_TAB
4266 AS1 (clr,%B0) CR_TAB
4267 AS1 (clr,%A0));
4268
4269 case 8:
4270 {
4271 int reg0 = true_regnum (operands[0]);
4272 int reg1 = true_regnum (operands[1]);
4273 *len = 4;
4274 if (reg0 <= reg1)
4275 return (AS2 (mov,%A0,%B1) CR_TAB
4276 AS2 (mov,%B0,%C1) CR_TAB
4277 AS2 (mov,%C0,%D1) CR_TAB
4278 AS1 (clr,%D0));
4279 else
4280 return (AS1 (clr,%D0) CR_TAB
4281 AS2 (mov,%C0,%D1) CR_TAB
4282 AS2 (mov,%B0,%C1) CR_TAB
4283 AS2 (mov,%A0,%B1));
4284 }
4285
4286 case 16:
4287 {
4288 int reg0 = true_regnum (operands[0]);
4289 int reg1 = true_regnum (operands[1]);
4290
4291 if (reg0 == reg1 + 2)
4292 return *len = 2, (AS1 (clr,%C0) CR_TAB
4293 AS1 (clr,%D0));
4294 if (AVR_HAVE_MOVW)
4295 return *len = 3, (AS2 (movw,%A0,%C1) CR_TAB
4296 AS1 (clr,%C0) CR_TAB
4297 AS1 (clr,%D0));
4298 else
4299 return *len = 4, (AS2 (mov,%B0,%D1) CR_TAB
4300 AS2 (mov,%A0,%C1) CR_TAB
4301 AS1 (clr,%C0) CR_TAB
4302 AS1 (clr,%D0));
4303 }
4304
4305 case 24:
4306 return *len = 4, (AS2 (mov,%A0,%D1) CR_TAB
4307 AS1 (clr,%B0) CR_TAB
4308 AS1 (clr,%C0) CR_TAB
4309 AS1 (clr,%D0));
4310
4311 case 31:
4312 *len = 6;
4313 return (AS1 (clr,%A0) CR_TAB
4314 AS2 (sbrc,%D0,7) CR_TAB
4315 AS1 (inc,%A0) CR_TAB
4316 AS1 (clr,%B0) CR_TAB
4317 AS1 (clr,%C0) CR_TAB
4318 AS1 (clr,%D0));
4319 }
4320 len = t;
4321 }
4322 out_shift_with_cnt ((AS1 (lsr,%D0) CR_TAB
4323 AS1 (ror,%C0) CR_TAB
4324 AS1 (ror,%B0) CR_TAB
4325 AS1 (ror,%A0)),
4326 insn, operands, len, 4);
4327 return "";
4328 }
4329
4330 /* Modifies the length assigned to instruction INSN.
4331 LEN is the initially computed length of the insn. */
4332
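/* The adjusted length is obtained by re-running the relevant output
   routine with a length pointer; in that mode the routines fill in the
   word count without emitting any assembly. */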
4333 int
4334 adjust_insn_length (rtx insn, int len)
4335 {
4336 rtx patt = PATTERN (insn);
4337 rtx set;
4338
4339 if (GET_CODE (patt) == SET)
4340 {
4341 rtx op[10];
4342 op[1] = SET_SRC (patt);
4343 op[0] = SET_DEST (patt);
4344 if (general_operand (op[1], VOIDmode)
4345 && general_operand (op[0], VOIDmode))
4346 {
4347 switch (GET_MODE (op[0]))
4348 {
4349 case QImode:
4350 output_movqi (insn, op, &len);
4351 break;
4352 case HImode:
4353 output_movhi (insn, op, &len);
4354 break;
4355 case SImode:
4356 case SFmode:
4357 output_movsisf (insn, op, &len);
4358 break;
4359 default:
4360 break;
4361 }
4362 }
4363 else if (op[0] == cc0_rtx && REG_P (op[1]))
4364 {
4365 switch (GET_MODE (op[1]))
4366 {
4367 case HImode: out_tsthi (insn,&len); break;
4368 case SImode: out_tstsi (insn,&len); break;
4369 default: break;
4370 }
4371 }
4372 else if (GET_CODE (op[1]) == AND)
4373 {
4374 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4375 {
4376 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4377 if (GET_MODE (op[1]) == SImode)
4378 len = (((mask & 0xff) != 0xff)
4379 + ((mask & 0xff00) != 0xff00)
4380 + ((mask & 0xff0000L) != 0xff0000L)
4381 + ((mask & 0xff000000L) != 0xff000000L));
4382 else if (GET_MODE (op[1]) == HImode)
4383 len = (((mask & 0xff) != 0xff)
4384 + ((mask & 0xff00) != 0xff00));
4385 }
4386 }
4387 else if (GET_CODE (op[1]) == IOR)
4388 {
4389 if (GET_CODE (XEXP (op[1],1)) == CONST_INT)
4390 {
4391 HOST_WIDE_INT mask = INTVAL (XEXP (op[1],1));
4392 if (GET_MODE (op[1]) == SImode)
4393 len = (((mask & 0xff) != 0)
4394 + ((mask & 0xff00) != 0)
4395 + ((mask & 0xff0000L) != 0)
4396 + ((mask & 0xff000000L) != 0));
4397 else if (GET_MODE (op[1]) == HImode)
4398 len = (((mask & 0xff) != 0)
4399 + ((mask & 0xff00) != 0));
4400 }
4401 }
4402 }
4403 set = single_set (insn);
4404 if (set)
4405 {
4406 rtx op[10];
4407
4408 op[1] = SET_SRC (set);
4409 op[0] = SET_DEST (set);
4410
4411 if (GET_CODE (patt) == PARALLEL
4412 && general_operand (op[1], VOIDmode)
4413 && general_operand (op[0], VOIDmode))
4414 {
4415 if (XVECLEN (patt, 0) == 2)
4416 op[2] = XVECEXP (patt, 0, 1);
4417
4418 switch (GET_MODE (op[0]))
4419 {
4420 case QImode:
4421 len = 2;
4422 break;
4423 case HImode:
4424 output_reload_inhi (insn, op, &len);
4425 break;
4426 case SImode:
4427 case SFmode:
4428 output_reload_insisf (insn, op, &len);
4429 break;
4430 default:
4431 break;
4432 }
4433 }
4434 else if (GET_CODE (op[1]) == ASHIFT
4435 || GET_CODE (op[1]) == ASHIFTRT
4436 || GET_CODE (op[1]) == LSHIFTRT)
4437 {
4438 rtx ops[10];
4439 ops[0] = op[0];
4440 ops[1] = XEXP (op[1],0);
4441 ops[2] = XEXP (op[1],1);
4442 switch (GET_CODE (op[1]))
4443 {
4444 case ASHIFT:
4445 switch (GET_MODE (op[0]))
4446 {
4447 case QImode: ashlqi3_out (insn,ops,&len); break;
4448 case HImode: ashlhi3_out (insn,ops,&len); break;
4449 case SImode: ashlsi3_out (insn,ops,&len); break;
4450 default: break;
4451 }
4452 break;
4453 case ASHIFTRT:
4454 switch (GET_MODE (op[0]))
4455 {
4456 case QImode: ashrqi3_out (insn,ops,&len); break;
4457 case HImode: ashrhi3_out (insn,ops,&len); break;
4458 case SImode: ashrsi3_out (insn,ops,&len); break;
4459 default: break;
4460 }
4461 break;
4462 case LSHIFTRT:
4463 switch (GET_MODE (op[0]))
4464 {
4465 case QImode: lshrqi3_out (insn,ops,&len); break;
4466 case HImode: lshrhi3_out (insn,ops,&len); break;
4467 case SImode: lshrsi3_out (insn,ops,&len); break;
4468 default: break;
4469 }
4470 break;
4471 default:
4472 break;
4473 }
4474 }
4475 }
4476 return len;
4477 }
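
/* Worked example for the AND case above (a sketch): an SImode AND with the
   constant mask 0x00FF00FF is costed byte by byte:

       (mask & 0xff)       == 0xff        -> 0  (low byte left untouched)
       (mask & 0xff00)     != 0xff00      -> 1  (byte 1 must be cleared)
       (mask & 0xff0000)   == 0xff0000    -> 0  (byte 2 left untouched)
       (mask & 0xff000000) != 0xff000000  -> 1  (byte 3 must be cleared)

   so the insn length is adjusted to 2.  */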
4478
4479 /* Return nonzero if register REG is dead after INSN. */
4480
4481 int
4482 reg_unused_after (rtx insn, rtx reg)
4483 {
4484 return (dead_or_set_p (insn, reg)
4485 || (REG_P(reg) && _reg_unused_after (insn, reg)));
4486 }
4487
4488 /* Return nonzero if REG is not used after INSN.
4489 We assume REG is a reload reg, and therefore does
4490 not live past labels. It may live past calls or jumps though. */
4491
4492 int
4493 _reg_unused_after (rtx insn, rtx reg)
4494 {
4495 enum rtx_code code;
4496 rtx set;
4497
4498 /* If the reg is set by this instruction, then it is safe for our
4499 case. Disregard the case where this is a store to memory, since
4500 we are checking a register used in the store address. */
4501 set = single_set (insn);
4502 if (set && GET_CODE (SET_DEST (set)) != MEM
4503 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4504 return 1;
4505
4506 while ((insn = NEXT_INSN (insn)))
4507 {
4508 rtx set;
4509 code = GET_CODE (insn);
4510
4511 #if 0
4512 /* If this is a label that existed before reload, then the register
4513 is dead here. However, if this is a label added by reorg, then
4514 the register may still be live here. We can't tell the difference,
4515 so we just ignore labels completely. */
4516 if (code == CODE_LABEL)
4517 return 1;
4518 /* else */
4519 #endif
4520
4521 if (!INSN_P (insn))
4522 continue;
4523
4524 if (code == JUMP_INSN)
4525 return 0;
4526
4527 /* If this is a sequence, we must handle them all at once.
4528 We could have for instance a call that sets the target register,
4529 and an insn in a delay slot that uses the register. In this case,
4530 we must return 0. */
4531 else if (code == INSN && GET_CODE (PATTERN (insn)) == SEQUENCE)
4532 {
4533 int i;
4534 int retval = 0;
4535
4536 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
4537 {
4538 rtx this_insn = XVECEXP (PATTERN (insn), 0, i);
4539 rtx set = single_set (this_insn);
4540
4541 if (GET_CODE (this_insn) == CALL_INSN)
4542 code = CALL_INSN;
4543 else if (GET_CODE (this_insn) == JUMP_INSN)
4544 {
4545 if (INSN_ANNULLED_BRANCH_P (this_insn))
4546 return 0;
4547 code = JUMP_INSN;
4548 }
4549
4550 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4551 return 0;
4552 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4553 {
4554 if (GET_CODE (SET_DEST (set)) != MEM)
4555 retval = 1;
4556 else
4557 return 0;
4558 }
4559 if (set == 0
4560 && reg_overlap_mentioned_p (reg, PATTERN (this_insn)))
4561 return 0;
4562 }
4563 if (retval == 1)
4564 return 1;
4565 else if (code == JUMP_INSN)
4566 return 0;
4567 }
4568
4569 if (code == CALL_INSN)
4570 {
4571 rtx tem;
4572 for (tem = CALL_INSN_FUNCTION_USAGE (insn); tem; tem = XEXP (tem, 1))
4573 if (GET_CODE (XEXP (tem, 0)) == USE
4574 && REG_P (XEXP (XEXP (tem, 0), 0))
4575 && reg_overlap_mentioned_p (reg, XEXP (XEXP (tem, 0), 0)))
4576 return 0;
4577 if (call_used_regs[REGNO (reg)])
4578 return 1;
4579 }
4580
4581 set = single_set (insn);
4582
4583 if (set && reg_overlap_mentioned_p (reg, SET_SRC (set)))
4584 return 0;
4585 if (set && reg_overlap_mentioned_p (reg, SET_DEST (set)))
4586 return GET_CODE (SET_DEST (set)) != MEM;
4587 if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
4588 return 0;
4589 }
4590 return 1;
4591 }
4592
4593 /* Target hook for assembling integer objects. The AVR version needs
4594 special handling for references to certain labels. */
4595
4596 static bool
4597 avr_assemble_integer (rtx x, unsigned int size, int aligned_p)
4598 {
4599 if (size == POINTER_SIZE / BITS_PER_UNIT && aligned_p
4600 && ((GET_CODE (x) == SYMBOL_REF && SYMBOL_REF_FUNCTION_P (x))
4601 || GET_CODE (x) == LABEL_REF))
4602 {
4603 fputs ("\t.word\tgs(", asm_out_file);
4604 output_addr_const (asm_out_file, x);
4605 fputs (")\n", asm_out_file);
4606 return true;
4607 }
4608 return default_assemble_integer (x, size, aligned_p);
4609 }
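
/* Illustration (a sketch): for a pointer-sized, aligned reference to a
   function foo, e.g. the static initializer

       void (*handler) (void) = foo;

   this hook emits

       .word   gs(foo)

   so the value is treated as a program-memory word address, with the
   linker free to insert a stub if the target is out of reach.  */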
4610
4611 /* Worker function for ASM_DECLARE_FUNCTION_NAME. */
4612
4613 void
4614 avr_asm_declare_function_name (FILE *file, const char *name, tree decl)
4615 {
4616
4617 /* If the function has the 'signal' or 'interrupt' attribute, test to
4618 make sure that the name of the function is "__vector_NN" so as to
4619 catch when the user misspells the interrupt vector name. */
4620
4621 if (cfun->machine->is_interrupt)
4622 {
4623 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4624 {
4625 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4626 "%qs appears to be a misspelled interrupt handler",
4627 name);
4628 }
4629 }
4630 else if (cfun->machine->is_signal)
4631 {
4632 if (strncmp (name, "__vector", strlen ("__vector")) != 0)
4633 {
4634 warning_at (DECL_SOURCE_LOCATION (decl), 0,
4635 "%qs appears to be a misspelled signal handler",
4636 name);
4637 }
4638 }
4639
4640 ASM_OUTPUT_TYPE_DIRECTIVE (file, name, "function");
4641 ASM_OUTPUT_LABEL (file, name);
4642 }
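
/* Example of the naming check above (a sketch, assuming handlers are
   declared in the usual avr-libc style):

       void __vector_5 (void) __attribute__ ((signal));    accepted quietly
       void __vektor_5 (void) __attribute__ ((signal));    warns about a
                                                           misspelled handler

   Only the "__vector" prefix is compared; the vector number itself is not
   validated here.  */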
4643
4644 /* The routine used to output NUL-terminated strings. We use a special
4645 version of this for most svr4 targets because doing so makes the
4646 generated assembly code more compact (and thus faster to assemble)
4647 as well as more readable, especially for targets like the i386
4648 (where the only alternative is to output character sequences as
4649 comma separated lists of numbers). */
4650
4651 void
4652 gas_output_limited_string(FILE *file, const char *str)
4653 {
4654 const unsigned char *_limited_str = (const unsigned char *) str;
4655 unsigned ch;
4656 fprintf (file, "%s\"", STRING_ASM_OP);
4657 for (; (ch = *_limited_str); _limited_str++)
4658 {
4659 int escape;
4660 switch (escape = ESCAPES[ch])
4661 {
4662 case 0:
4663 putc (ch, file);
4664 break;
4665 case 1:
4666 fprintf (file, "\\%03o", ch);
4667 break;
4668 default:
4669 putc ('\\', file);
4670 putc (escape, file);
4671 break;
4672 }
4673 }
4674 fprintf (file, "\"\n");
4675 }
4676
4677 /* The routine used to output sequences of byte values. We use a special
4678 version of this for most svr4 targets because doing so makes the
4679 generated assembly code more compact (and thus faster to assemble)
4680 as well as more readable. Note that if we find subparts of the
4681 character sequence which end with NUL (and which are shorter than
4682 STRING_LIMIT) we output those using ASM_OUTPUT_LIMITED_STRING. */
4683
4684 void
4685 gas_output_ascii(FILE *file, const char *str, size_t length)
4686 {
4687 const unsigned char *_ascii_bytes = (const unsigned char *) str;
4688 const unsigned char *limit = _ascii_bytes + length;
4689 unsigned bytes_in_chunk = 0;
4690 for (; _ascii_bytes < limit; _ascii_bytes++)
4691 {
4692 const unsigned char *p;
4693 if (bytes_in_chunk >= 60)
4694 {
4695 fprintf (file, "\"\n");
4696 bytes_in_chunk = 0;
4697 }
4698 for (p = _ascii_bytes; p < limit && *p != '\0'; p++)
4699 continue;
4700 if (p < limit && (p - _ascii_bytes) <= (signed)STRING_LIMIT)
4701 {
4702 if (bytes_in_chunk > 0)
4703 {
4704 fprintf (file, "\"\n");
4705 bytes_in_chunk = 0;
4706 }
4707 gas_output_limited_string (file, (const char*)_ascii_bytes);
4708 _ascii_bytes = p;
4709 }
4710 else
4711 {
4712 int escape;
4713 unsigned ch;
4714 if (bytes_in_chunk == 0)
4715 fprintf (file, "\t.ascii\t\"");
4716 switch (escape = ESCAPES[ch = *_ascii_bytes])
4717 {
4718 case 0:
4719 putc (ch, file);
4720 bytes_in_chunk++;
4721 break;
4722 case 1:
4723 fprintf (file, "\\%03o", ch);
4724 bytes_in_chunk += 4;
4725 break;
4726 default:
4727 putc ('\\', file);
4728 putc (escape, file);
4729 bytes_in_chunk += 2;
4730 break;
4731 }
4732 }
4733 }
4734 if (bytes_in_chunk > 0)
4735 fprintf (file, "\"\n");
4736 }
4737
4738 /* Return nonzero if pseudos that have been assigned to registers
4739 of class CLASS would likely be spilled because registers of
4740 CLASS are needed for spill registers. */
4741
4742 enum reg_class
4743 class_likely_spilled_p (int c)
4744 {
4745 return (c != ALL_REGS && c != ADDW_REGS);
4746 }
4747
4748 /* Valid attributes:
4749 progmem - put data into program memory;
4750 signal - make the function a hardware interrupt handler; interrupts
4751 remain disabled after the function prologue;
4752 interrupt - make the function a hardware interrupt handler; interrupts
4753 are re-enabled after the function prologue;
4754 naked - don't generate a function prologue/epilogue or a `ret' instruction.
4755
4756 Only the `progmem' attribute is valid for a type.
4757
4758 const struct attribute_spec avr_attribute_table[] =
4759 {
4760 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
4761 { "progmem", 0, 0, false, false, false, avr_handle_progmem_attribute },
4762 { "signal", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4763 { "interrupt", 0, 0, true, false, false, avr_handle_fndecl_attribute },
4764 { "naked", 0, 0, false, true, true, avr_handle_fntype_attribute },
4765 { "OS_task", 0, 0, false, true, true, avr_handle_fntype_attribute },
4766 { "OS_main", 0, 0, false, true, true, avr_handle_fntype_attribute },
4767 { NULL, 0, 0, false, false, false, NULL }
4768 };
4769
4770 /* Handle a "progmem" attribute; arguments as in
4771 struct attribute_spec.handler. */
4772 static tree
4773 avr_handle_progmem_attribute (tree *node, tree name,
4774 tree args ATTRIBUTE_UNUSED,
4775 int flags ATTRIBUTE_UNUSED,
4776 bool *no_add_attrs)
4777 {
4778 if (DECL_P (*node))
4779 {
4780 if (TREE_CODE (*node) == TYPE_DECL)
4781 {
4782 /* This is really a decl attribute, not a type attribute,
4783 but try to handle it for GCC 3.0 backwards compatibility. */
4784
4785 tree type = TREE_TYPE (*node);
4786 tree attr = tree_cons (name, args, TYPE_ATTRIBUTES (type));
4787 tree newtype = build_type_attribute_variant (type, attr);
4788
4789 TYPE_MAIN_VARIANT (newtype) = TYPE_MAIN_VARIANT (type);
4790 TREE_TYPE (*node) = newtype;
4791 *no_add_attrs = true;
4792 }
4793 else if (TREE_STATIC (*node) || DECL_EXTERNAL (*node))
4794 {
4795 if (DECL_INITIAL (*node) == NULL_TREE && !DECL_EXTERNAL (*node))
4796 {
4797 warning (0, "only initialized variables can be placed into "
4798 "program memory area");
4799 *no_add_attrs = true;
4800 }
4801 }
4802 else
4803 {
4804 warning (OPT_Wattributes, "%qs attribute ignored",
4805 IDENTIFIER_POINTER (name));
4806 *no_add_attrs = true;
4807 }
4808 }
4809
4810 return NULL_TREE;
4811 }
4812
4813 /* Handle an attribute requiring a FUNCTION_DECL; arguments as in
4814 struct attribute_spec.handler. */
4815
4816 static tree
4817 avr_handle_fndecl_attribute (tree *node, tree name,
4818 tree args ATTRIBUTE_UNUSED,
4819 int flags ATTRIBUTE_UNUSED,
4820 bool *no_add_attrs)
4821 {
4822 if (TREE_CODE (*node) != FUNCTION_DECL)
4823 {
4824 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4825 IDENTIFIER_POINTER (name));
4826 *no_add_attrs = true;
4827 }
4828
4829 return NULL_TREE;
4830 }
4831
4832 static tree
4833 avr_handle_fntype_attribute (tree *node, tree name,
4834 tree args ATTRIBUTE_UNUSED,
4835 int flags ATTRIBUTE_UNUSED,
4836 bool *no_add_attrs)
4837 {
4838 if (TREE_CODE (*node) != FUNCTION_TYPE)
4839 {
4840 warning (OPT_Wattributes, "%qs attribute only applies to functions",
4841 IDENTIFIER_POINTER (name));
4842 *no_add_attrs = true;
4843 }
4844
4845 return NULL_TREE;
4846 }
4847
4848 /* Look for the attribute `progmem' in DECL;
4849 if found, return 1, otherwise 0. */
4850
4851 int
4852 avr_progmem_p (tree decl, tree attributes)
4853 {
4854 tree a;
4855
4856 if (TREE_CODE (decl) != VAR_DECL)
4857 return 0;
4858
4859 if (NULL_TREE
4860 != lookup_attribute ("progmem", attributes))
4861 return 1;
4862
4863 a=decl;
4864 do
4865 a = TREE_TYPE(a);
4866 while (TREE_CODE (a) == ARRAY_TYPE);
4867
4868 if (a == error_mark_node)
4869 return 0;
4870
4871 if (NULL_TREE != lookup_attribute ("progmem", TYPE_ATTRIBUTES (a)))
4872 return 1;
4873
4874 return 0;
4875 }
4876
4877 /* Add the section attribute if the variable is in progmem. */
4878
4879 static void
4880 avr_insert_attributes (tree node, tree *attributes)
4881 {
4882 if (TREE_CODE (node) == VAR_DECL
4883 && (TREE_STATIC (node) || DECL_EXTERNAL (node))
4884 && avr_progmem_p (node, *attributes))
4885 {
4886 static const char dsec[] = ".progmem.data";
4887 *attributes = tree_cons (get_identifier ("section"),
4888 build_tree_list (NULL, build_string (strlen (dsec), dsec)),
4889 *attributes);
4890
4891 /* ??? This seems sketchy. Why can't the user declare the
4892 thing const in the first place? */
4893 TREE_READONLY (node) = 1;
4894 }
4895 }
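
/* Typical use of the progmem machinery above (a sketch of ordinary
   avr-gcc usage):

       static const char table[] __attribute__ ((progmem)) = "data";

   avr_progmem_p recognizes the attribute, avr_insert_attributes adds
   section(".progmem.data") and forces TREE_READONLY, while an
   uninitialized declaration would instead get the "only initialized
   variables" warning from avr_handle_progmem_attribute.  */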
4896
4897 /* A get_unnamed_section callback for switching to progmem_section. */
4898
4899 static void
4900 avr_output_progmem_section_asm_op (const void *arg ATTRIBUTE_UNUSED)
4901 {
4902 fprintf (asm_out_file,
4903 "\t.section .progmem.gcc_sw_table, \"%s\", @progbits\n",
4904 AVR_HAVE_JMP_CALL ? "a" : "ax");
4905 /* Should already be aligned; this is just to be safe if it isn't. */
4906 fprintf (asm_out_file, "\t.p2align 1\n");
4907 }
4908
4909 /* Implement TARGET_ASM_INIT_SECTIONS. */
4910
4911 static void
4912 avr_asm_init_sections (void)
4913 {
4914 progmem_section = get_unnamed_section (AVR_HAVE_JMP_CALL ? 0 : SECTION_CODE,
4915 avr_output_progmem_section_asm_op,
4916 NULL);
4917 readonly_data_section = data_section;
4918 }
4919
4920 static unsigned int
4921 avr_section_type_flags (tree decl, const char *name, int reloc)
4922 {
4923 unsigned int flags = default_section_type_flags (decl, name, reloc);
4924
4925 if (strncmp (name, ".noinit", 7) == 0)
4926 {
4927 if (decl && TREE_CODE (decl) == VAR_DECL
4928 && DECL_INITIAL (decl) == NULL_TREE)
4929 flags |= SECTION_BSS; /* @nobits */
4930 else
4931 warning (0, "only uninitialized variables can be placed in the "
4932 ".noinit section");
4933 }
4934
4935 return flags;
4936 }
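
/* Typical trigger for the .noinit handling above (a sketch):

       int boot_count __attribute__ ((section (".noinit")));

   has no initializer and therefore gets SECTION_BSS (@nobits); giving the
   same variable an initializer would provoke the warning instead.  */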
4937
4938 /* Outputs some appropriate text to go at the start of an assembler
4939 file. */
4940
4941 static void
4942 avr_file_start (void)
4943 {
4944 if (avr_current_arch->asm_only)
4945 error ("MCU %qs supported for assembler only", avr_mcu_name);
4946
4947 default_file_start ();
4948
4949 /* fprintf (asm_out_file, "\t.arch %s\n", avr_mcu_name);*/
4950 fputs ("__SREG__ = 0x3f\n"
4951 "__SP_H__ = 0x3e\n"
4952 "__SP_L__ = 0x3d\n", asm_out_file);
4953
4954 fputs ("__tmp_reg__ = 0\n"
4955 "__zero_reg__ = 1\n", asm_out_file);
4956
4957 /* FIXME: output these only if there is anything in the .data / .bss
4958 sections - some code size could be saved by not linking in the
4959 initialization code from libgcc if one or both sections are empty. */
4960 fputs ("\t.global __do_copy_data\n", asm_out_file);
4961 fputs ("\t.global __do_clear_bss\n", asm_out_file);
4962 }
4963
4964 /* Outputs to the stdio stream FILE some
4965 appropriate text to go at the end of an assembler file. */
4966
4967 static void
4968 avr_file_end (void)
4969 {
4970 }
4971
4972 /* Choose the order in which to allocate hard registers for
4973 pseudo-registers local to a basic block.
4974
4975 Store the desired register order in the array `reg_alloc_order'.
4976 Element 0 should be the register to allocate first; element 1, the
4977 next register; and so on. */
4978
4979 void
4980 order_regs_for_local_alloc (void)
4981 {
4982 unsigned int i;
4983 static const int order_0[] = {
4984 24,25,
4985 18,19,
4986 20,21,
4987 22,23,
4988 30,31,
4989 26,27,
4990 28,29,
4991 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
4992 0,1,
4993 32,33,34,35
4994 };
4995 static const int order_1[] = {
4996 18,19,
4997 20,21,
4998 22,23,
4999 24,25,
5000 30,31,
5001 26,27,
5002 28,29,
5003 17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5004 0,1,
5005 32,33,34,35
5006 };
5007 static const int order_2[] = {
5008 25,24,
5009 23,22,
5010 21,20,
5011 19,18,
5012 30,31,
5013 26,27,
5014 28,29,
5015 17,16,
5016 15,14,13,12,11,10,9,8,7,6,5,4,3,2,
5017 1,0,
5018 32,33,34,35
5019 };
5020
5021 const int *order = (TARGET_ORDER_1 ? order_1 :
5022 TARGET_ORDER_2 ? order_2 :
5023 order_0);
5024 for (i=0; i < ARRAY_SIZE (order_0); ++i)
5025 reg_alloc_order[i] = order[i];
5026 }
5027
5028
5029 /* Mutually recursive subroutine of avr_rtx_cost for calculating the
5030 cost of an RTX operand given its context. X is the rtx of the
5031 operand, MODE is its mode, and OUTER is the rtx_code of this
5032 operand's parent operator. */
5033
5034 static int
5035 avr_operand_rtx_cost (rtx x, enum machine_mode mode, enum rtx_code outer,
5036 bool speed)
5037 {
5038 enum rtx_code code = GET_CODE (x);
5039 int total;
5040
5041 switch (code)
5042 {
5043 case REG:
5044 case SUBREG:
5045 return 0;
5046
5047 case CONST_INT:
5048 case CONST_DOUBLE:
5049 return COSTS_N_INSNS (GET_MODE_SIZE (mode));
5050
5051 default:
5052 break;
5053 }
5054
5055 total = 0;
5056 avr_rtx_costs (x, code, outer, &total, speed);
5057 return total;
5058 }
5059
5060 /* The AVR backend's rtx_cost function. X is rtx expression whose cost
5061 is to be calculated. Return true if the complete cost has been
5062 computed, and false if subexpressions should be scanned. In either
5063 case, *TOTAL contains the cost result. */
5064
5065 static bool
5066 avr_rtx_costs (rtx x, int code, int outer_code ATTRIBUTE_UNUSED, int *total,
5067 bool speed)
5068 {
5069 enum machine_mode mode = GET_MODE (x);
5070 HOST_WIDE_INT val;
5071
5072 switch (code)
5073 {
5074 case CONST_INT:
5075 case CONST_DOUBLE:
5076 /* Immediate constants are as cheap as registers. */
5077 *total = 0;
5078 return true;
5079
5080 case MEM:
5081 case CONST:
5082 case LABEL_REF:
5083 case SYMBOL_REF:
5084 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5085 return true;
5086
5087 case NEG:
5088 switch (mode)
5089 {
5090 case QImode:
5091 case SFmode:
5092 *total = COSTS_N_INSNS (1);
5093 break;
5094
5095 case HImode:
5096 *total = COSTS_N_INSNS (3);
5097 break;
5098
5099 case SImode:
5100 *total = COSTS_N_INSNS (7);
5101 break;
5102
5103 default:
5104 return false;
5105 }
5106 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5107 return true;
5108
5109 case ABS:
5110 switch (mode)
5111 {
5112 case QImode:
5113 case SFmode:
5114 *total = COSTS_N_INSNS (1);
5115 break;
5116
5117 default:
5118 return false;
5119 }
5120 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5121 return true;
5122
5123 case NOT:
5124 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5125 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5126 return true;
5127
5128 case ZERO_EXTEND:
5129 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode)
5130 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5131 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5132 return true;
5133
5134 case SIGN_EXTEND:
5135 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode) + 2
5136 - GET_MODE_SIZE (GET_MODE (XEXP (x, 0))));
5137 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5138 return true;
5139
5140 case PLUS:
5141 switch (mode)
5142 {
5143 case QImode:
5144 *total = COSTS_N_INSNS (1);
5145 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5146 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5147 break;
5148
5149 case HImode:
5150 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5151 {
5152 *total = COSTS_N_INSNS (2);
5153 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5154 }
5155 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5156 *total = COSTS_N_INSNS (1);
5157 else
5158 *total = COSTS_N_INSNS (2);
5159 break;
5160
5161 case SImode:
5162 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5163 {
5164 *total = COSTS_N_INSNS (4);
5165 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5166 }
5167 else if (INTVAL (XEXP (x, 1)) >= -63 && INTVAL (XEXP (x, 1)) <= 63)
5168 *total = COSTS_N_INSNS (1);
5169 else
5170 *total = COSTS_N_INSNS (4);
5171 break;
5172
5173 default:
5174 return false;
5175 }
5176 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5177 return true;
5178
5179 case MINUS:
5180 case AND:
5181 case IOR:
5182 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5183 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5184 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5185 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5186 return true;
5187
5188 case XOR:
5189 *total = COSTS_N_INSNS (GET_MODE_SIZE (mode));
5190 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5191 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5192 return true;
5193
5194 case MULT:
5195 switch (mode)
5196 {
5197 case QImode:
5198 if (AVR_HAVE_MUL)
5199 *total = COSTS_N_INSNS (!speed ? 3 : 4);
5200 else if (!speed)
5201 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5202 else
5203 return false;
5204 break;
5205
5206 case HImode:
5207 if (AVR_HAVE_MUL)
5208 *total = COSTS_N_INSNS (!speed ? 7 : 10);
5209 else if (!speed)
5210 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5211 else
5212 return false;
5213 break;
5214
5215 default:
5216 return false;
5217 }
5218 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5219 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5220 return true;
5221
5222 case DIV:
5223 case MOD:
5224 case UDIV:
5225 case UMOD:
5226 if (!speed)
5227 *total = COSTS_N_INSNS (AVR_HAVE_JMP_CALL ? 2 : 1);
5228 else
5229 return false;
5230 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5231 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5232 return true;
5233
5234 case ROTATE:
5235 switch (mode)
5236 {
5237 case QImode:
5238 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 4)
5239 *total = COSTS_N_INSNS (1);
5240
5241 break;
5242
5243 case HImode:
5244 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 8)
5245 *total = COSTS_N_INSNS (3);
5246
5247 break;
5248
5249 case SImode:
5250 if (CONST_INT_P (XEXP (x, 1)))
5251 switch (INTVAL (XEXP (x, 1)))
5252 {
5253 case 8:
5254 case 24:
5255 *total = COSTS_N_INSNS (5);
5256 break;
5257 case 16:
5258 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 6);
5259 break;
5260 }
5261 break;
5262
5263 default:
5264 return false;
5265 }
5266 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5267 return true;
5268
5269 case ASHIFT:
5270 switch (mode)
5271 {
5272 case QImode:
5273 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5274 {
5275 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5276 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5277 }
5278 else
5279 {
5280 val = INTVAL (XEXP (x, 1));
5281 if (val == 7)
5282 *total = COSTS_N_INSNS (3);
5283 else if (val >= 0 && val <= 7)
5284 *total = COSTS_N_INSNS (val);
5285 else
5286 *total = COSTS_N_INSNS (1);
5287 }
5288 break;
5289
5290 case HImode:
5291 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5292 {
5293 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5294 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5295 }
5296 else
5297 switch (INTVAL (XEXP (x, 1)))
5298 {
5299 case 0:
5300 *total = 0;
5301 break;
5302 case 1:
5303 case 8:
5304 *total = COSTS_N_INSNS (2);
5305 break;
5306 case 9:
5307 *total = COSTS_N_INSNS (3);
5308 break;
5309 case 2:
5310 case 3:
5311 case 10:
5312 case 15:
5313 *total = COSTS_N_INSNS (4);
5314 break;
5315 case 7:
5316 case 11:
5317 case 12:
5318 *total = COSTS_N_INSNS (5);
5319 break;
5320 case 4:
5321 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5322 break;
5323 case 6:
5324 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5325 break;
5326 case 5:
5327 *total = COSTS_N_INSNS (!speed ? 5 : 10);
5328 break;
5329 default:
5330 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5331 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5332 }
5333 break;
5334
5335 case SImode:
5336 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5337 {
5338 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5339 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5340 }
5341 else
5342 switch (INTVAL (XEXP (x, 1)))
5343 {
5344 case 0:
5345 *total = 0;
5346 break;
5347 case 24:
5348 *total = COSTS_N_INSNS (3);
5349 break;
5350 case 1:
5351 case 8:
5352 case 16:
5353 *total = COSTS_N_INSNS (4);
5354 break;
5355 case 31:
5356 *total = COSTS_N_INSNS (6);
5357 break;
5358 case 2:
5359 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5360 break;
5361 default:
5362 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5363 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5364 }
5365 break;
5366
5367 default:
5368 return false;
5369 }
5370 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5371 return true;
5372
5373 case ASHIFTRT:
5374 switch (mode)
5375 {
5376 case QImode:
5377 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5378 {
5379 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5380 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5381 }
5382 else
5383 {
5384 val = INTVAL (XEXP (x, 1));
5385 if (val == 6)
5386 *total = COSTS_N_INSNS (4);
5387 else if (val == 7)
5388 *total = COSTS_N_INSNS (2);
5389 else if (val >= 0 && val <= 7)
5390 *total = COSTS_N_INSNS (val);
5391 else
5392 *total = COSTS_N_INSNS (1);
5393 }
5394 break;
5395
5396 case HImode:
5397 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5398 {
5399 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5400 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5401 }
5402 else
5403 switch (INTVAL (XEXP (x, 1)))
5404 {
5405 case 0:
5406 *total = 0;
5407 break;
5408 case 1:
5409 *total = COSTS_N_INSNS (2);
5410 break;
5411 case 15:
5412 *total = COSTS_N_INSNS (3);
5413 break;
5414 case 2:
5415 case 7:
5416 case 8:
5417 case 9:
5418 *total = COSTS_N_INSNS (4);
5419 break;
5420 case 10:
5421 case 14:
5422 *total = COSTS_N_INSNS (5);
5423 break;
5424 case 11:
5425 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5426 break;
5427 case 12:
5428 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5429 break;
5430 case 6:
5431 case 13:
5432 *total = COSTS_N_INSNS (!speed ? 5 : 8);
5433 break;
5434 default:
5435 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5436 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5437 }
5438 break;
5439
5440 case SImode:
5441 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5442 {
5443 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5444 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5445 }
5446 else
5447 switch (INTVAL (XEXP (x, 1)))
5448 {
5449 case 0:
5450 *total = 0;
5451 break;
5452 case 1:
5453 *total = COSTS_N_INSNS (4);
5454 break;
5455 case 8:
5456 case 16:
5457 case 24:
5458 *total = COSTS_N_INSNS (6);
5459 break;
5460 case 2:
5461 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5462 break;
5463 case 31:
5464 *total = COSTS_N_INSNS (AVR_HAVE_MOVW ? 4 : 5);
5465 break;
5466 default:
5467 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5468 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5469 }
5470 break;
5471
5472 default:
5473 return false;
5474 }
5475 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5476 return true;
5477
5478 case LSHIFTRT:
5479 switch (mode)
5480 {
5481 case QImode:
5482 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5483 {
5484 *total = COSTS_N_INSNS (!speed ? 4 : 17);
5485 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5486 }
5487 else
5488 {
5489 val = INTVAL (XEXP (x, 1));
5490 if (val == 7)
5491 *total = COSTS_N_INSNS (3);
5492 else if (val >= 0 && val <= 7)
5493 *total = COSTS_N_INSNS (val);
5494 else
5495 *total = COSTS_N_INSNS (1);
5496 }
5497 break;
5498
5499 case HImode:
5500 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5501 {
5502 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5503 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5504 }
5505 else
5506 switch (INTVAL (XEXP (x, 1)))
5507 {
5508 case 0:
5509 *total = 0;
5510 break;
5511 case 1:
5512 case 8:
5513 *total = COSTS_N_INSNS (2);
5514 break;
5515 case 9:
5516 *total = COSTS_N_INSNS (3);
5517 break;
5518 case 2:
5519 case 10:
5520 case 15:
5521 *total = COSTS_N_INSNS (4);
5522 break;
5523 case 7:
5524 case 11:
5525 *total = COSTS_N_INSNS (5);
5526 break;
5527 case 3:
5528 case 12:
5529 case 13:
5530 case 14:
5531 *total = COSTS_N_INSNS (!speed ? 5 : 6);
5532 break;
5533 case 4:
5534 *total = COSTS_N_INSNS (!speed ? 5 : 7);
5535 break;
5536 case 5:
5537 case 6:
5538 *total = COSTS_N_INSNS (!speed ? 5 : 9);
5539 break;
5540 default:
5541 *total = COSTS_N_INSNS (!speed ? 5 : 41);
5542 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5543 }
5544 break;
5545
5546 case SImode:
5547 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5548 {
5549 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5550 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5551 }
5552 else
5553 switch (INTVAL (XEXP (x, 1)))
5554 {
5555 case 0:
5556 *total = 0;
5557 break;
5558 case 1:
5559 *total = COSTS_N_INSNS (4);
5560 break;
5561 case 2:
5562 *total = COSTS_N_INSNS (!speed ? 7 : 8);
5563 break;
5564 case 8:
5565 case 16:
5566 case 24:
5567 *total = COSTS_N_INSNS (4);
5568 break;
5569 case 31:
5570 *total = COSTS_N_INSNS (6);
5571 break;
5572 default:
5573 *total = COSTS_N_INSNS (!speed ? 7 : 113);
5574 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5575 }
5576 break;
5577
5578 default:
5579 return false;
5580 }
5581 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5582 return true;
5583
5584 case COMPARE:
5585 switch (GET_MODE (XEXP (x, 0)))
5586 {
5587 case QImode:
5588 *total = COSTS_N_INSNS (1);
5589 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5590 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5591 break;
5592
5593 case HImode:
5594 *total = COSTS_N_INSNS (2);
5595 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5596 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5597 else if (INTVAL (XEXP (x, 1)) != 0)
5598 *total += COSTS_N_INSNS (1);
5599 break;
5600
5601 case SImode:
5602 *total = COSTS_N_INSNS (4);
5603 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
5604 *total += avr_operand_rtx_cost (XEXP (x, 1), mode, code, speed);
5605 else if (INTVAL (XEXP (x, 1)) != 0)
5606 *total += COSTS_N_INSNS (3);
5607 break;
5608
5609 default:
5610 return false;
5611 }
5612 *total += avr_operand_rtx_cost (XEXP (x, 0), mode, code, speed);
5613 return true;
5614
5615 default:
5616 break;
5617 }
5618 return false;
5619 }
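
/* Two worked examples of the cost model above (sketches): a HImode PLUS of
   a register and the constant 10 lies within -63..63 and is costed at
   COSTS_N_INSNS (1), while the same PLUS with the constant 1000 is costed
   at COSTS_N_INSNS (2); in both cases the register operand contributes 0
   through avr_operand_rtx_cost.  */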
5620
5621 /* Calculate the cost of a memory address. */
5622
5623 static int
5624 avr_address_cost (rtx x, bool speed ATTRIBUTE_UNUSED)
5625 {
5626 if (GET_CODE (x) == PLUS
5627 && GET_CODE (XEXP (x,1)) == CONST_INT
5628 && (REG_P (XEXP (x,0)) || GET_CODE (XEXP (x,0)) == SUBREG)
5629 && INTVAL (XEXP (x,1)) >= 61)
5630 return 18;
5631 if (CONSTANT_ADDRESS_P (x))
5632 {
5633 if (optimize > 0 && io_address_operand (x, QImode))
5634 return 2;
5635 return 4;
5636 }
5637 return 4;
5638 }
5639
5640 /* Test for the extra memory constraint 'Q':
5641 a memory address based on the Y or Z pointer with a valid displacement. */
5642
5643 int
5644 extra_constraint_Q (rtx x)
5645 {
5646 if (GET_CODE (XEXP (x,0)) == PLUS
5647 && REG_P (XEXP (XEXP (x,0), 0))
5648 && GET_CODE (XEXP (XEXP (x,0), 1)) == CONST_INT
5649 && (INTVAL (XEXP (XEXP (x,0), 1))
5650 <= MAX_LD_OFFSET (GET_MODE (x))))
5651 {
5652 rtx xx = XEXP (XEXP (x,0), 0);
5653 int regno = REGNO (xx);
5654 if (TARGET_ALL_DEBUG)
5655 {
5656 fprintf (stderr, ("extra_constraint:\n"
5657 "reload_completed: %d\n"
5658 "reload_in_progress: %d\n"),
5659 reload_completed, reload_in_progress);
5660 debug_rtx (x);
5661 }
5662 if (regno >= FIRST_PSEUDO_REGISTER)
5663 return 1; /* allocate pseudos */
5664 else if (regno == REG_Z || regno == REG_Y)
5665 return 1; /* strictly check */
5666 else if (xx == frame_pointer_rtx
5667 || xx == arg_pointer_rtx)
5668 return 1; /* XXX frame & arg pointer checks */
5669 }
5670 return 0;
5671 }
5672
5673 /* Convert condition code CONDITION to the valid AVR condition code. */
5674
5675 RTX_CODE
5676 avr_normalize_condition (RTX_CODE condition)
5677 {
5678 switch (condition)
5679 {
5680 case GT:
5681 return GE;
5682 case GTU:
5683 return GEU;
5684 case LE:
5685 return LT;
5686 case LEU:
5687 return LTU;
5688 default:
5689 gcc_unreachable ();
5690 }
5691 }
5692
5693 /* This function optimizes conditional jumps. */
5694
5695 static void
5696 avr_reorg (void)
5697 {
5698 rtx insn, pattern;
5699
5700 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
5701 {
5702 if (! (GET_CODE (insn) == INSN
5703 || GET_CODE (insn) == CALL_INSN
5704 || GET_CODE (insn) == JUMP_INSN)
5705 || !single_set (insn))
5706 continue;
5707
5708 pattern = PATTERN (insn);
5709
5710 if (GET_CODE (pattern) == PARALLEL)
5711 pattern = XVECEXP (pattern, 0, 0);
5712 if (GET_CODE (pattern) == SET
5713 && SET_DEST (pattern) == cc0_rtx
5714 && compare_diff_p (insn))
5715 {
5716 if (GET_CODE (SET_SRC (pattern)) == COMPARE)
5717 {
5718 /* We are now looking at a compare insn. */
5719
5720 pattern = SET_SRC (pattern);
5721 if (true_regnum (XEXP (pattern,0)) >= 0
5722 && true_regnum (XEXP (pattern,1)) >= 0 )
5723 {
5724 rtx x = XEXP (pattern,0);
5725 rtx next = next_real_insn (insn);
5726 rtx pat = PATTERN (next);
5727 rtx src = SET_SRC (pat);
5728 rtx t = XEXP (src,0);
5729 PUT_CODE (t, swap_condition (GET_CODE (t)));
5730 XEXP (pattern,0) = XEXP (pattern,1);
5731 XEXP (pattern,1) = x;
5732 INSN_CODE (next) = -1;
5733 }
5734 else if (true_regnum (XEXP (pattern,0)) >= 0
5735 && GET_CODE (XEXP (pattern,1)) == CONST_INT)
5736 {
5737 rtx x = XEXP (pattern,1);
5738 rtx next = next_real_insn (insn);
5739 rtx pat = PATTERN (next);
5740 rtx src = SET_SRC (pat);
5741 rtx t = XEXP (src,0);
5742 enum machine_mode mode = GET_MODE (XEXP (pattern, 0));
5743
5744 if (avr_simplify_comparison_p (mode, GET_CODE (t), x))
5745 {
5746 XEXP (pattern, 1) = gen_int_mode (INTVAL (x) + 1, mode);
5747 PUT_CODE (t, avr_normalize_condition (GET_CODE (t)));
5748 INSN_CODE (next) = -1;
5749 INSN_CODE (insn) = -1;
5750 }
5751 }
5752 }
5753 else if (true_regnum (SET_SRC (pattern)) >= 0)
5754 {
5755 /* This is a tst insn */
5756 rtx next = next_real_insn (insn);
5757 rtx pat = PATTERN (next);
5758 rtx src = SET_SRC (pat);
5759 rtx t = XEXP (src,0);
5760
5761 PUT_CODE (t, swap_condition (GET_CODE (t)));
5762 SET_SRC (pattern) = gen_rtx_COMPARE (GET_MODE (SET_SRC (pattern)), const0_rtx,
5763 SET_SRC (pattern));
5764 INSN_CODE (next) = -1;
5765 INSN_CODE (insn) = -1;
5766 }
5767 }
5768 }
5769 }
5770
5771 /* Return the register number used for the function return value. */
5772
5773 int
5774 avr_ret_register (void)
5775 {
5776 return 24;
5777 }
5778
5779 /* Create an RTX representing the place where a
5780 library function returns a value of mode MODE. */
5781
5782 rtx
5783 avr_libcall_value (enum machine_mode mode)
5784 {
5785 int offs = GET_MODE_SIZE (mode);
5786 if (offs < 2)
5787 offs = 2;
5788 return gen_rtx_REG (mode, RET_REGISTER + 2 - offs);
5789 }
5790
5791 /* Create an RTX representing the place where a
5792 function returns a value of data type VALTYPE. */
5793
5794 rtx
5795 avr_function_value (const_tree type,
5796 const_tree func ATTRIBUTE_UNUSED,
5797 bool outgoing ATTRIBUTE_UNUSED)
5798 {
5799 unsigned int offs;
5800
5801 if (TYPE_MODE (type) != BLKmode)
5802 return avr_libcall_value (TYPE_MODE (type));
5803
5804 offs = int_size_in_bytes (type);
5805 if (offs < 2)
5806 offs = 2;
5807 if (offs > 2 && offs < GET_MODE_SIZE (SImode))
5808 offs = GET_MODE_SIZE (SImode);
5809 else if (offs > GET_MODE_SIZE (SImode) && offs < GET_MODE_SIZE (DImode))
5810 offs = GET_MODE_SIZE (DImode);
5811
5812 return gen_rtx_REG (BLKmode, RET_REGISTER + 2 - offs);
5813 }
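
/* Worked example of the return-register choice above (a sketch, with
   RET_REGISTER being the 24 returned by avr_ret_register): a 1- or 2-byte
   value is returned starting at r24 (offs rounded up to 2), a 3- or 4-byte
   value starting at r22 (offs rounded up to 4), and a 5- to 8-byte
   aggregate starting at r18 (offs rounded up to 8).  */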
5814
5815 /* Places additional restrictions on the register class to
5816 use when it is necessary to copy value X into a register
5817 in class CLASS. */
5818
5819 enum reg_class
5820 preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class rclass)
5821 {
5822 return rclass;
5823 }
5824
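/* Return nonzero if the register backing X (via true_regnum) is a member
   of hard register class RCLASS.  */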
5825 int
5826 test_hard_reg_class (enum reg_class rclass, rtx x)
5827 {
5828 int regno = true_regnum (x);
5829 if (regno < 0)
5830 return 0;
5831
5832 if (TEST_HARD_REG_CLASS (rclass, regno))
5833 return 1;
5834
5835 return 0;
5836 }
5837
5838
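/* Return nonzero if the destination DEST of the jump INSN lies exactly one
   word beyond the end of INSN, i.e. the jump skips over a single one-word
   instruction.  */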
5839 int
5840 jump_over_one_insn_p (rtx insn, rtx dest)
5841 {
5842 int uid = INSN_UID (GET_CODE (dest) == LABEL_REF
5843 ? XEXP (dest, 0)
5844 : dest);
5845 int jump_addr = INSN_ADDRESSES (INSN_UID (insn));
5846 int dest_addr = INSN_ADDRESSES (uid);
5847 return dest_addr - jump_addr == get_attr_length (insn) + 1;
5848 }
5849
5850 /* Returns 1 if a value of mode MODE can be stored starting with hard
5851 register number REGNO. On the enhanced core, anything larger than
5852 1 byte must start in an even-numbered register for "movw" to work
5853 (this way we don't have to check for odd registers everywhere). */
5854
5855 int
5856 avr_hard_regno_mode_ok (int regno, enum machine_mode mode)
5857 {
5858 /* Disallow QImode in stack pointer regs. */
5859 if ((regno == REG_SP || regno == (REG_SP + 1)) && mode == QImode)
5860 return 0;
5861
5862 /* The only thing that can go into registers r28:r29 is a Pmode value. */
5863 if (regno == REG_Y && mode == Pmode)
5864 return 1;
5865
5866 /* Otherwise disallow all regno/mode combinations that span r28:r29. */
5867 if (regno <= (REG_Y + 1) && (regno + GET_MODE_SIZE (mode)) >= (REG_Y + 1))
5868 return 0;
5869
5870 if (mode == QImode)
5871 return 1;
5872
5873 /* Modes larger than QImode occupy consecutive registers. */
5874 if (regno + GET_MODE_SIZE (mode) > FIRST_PSEUDO_REGISTER)
5875 return 0;
5876
5877 /* All modes larger than QImode should start in an even register. */
5878 return !(regno & 1);
5879 }
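
/* A few concrete cases for the rules above (a sketch): QImode is allowed
   in any register except the stack-pointer pair, HImode starting at r24
   is fine while HImode starting at r25 fails the final even-register
   test, and SImode starting at r26 is rejected because it would spill
   into the frame-pointer pair r28:r29.  */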
5880
5881 const char *
5882 output_reload_inhi (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5883 {
5884 int tmp;
5885 if (!len)
5886 len = &tmp;
5887
5888 if (GET_CODE (operands[1]) == CONST_INT)
5889 {
5890 int val = INTVAL (operands[1]);
5891 if ((val & 0xff) == 0)
5892 {
5893 *len = 3;
5894 return (AS2 (mov,%A0,__zero_reg__) CR_TAB
5895 AS2 (ldi,%2,hi8(%1)) CR_TAB
5896 AS2 (mov,%B0,%2));
5897 }
5898 else if ((val & 0xff00) == 0)
5899 {
5900 *len = 3;
5901 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5902 AS2 (mov,%A0,%2) CR_TAB
5903 AS2 (mov,%B0,__zero_reg__));
5904 }
5905 else if ((val & 0xff) == ((val & 0xff00) >> 8))
5906 {
5907 *len = 3;
5908 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5909 AS2 (mov,%A0,%2) CR_TAB
5910 AS2 (mov,%B0,%2));
5911 }
5912 }
5913 *len = 4;
5914 return (AS2 (ldi,%2,lo8(%1)) CR_TAB
5915 AS2 (mov,%A0,%2) CR_TAB
5916 AS2 (ldi,%2,hi8(%1)) CR_TAB
5917 AS2 (mov,%B0,%2));
5918 }
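
/* Example outputs of the constant cases above (a sketch, %2 being the
   scratch register supplied by the pattern): loading 0xAB00 takes the
   "low byte is zero" path,

       mov %A0,__zero_reg__
       ldi %2,hi8(0xAB00)
       mov %B0,%2

   while loading 0x2121 takes the "both bytes equal" path and reuses one
   ldi for both mov instructions.  */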
5919
5920
5921 const char *
5922 output_reload_insisf (rtx insn ATTRIBUTE_UNUSED, rtx *operands, int *len)
5923 {
5924 rtx src = operands[1];
5925 int cnst = (GET_CODE (src) == CONST_INT);
5926
5927 if (len)
5928 {
5929 if (cnst)
5930 *len = 4 + ((INTVAL (src) & 0xff) != 0)
5931 + ((INTVAL (src) & 0xff00) != 0)
5932 + ((INTVAL (src) & 0xff0000) != 0)
5933 + ((INTVAL (src) & 0xff000000) != 0);
5934 else
5935 *len = 8;
5936
5937 return "";
5938 }
5939
5940 if (cnst && ((INTVAL (src) & 0xff) == 0))
5941 output_asm_insn (AS2 (mov, %A0, __zero_reg__), operands);
5942 else
5943 {
5944 output_asm_insn (AS2 (ldi, %2, lo8(%1)), operands);
5945 output_asm_insn (AS2 (mov, %A0, %2), operands);
5946 }
5947 if (cnst && ((INTVAL (src) & 0xff00) == 0))
5948 output_asm_insn (AS2 (mov, %B0, __zero_reg__), operands);
5949 else
5950 {
5951 output_asm_insn (AS2 (ldi, %2, hi8(%1)), operands);
5952 output_asm_insn (AS2 (mov, %B0, %2), operands);
5953 }
5954 if (cnst && ((INTVAL (src) & 0xff0000) == 0))
5955 output_asm_insn (AS2 (mov, %C0, __zero_reg__), operands);
5956 else
5957 {
5958 output_asm_insn (AS2 (ldi, %2, hlo8(%1)), operands);
5959 output_asm_insn (AS2 (mov, %C0, %2), operands);
5960 }
5961 if (cnst && ((INTVAL (src) & 0xff000000) == 0))
5962 output_asm_insn (AS2 (mov, %D0, __zero_reg__), operands);
5963 else
5964 {
5965 output_asm_insn (AS2 (ldi, %2, hhi8(%1)), operands);
5966 output_asm_insn (AS2 (mov, %D0, %2), operands);
5967 }
5968 return "";
5969 }
5970
5971 void
5972 avr_output_bld (rtx operands[], int bit_nr)
5973 {
5974 static char s[] = "bld %A0,0";
5975
5976 s[5] = 'A' + (bit_nr >> 3);
5977 s[8] = '0' + (bit_nr & 7);
5978 output_asm_insn (s, operands);
5979 }
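
/* Example of the template patching above (a sketch): for bit_nr == 10 the
   byte index is 10 >> 3 == 1 and the bit index is 10 & 7 == 2, so the
   emitted instruction is "bld %B0,2".  */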
5980
5981 void
5982 avr_output_addr_vec_elt (FILE *stream, int value)
5983 {
5984 switch_to_section (progmem_section);
5985 if (AVR_HAVE_JMP_CALL)
5986 fprintf (stream, "\t.word gs(.L%d)\n", value);
5987 else
5988 fprintf (stream, "\trjmp .L%d\n", value);
5989 }
5990
5991 /* Return true if hard register REGNO is safe to be allocated as a scratch
5992 register (for a define_peephole2) in the current function. */
5993
5994 bool
5995 avr_hard_regno_scratch_ok (unsigned int regno)
5996 {
5997 /* Interrupt functions can only use registers that have already been saved
5998 by the prologue, even if they would normally be call-clobbered. */
5999
6000 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6001 && !df_regs_ever_live_p (regno))
6002 return false;
6003
6004 return true;
6005 }
6006
6007 /* Return nonzero if register OLD_REG can be renamed to register NEW_REG. */
6008
6009 int
6010 avr_hard_regno_rename_ok (unsigned int old_reg ATTRIBUTE_UNUSED,
6011 unsigned int new_reg)
6012 {
6013 /* Interrupt functions can only use registers that have already been
6014 saved by the prologue, even if they would normally be
6015 call-clobbered. */
6016
6017 if ((cfun->machine->is_interrupt || cfun->machine->is_signal)
6018 && !df_regs_ever_live_p (new_reg))
6019 return 0;
6020
6021 return 1;
6022 }
6023
6024 /* Output a branch that tests a single bit of a register (QI, HI or SImode)
6025 or memory location in the I/O space (QImode only).
6026
6027 Operand 0: comparison operator (must be EQ or NE, compare bit to zero).
6028 Operand 1: register operand to test, or CONST_INT memory address.
6029 Operand 2: bit number (for QImode operand) or mask (HImode, SImode).
6030 Operand 3: label to jump to if the test is true. */
6031
6032 const char *
6033 avr_out_sbxx_branch (rtx insn, rtx operands[])
6034 {
6035 enum rtx_code comp = GET_CODE (operands[0]);
6036 int long_jump = (get_attr_length (insn) >= 4);
6037 int reverse = long_jump || jump_over_one_insn_p (insn, operands[3]);
6038
6039 if (comp == GE)
6040 comp = EQ;
6041 else if (comp == LT)
6042 comp = NE;
6043
6044 if (reverse)
6045 comp = reverse_condition (comp);
6046
6047 if (GET_CODE (operands[1]) == CONST_INT)
6048 {
6049 if (INTVAL (operands[1]) < 0x40)
6050 {
6051 if (comp == EQ)
6052 output_asm_insn (AS2 (sbis,%1-0x20,%2), operands);
6053 else
6054 output_asm_insn (AS2 (sbic,%1-0x20,%2), operands);
6055 }
6056 else
6057 {
6058 output_asm_insn (AS2 (in,__tmp_reg__,%1-0x20), operands);
6059 if (comp == EQ)
6060 output_asm_insn (AS2 (sbrs,__tmp_reg__,%2), operands);
6061 else
6062 output_asm_insn (AS2 (sbrc,__tmp_reg__,%2), operands);
6063 }
6064 }
6065 else /* GET_CODE (operands[1]) == REG */
6066 {
6067 if (GET_MODE (operands[1]) == QImode)
6068 {
6069 if (comp == EQ)
6070 output_asm_insn (AS2 (sbrs,%1,%2), operands);
6071 else
6072 output_asm_insn (AS2 (sbrc,%1,%2), operands);
6073 }
6074 else /* HImode or SImode */
6075 {
6076 static char buf[] = "sbrc %A1,0";
6077 int bit_nr = exact_log2 (INTVAL (operands[2])
6078 & GET_MODE_MASK (GET_MODE (operands[1])));
6079
6080 buf[3] = (comp == EQ) ? 's' : 'c';
6081 buf[6] = 'A' + (bit_nr >> 3);
6082 buf[9] = '0' + (bit_nr & 7);
6083 output_asm_insn (buf, operands);
6084 }
6085 }
6086
6087 if (long_jump)
6088 return (AS1 (rjmp,.+4) CR_TAB
6089 AS1 (jmp,%3));
6090 if (!reverse)
6091 return AS1 (rjmp,%3);
6092 return "";
6093 }
6094
6095 /* Worker function for TARGET_ASM_CONSTRUCTOR. */
6096
6097 static void
6098 avr_asm_out_ctor (rtx symbol, int priority)
6099 {
6100 fputs ("\t.global __do_global_ctors\n", asm_out_file);
6101 default_ctor_section_asm_out_constructor (symbol, priority);
6102 }
6103
6104 /* Worker function for TARGET_ASM_DESTRUCTOR. */
6105
6106 static void
6107 avr_asm_out_dtor (rtx symbol, int priority)
6108 {
6109 fputs ("\t.global __do_global_dtors\n", asm_out_file);
6110 default_dtor_section_asm_out_destructor (symbol, priority);
6111 }
6112
6113 /* Worker function for TARGET_RETURN_IN_MEMORY. */
6114
6115 static bool
6116 avr_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6117 {
6118 if (TYPE_MODE (type) == BLKmode)
6119 {
6120 HOST_WIDE_INT size = int_size_in_bytes (type);
6121 return (size == -1 || size > 8);
6122 }
6123 else
6124 return false;
6125 }
6126
6127 #include "gt-avr.h"