Mercurial > hg > CbC > CbC_gcc
comparison gcc/config/score/score.c @ 0:a06113de4d67
first commit
author | kent <kent@cr.ie.u-ryukyu.ac.jp> |
---|---|
date | Fri, 17 Jul 2009 14:47:48 +0900 |
parents | |
children | 77e2b8dfacca |
comparison
equal
deleted
inserted
replaced
-1:000000000000 | 0:a06113de4d67 |
---|---|
1 /* Output routines for Sunplus S+CORE processor | |
2 Copyright (C) 2005, 2007, 2008 Free Software Foundation, Inc. | |
3 Contributed by Sunnorth. | |
4 | |
5 This file is part of GCC. | |
6 | |
7 GCC is free software; you can redistribute it and/or modify it | |
8 under the terms of the GNU General Public License as published | |
9 by the Free Software Foundation; either version 3, or (at your | |
10 option) any later version. | |
11 | |
12 GCC is distributed in the hope that it will be useful, but WITHOUT | |
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY | |
14 or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public | |
15 License for more details. | |
16 | |
17 You should have received a copy of the GNU General Public License | |
18 along with GCC; see the file COPYING3. If not see | |
19 <http://www.gnu.org/licenses/>. */ | |
20 | |
21 #include "config.h" | |
22 #include "system.h" | |
23 #include "coretypes.h" | |
24 #include "tm.h" | |
25 #include "rtl.h" | |
26 #include "regs.h" | |
27 #include "hard-reg-set.h" | |
28 #include "real.h" | |
29 #include "insn-config.h" | |
30 #include "conditions.h" | |
31 #include "insn-attr.h" | |
32 #include "recog.h" | |
33 #include "toplev.h" | |
34 #include "output.h" | |
35 #include "tree.h" | |
36 #include "function.h" | |
37 #include "expr.h" | |
38 #include "optabs.h" | |
39 #include "flags.h" | |
40 #include "reload.h" | |
41 #include "tm_p.h" | |
42 #include "ggc.h" | |
43 #include "gstab.h" | |
44 #include "hashtab.h" | |
45 #include "debug.h" | |
46 #include "target.h" | |
47 #include "target-def.h" | |
48 #include "integrate.h" | |
49 #include "langhooks.h" | |
50 #include "score7.h" | |
51 #include "score3.h" | |
52 #include "df.h" | |
53 | |
/* Target hook overrides for the S+core back end.  Each pair below
   replaces the default from target-def.h with either a score-specific
   implementation (defined later in this file) or a generic hook with
   the desired behavior.  */

/* Assembly file begin/end hooks.  */
#undef TARGET_ASM_FILE_START
#define TARGET_ASM_FILE_START score_asm_file_start

#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END score_asm_file_end

/* Textual prologue/epilogue emission.  */
#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE score_function_prologue

#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE score_function_epilogue

/* Option processing.  */
#undef TARGET_DEFAULT_TARGET_FLAGS
#define TARGET_DEFAULT_TARGET_FLAGS TARGET_DEFAULT
#undef TARGET_HANDLE_OPTION
#define TARGET_HANDLE_OPTION score_handle_option

/* Scheduling: see score_issue_rate below.  */
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE score_issue_rate

#undef TARGET_ASM_SELECT_RTX_SECTION
#define TARGET_ASM_SELECT_RTX_SECTION score_select_rtx_section

#undef TARGET_IN_SMALL_DATA_P
#define TARGET_IN_SMALL_DATA_P score_in_small_data_p

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL score_function_ok_for_sibcall

#undef TARGET_STRICT_ARGUMENT_NAMING
#define TARGET_STRICT_ARGUMENT_NAMING hook_bool_CUMULATIVE_ARGS_true

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK score_output_mi_thunk

/* Promote sub-word arguments, return values and prototypes to
   full-word mode (generic always-true hooks).  */
#undef TARGET_PROMOTE_FUNCTION_ARGS
#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true

#undef TARGET_PROMOTE_FUNCTION_RETURN
#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true

#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true

/* Argument passing.  */
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size

#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES score_arg_partial_bytes

#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE score_pass_by_reference

#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY score_return_in_memory

/* Cost model.  */
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS score_rtx_costs

#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST score_address_cost
/* Head of the list of symbols that may need .extern directives at end
   of file.  NOTE(review): list nodes appear to be appended by the
   score7/score3 output_external routines — confirm in score7.c/score3.c.  */
struct extern_list *extern_head = 0;

/* Saved comparison operands.  Presumably set by the compare expanders
   and consumed by score_gen_cmp — verify against score.md.  */
rtx cmp_op0, cmp_op1;

/* Maps constraint letters to register classes.
   default 0 = NO_REGS */
enum reg_class score_char_to_class[256];
121 | |
/* Implement TARGET_RETURN_IN_MEMORY.  In S+core,
   small structures are returned in a register.
   Objects with varying size must still be returned in memory.
   Dispatches to the score7 handler for all score5/5u/7/7d cores and
   to the score3 handler for score3; any other configuration is a
   compiler bug (gcc_unreachable).  This dispatch-on-core pattern is
   shared by most functions in this file.  */
static bool
score_return_in_memory (tree type, tree fndecl ATTRIBUTE_UNUSED)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_return_in_memory (type, fndecl);
  else if (TARGET_SCORE3)
    return score3_return_in_memory (type, fndecl);

  gcc_unreachable ();
}

/* Return nonzero when an argument must be passed by reference.
   S+core passes exactly the variable-sized arguments by reference,
   so this simply reuses the must-pass-in-stack predicate.  */
static bool
score_pass_by_reference (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED,
                         enum machine_mode mode, tree type,
                         bool named ATTRIBUTE_UNUSED)
{
  /* If we have a variable-sized parameter, we have no choice.  */
  return targetm.calls.must_pass_in_stack (mode, type);
}
145 | |
/* Implement TARGET_ASM_OUTPUT_MI_THUNK.  Generate rtl rather than asm text
   in order to avoid duplicating too much logic from elsewhere.
   Dispatches to the per-core thunk emitter.  */
static void
score_output_mi_thunk (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
                       HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
                       tree function)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_output_mi_thunk (file, thunk_fndecl, delta,
                                   vcall_offset, function);
  else if (TARGET_SCORE3)
    return score3_output_mi_thunk (file, thunk_fndecl, delta,
                                   vcall_offset, function);
  gcc_unreachable ();
}

/* Implement TARGET_FUNCTION_OK_FOR_SIBCALL.  S+core places no
   restrictions on sibling calls: every call site that the generic
   code considers eligible is accepted.  */
static bool
score_function_ok_for_sibcall (ATTRIBUTE_UNUSED tree decl,
                               ATTRIBUTE_UNUSED tree exp)
{
  return true;
}
169 | |
/* Set up the stack and frame (if desired) for the function.
   Implements TARGET_ASM_FUNCTION_PROLOGUE via per-core dispatch.  */
static void
score_function_prologue (FILE *file, HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_function_prologue (file, size);
  else if (TARGET_SCORE3)
    return score3_function_prologue (file, size);

  gcc_unreachable ();
}

/* Do any necessary cleanup after a function to restore stack, frame,
   and regs.  Implements TARGET_ASM_FUNCTION_EPILOGUE via per-core
   dispatch.  */
static void
score_function_epilogue (FILE *file,
                         HOST_WIDE_INT size ATTRIBUTE_UNUSED)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_function_epilogue (file, size);
  else if (TARGET_SCORE3)
    return score3_function_epilogue (file, size);

  gcc_unreachable ();
}
195 | |
/* Implement TARGET_SCHED_ISSUE_RATE.  The scheduler may issue at most
   this many instructions per cycle; all supported S+core variants are
   treated as single-issue here.  */
static int
score_issue_rate (void)
{
  const int insns_per_cycle = 1;
  return insns_per_cycle;
}
202 | |
/* Choose the section to use for the constant rtx expression X that has
   mode MODE.  Per-core dispatch (score7 family vs score3).  */
static section *
score_select_rtx_section (enum machine_mode mode, rtx x,
                          unsigned HOST_WIDE_INT align)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_select_rtx_section (mode, x, align);
  else if (TARGET_SCORE3)
    return score3_select_rtx_section (mode, x, align);

  gcc_unreachable ();
}

/* Implement TARGET_IN_SMALL_DATA_P.  True if DECL should be placed in
   the small-data area; the size threshold is core-specific.  */
static bool
score_in_small_data_p (tree decl)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_in_small_data_p (decl);
  else if (TARGET_SCORE3)
    return score3_in_small_data_p (decl);

  gcc_unreachable ();
}
228 | |
/* Implement TARGET_ASM_FILE_START.  Emit a banner comment identifying
   the selected core and endianness, run the generic file-start code,
   and announce PIC mode to the assembler when -fpic is in effect.
   Note the order of the 7d/7 and 3d/3 tests: the "d" variants must be
   checked first because TARGET_SCORE7D implies TARGET_SCORE7 (both
   mask bits are set together in score_handle_option).  */
static void
score_asm_file_start (void)
{
  if (TARGET_SCORE5)
    fprintf (asm_out_file, "# Sunplus S+core5 %s rev=%s\n",
             TARGET_LITTLE_ENDIAN ? "el" : "eb", SCORE_GCC_VERSION);
  else if (TARGET_SCORE5U)
    fprintf (asm_out_file, "# Sunplus S+core5u %s rev=%s\n",
             TARGET_LITTLE_ENDIAN ? "el" : "eb", SCORE_GCC_VERSION);
  else if (TARGET_SCORE7D)
    fprintf (asm_out_file, "# Sunplus S+core7d %s rev=%s\n",
             TARGET_LITTLE_ENDIAN ? "el" : "eb", SCORE_GCC_VERSION);
  else if (TARGET_SCORE7)
    fprintf (asm_out_file, "# Sunplus S+core7 %s rev=%s\n",
             TARGET_LITTLE_ENDIAN ? "el" : "eb", SCORE_GCC_VERSION);
  else if (TARGET_SCORE3D)
    fprintf (asm_out_file, "# Sunplus S+core3d %s rev=%s\n",
             TARGET_LITTLE_ENDIAN ? "el" : "eb", SCORE_GCC_VERSION);
  else if (TARGET_SCORE3)
    fprintf (asm_out_file, "# Sunplus S+core3 %s rev=%s\n",
             TARGET_LITTLE_ENDIAN ? "el" : "eb", SCORE_GCC_VERSION);
  else
    fprintf (asm_out_file, "# Sunplus S+core unknown %s rev=%s\n",
             TARGET_LITTLE_ENDIAN ? "el" : "eb", SCORE_GCC_VERSION);

  default_file_start ();

  if (flag_pic)
    fprintf (asm_out_file, "\t.set pic\n");
}
260 | |
/* Implement TARGET_ASM_FILE_END.  When using assembler macros, emit
   .externs for any small-data variables that turned out to be external.
   Per-core dispatch.  */
static void
score_asm_file_end (void)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_asm_file_end ();
  else if (TARGET_SCORE3)
    return score3_asm_file_end ();

  gcc_unreachable ();
}

/* Union of every CPU-selection mask bit; used to clear the current
   CPU choice before setting a new one in score_handle_option.  */
#define MASK_ALL_CPU_BITS \
        (MASK_SCORE5 | MASK_SCORE5U | MASK_SCORE7 | MASK_SCORE7D \
         | MASK_SCORE3 | MASK_SCORE3D)
277 | |
278 /* Implement TARGET_HANDLE_OPTION. */ | |
279 static bool | |
280 score_handle_option (size_t code, const char *arg, int value ATTRIBUTE_UNUSED) | |
281 { | |
282 switch (code) | |
283 { | |
284 case OPT_mscore7d: | |
285 target_flags &= ~(MASK_ALL_CPU_BITS); | |
286 target_flags |= MASK_SCORE7 | MASK_SCORE7D; | |
287 return true; | |
288 | |
289 case OPT_mscore3d: | |
290 target_flags &= ~(MASK_ALL_CPU_BITS); | |
291 target_flags |= MASK_SCORE3 | MASK_SCORE3D; | |
292 return true; | |
293 | |
294 case OPT_march_: | |
295 if (strcmp (arg, "score5") == 0) | |
296 { | |
297 target_flags &= ~(MASK_ALL_CPU_BITS); | |
298 target_flags |= MASK_SCORE5; | |
299 return true; | |
300 } | |
301 else if (strcmp (arg, "score5u") == 0) | |
302 { | |
303 target_flags &= ~(MASK_ALL_CPU_BITS); | |
304 target_flags |= MASK_SCORE5U; | |
305 return true; | |
306 } | |
307 else if (strcmp (arg, "score7") == 0) | |
308 { | |
309 target_flags &= ~(MASK_ALL_CPU_BITS); | |
310 target_flags |= MASK_SCORE7; | |
311 return true; | |
312 } | |
313 else if (strcmp (arg, "score7d") == 0) | |
314 { | |
315 target_flags &= ~(MASK_ALL_CPU_BITS); | |
316 target_flags |= MASK_SCORE7 | MASK_SCORE7D; | |
317 return true; | |
318 } | |
319 else if (strcmp (arg, "score3") == 0) | |
320 { | |
321 target_flags &= ~(MASK_ALL_CPU_BITS); | |
322 target_flags |= MASK_SCORE3; | |
323 return true; | |
324 } | |
325 else if (strcmp (arg, "score3d") == 0) | |
326 { | |
327 target_flags &= ~(MASK_ALL_CPU_BITS); | |
328 target_flags |= MASK_SCORE3 | MASK_SCORE3D; | |
329 return true; | |
330 } | |
331 else | |
332 return false; | |
333 | |
334 default: | |
335 return true; | |
336 } | |
337 } | |
338 | |
339 /* Implement OVERRIDE_OPTIONS macro. */ | |
340 void | |
341 score_override_options (void) | |
342 { | |
343 if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D) | |
344 return score7_override_options (); | |
345 else if (TARGET_SCORE3) | |
346 return score3_override_options (); | |
347 | |
348 return score7_override_options (); | |
349 } | |
350 | |
/* Implement REGNO_REG_CLASS macro: smallest register class containing
   hard register REGNO.  Per-core dispatch.  */
int
score_reg_class (int regno)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_reg_class (regno);
  else if (TARGET_SCORE3)
    return score3_reg_class (regno);

  gcc_unreachable ();
}

/* Implement PREFERRED_RELOAD_CLASS macro.  */
enum reg_class
score_preferred_reload_class (rtx x ATTRIBUTE_UNUSED, enum reg_class rclass)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_preferred_reload_class (x, rclass);
  else if (TARGET_SCORE3)
    return score3_preferred_reload_class (x, rclass);

  gcc_unreachable ();
}

/* Implement SECONDARY_INPUT_RELOAD_CLASS
   and SECONDARY_OUTPUT_RELOAD_CLASS macro.  */
enum reg_class
score_secondary_reload_class (enum reg_class rclass,
                              enum machine_mode mode ATTRIBUTE_UNUSED,
                              rtx x)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_secondary_reload_class (rclass, mode, x);
  else if (TARGET_SCORE3)
    return score3_secondary_reload_class (rclass, mode, x);

  gcc_unreachable ();
}

/* Implement CONST_OK_FOR_LETTER_P macro: whether VALUE satisfies the
   integer constraint named by letter C.  */
int
score_const_ok_for_letter_p (HOST_WIDE_INT value, char c)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_const_ok_for_letter_p (value, c);
  else if (TARGET_SCORE3)
    return score3_const_ok_for_letter_p (value, c);

  gcc_unreachable ();
}

/* Implement EXTRA_CONSTRAINT macro for non-register, non-integer
   constraint letters.  */
int
score_extra_constraint (rtx op, char c)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_extra_constraint (op, c);
  else if (TARGET_SCORE3)
    return score3_extra_constraint (op, c);

  gcc_unreachable ();
}

/* Return truth value on whether or not a given hard register
   can support a given mode.  */
int
score_hard_regno_mode_ok (unsigned int regno, enum machine_mode mode)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_hard_regno_mode_ok (regno, mode);
  else if (TARGET_SCORE3)
    return score3_hard_regno_mode_ok (regno, mode);

  gcc_unreachable ();
}

/* Implement INITIAL_ELIMINATION_OFFSET.  FROM is either the frame
   pointer or argument pointer.  TO is either the stack pointer or
   hard frame pointer.  */
HOST_WIDE_INT
score_initial_elimination_offset (int from,
                                  int to ATTRIBUTE_UNUSED)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_initial_elimination_offset (from, to);
  else if (TARGET_SCORE3)
    return score3_initial_elimination_offset (from, to);

  gcc_unreachable ();
}
441 | |
/* Argument support functions.  */

/* Initialize CUMULATIVE_ARGS for a function.  All counters start at
   zero; FNTYPE and LIBNAME are ignored (same layout for all calls).  */
void
score_init_cumulative_args (CUMULATIVE_ARGS *cum,
                            tree fntype ATTRIBUTE_UNUSED,
                            rtx libname ATTRIBUTE_UNUSED)
{
  memset (cum, 0, sizeof (CUMULATIVE_ARGS));
}
452 | |
/* Implement FUNCTION_ARG_ADVANCE macro: advance CUM past an argument
   of the given MODE/TYPE.  Per-core dispatch.  */
void
score_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
                            tree type, int named)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_function_arg_advance (cum, mode, type, named);
  else if (TARGET_SCORE3)
    return score3_function_arg_advance (cum, mode, type, named);

  gcc_unreachable ();
}

/* Implement TARGET_ARG_PARTIAL_BYTES macro: number of bytes of an
   argument passed in registers when the rest goes on the stack.  */
int
score_arg_partial_bytes (CUMULATIVE_ARGS *cum,
                         enum machine_mode mode, tree type, bool named)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_arg_partial_bytes (cum, mode, type, named);
  else if (TARGET_SCORE3)
    return score3_arg_partial_bytes (cum, mode, type, named);

  gcc_unreachable ();
}

/* Implement FUNCTION_ARG macro: rtx for the register holding the
   argument, or NULL if it is passed on the stack.  */
rtx
score_function_arg (const CUMULATIVE_ARGS *cum, enum machine_mode mode,
                    tree type, int named)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_function_arg (cum, mode, type, named);
  else if (TARGET_SCORE3)
    return score3_function_arg (cum, mode, type, named);

  gcc_unreachable ();
}

/* Implement FUNCTION_VALUE and LIBCALL_VALUE.  For normal calls,
   VALTYPE is the return type and MODE is VOIDmode.  For libcalls,
   VALTYPE is null and MODE is the mode of the return value.  */
rtx
score_function_value (tree valtype, tree func ATTRIBUTE_UNUSED,
                      enum machine_mode mode)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_function_value (valtype, func, mode);
  else if (TARGET_SCORE3)
    return score3_function_value (valtype, func, mode);

  gcc_unreachable ();
}

/* Implement INITIALIZE_TRAMPOLINE macro: emit code writing the
   trampoline at ADDR that calls FUNC with static chain CHAIN.  */
void
score_initialize_trampoline (rtx ADDR, rtx FUNC, rtx CHAIN)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_initialize_trampoline (ADDR, FUNC, CHAIN);
  else if (TARGET_SCORE3)
    return score3_initialize_trampoline (ADDR, FUNC, CHAIN);

  gcc_unreachable ();
}
518 | |
/* This function is used to implement REG_MODE_OK_FOR_BASE_P macro.
   STRICT nonzero means only hard registers (or registers known to map
   to them) qualify.  */
int
score_regno_mode_ok_for_base_p (int regno, int strict)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_regno_mode_ok_for_base_p (regno, strict);
  else if (TARGET_SCORE3)
    return score3_regno_mode_ok_for_base_p (regno, strict);

  gcc_unreachable ();
}

/* Implement GO_IF_LEGITIMATE_ADDRESS macro: nonzero if X is a valid
   address for an operand of mode MODE.  */
int
score_address_p (enum machine_mode mode, rtx x, int strict)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_address_p (mode, x, strict);
  else if (TARGET_SCORE3)
    return score3_address_p (mode, x, strict);

  gcc_unreachable ();
}

/* This function is used to implement LEGITIMIZE_ADDRESS.  If *XLOC can
   be legitimized in a way that the generic machinery might not expect,
   put the new address in *XLOC and return true.  */
int
score_legitimize_address (rtx *xloc)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_legitimize_address (xloc);
  else if (TARGET_SCORE3)
    return score3_legitimize_address (xloc);

  gcc_unreachable ();
}
556 | |
/* Return a number assessing the cost of moving a register in class
   FROM to class TO.  Per-core dispatch.  */
int
score_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
                          enum reg_class from, enum reg_class to)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_register_move_cost (mode, from, to);
  else if (TARGET_SCORE3)
    return score3_register_move_cost (mode, from, to);

  gcc_unreachable ();
}

/* Implement TARGET_RTX_COSTS macro: store the cost of expression X in
   *TOTAL; return true when the subexpressions need not be costed.  */
bool
score_rtx_costs (rtx x, int code, int outer_code, int *total,
                 bool speed ATTRIBUTE_UNUSED)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_rtx_costs (x, code, outer_code, total, speed);
  else if (TARGET_SCORE3)
    return score3_rtx_costs (x, code, outer_code, total, speed);

  gcc_unreachable ();
}

/* Implement TARGET_ADDRESS_COST macro.  Note SPEED is dropped: the
   per-core cost routines take only the address.  */
int
score_address_cost (rtx addr,
                    bool speed ATTRIBUTE_UNUSED)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_address_cost (addr);
  else if (TARGET_SCORE3)
    return score3_address_cost (addr);

  gcc_unreachable ();
}

/* Implement ASM_OUTPUT_EXTERNAL macro: record/emit a reference to the
   external symbol NAME declared by DECL.  */
int
score_output_external (FILE *file ATTRIBUTE_UNUSED,
                       tree decl, const char *name)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_output_external (file, decl, name);
  else if (TARGET_SCORE3)
    return score3_output_external (file, decl, name);

  gcc_unreachable ();
}
609 | |
/* Implement RETURN_ADDR_RTX.  Note, we do not support moving
   back to a previous frame.  */
rtx
score_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_return_addr (count, frame);
  else if (TARGET_SCORE3)
    return score3_return_addr (count, frame);

  gcc_unreachable ();
}

/* Implement PRINT_OPERAND macro: write operand OP to FILE, with
   output modified by letter C.  */
void
score_print_operand (FILE *file, rtx op, int c)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_print_operand (file, op, c);
  else if (TARGET_SCORE3)
    return score3_print_operand (file, op, c);

  gcc_unreachable ();
}

/* Implement PRINT_OPERAND_ADDRESS macro: write the memory address X
   in assembler syntax to FILE.  */
void
score_print_operand_address (FILE *file, rtx x)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_print_operand_address (file, x);
  else if (TARGET_SCORE3)
    return score3_print_operand_address (file, x);

  gcc_unreachable ();
}

/* Implement SELECT_CC_MODE macro: CC mode for a comparison OP of X
   and Y.  */
enum machine_mode
score_select_cc_mode (enum rtx_code op, rtx x, rtx y)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_select_cc_mode (op, x, y);
  else if (TARGET_SCORE3)
    return score3_select_cc_mode (op, x, y);

  gcc_unreachable ();
}
658 | |
/* Return true if X is a symbolic constant that can be calculated in
   the same way as a bare symbol.  If it is, store the type of the
   symbol in *SYMBOL_TYPE.  */
int
score_symbolic_constant_p (rtx x, enum score_symbol_type *symbol_type)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_symbolic_constant_p (x, symbol_type);
  else if (TARGET_SCORE3)
    return score3_symbolic_constant_p (x, symbol_type);

  gcc_unreachable ();
}

/* Generate the prologue instructions for entry into a S+core function.  */
void
score_prologue (void)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_prologue ();
  else if (TARGET_SCORE3)
    return score3_prologue ();

  gcc_unreachable ();
}

/* Generate the epilogue instructions in a S+core function.
   SIBCALL_P is nonzero for an epilogue preceding a sibling call.  */
void
score_epilogue (int sibcall_p)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_epilogue (sibcall_p);
  else if (TARGET_SCORE3)
    return score3_epilogue (sibcall_p);

  gcc_unreachable ();
}

/* Emit the comparison insn for MODE.  Presumably consumes the saved
   cmp_op0/cmp_op1 operands — verify in score7_gen_cmp/score3_gen_cmp.  */
void
score_gen_cmp (enum machine_mode mode)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_gen_cmp (mode);
  else if (TARGET_SCORE3)
    return score3_gen_cmp (mode);

  gcc_unreachable ();
}

/* Call and sibcall pattern all need call this function.
   SIB is nonzero when expanding a sibling call.  */
void
score_call (rtx *ops, bool sib)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_call (ops, sib);
  else if (TARGET_SCORE3)
    return score3_call (ops, sib);

  gcc_unreachable ();
}
719 | |
/* Call value and sibcall value pattern all need call this function.  */
void
score_call_value (rtx *ops, bool sib)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_call_value (ops, sib);
  else if (TARGET_SCORE3)
    return score3_call_value (ops, sib);

  gcc_unreachable ();
}

/* Expand the movsicc (conditional move) pattern.  */
void
score_movsicc (rtx *ops)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_movsicc (ops);
  else if (TARGET_SCORE3)
    return score3_movsicc (ops);

  gcc_unreachable ();
}

/* Machine Split: split a DImode move into word-sized pieces.  */
void
score_movdi (rtx *ops)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_movdi (ops);
  else if (TARGET_SCORE3)
    return score3_movdi (ops);

  gcc_unreachable ();
}

/* Split a zero-extract implemented via AND-immediate.  */
void
score_zero_extract_andi (rtx *ops)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_zero_extract_andi (ops);
  else if (TARGET_SCORE3)
    return score3_zero_extract_andi (ops);

  gcc_unreachable ();
}

/* Output asm insn for move.  Returns the assembler template string.  */
const char *
score_move (rtx *ops)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_move (ops);
  else if (TARGET_SCORE3)
    return score3_move (ops);

  gcc_unreachable ();
}

/* Output asm insn for load.  UNIT is the access width; SIGN selects
   sign- vs zero-extension.  */
const char *
score_linsn (rtx *ops, enum score_mem_unit unit, bool sign)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_linsn (ops, unit, sign);
  else if (TARGET_SCORE3)
    return score3_linsn (ops, unit, sign);

  gcc_unreachable ();
}

/* Output asm insn for store.  */
const char *
score_sinsn (rtx *ops, enum score_mem_unit unit)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_sinsn (ops, unit);
  else if (TARGET_SCORE3)
    return score3_sinsn (ops, unit);

  gcc_unreachable ();
}

/* Output asm insn for load immediate.  */
const char *
score_limm (rtx *ops)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_limm (ops);
  else if (TARGET_SCORE3)
    return score3_limm (ops);

  gcc_unreachable ();
}
813 | |
814 | |
/* Generate add insn.  SET_CC is true when the variant that updates the
   condition register should be chosen.  */
const char *
score_select_add_imm (rtx *ops, bool set_cc)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_select_add_imm (ops, set_cc);
  else if (TARGET_SCORE3)
    return score3_select_add_imm (ops, set_cc);

  gcc_unreachable ();
}

/* Output arith insn.  INST_PRE is the mnemonic prefix, COMMU marks a
   commutative operation, LETTER is the operand-print modifier, and
   SET_CC selects the flag-setting form.  */
const char *
score_select (rtx *ops, const char *inst_pre,
              bool commu, const char *letter, bool set_cc)
{
  if (TARGET_SCORE5 || TARGET_SCORE5U || TARGET_SCORE7 || TARGET_SCORE7D)
    return score7_select (ops, inst_pre, commu, letter, set_cc);
  else if (TARGET_SCORE3)
    return score3_select (ops, inst_pre, commu, letter, set_cc);

  gcc_unreachable ();
}

/* Output switch case insn, only supported in score3.  Reaching this
   on any other core is a compiler bug.  */
const char *
score_output_casesi (rtx *operands)
{
  if (TARGET_SCORE3)
    return score3_output_casesi (operands);

  gcc_unreachable ();
}

/* Output rpush insn, only supported in score3.  */
const char *
score_rpush (rtx *operands)
{
  if (TARGET_SCORE3)
    return score3_rpush (operands);

  gcc_unreachable ();
}

/* Output rpop insn, only supported in score3.  */
const char *
score_rpop (rtx *operands)
{
  if (TARGET_SCORE3)
    return score3_rpop (operands);

  gcc_unreachable ();
}
869 | |
/* Emit lcb/lce insns to implement an unaligned word load
   (extv/extzv expansion).  OPS is {dst, src-mem, bit-length,
   bit-offset}.  Returns false — letting the generic code take over —
   unless the extraction is exactly one byte-aligned word.  */
bool
score_unaligned_load (rtx *ops)
{
  rtx dst = ops[0];
  rtx src = ops[1];
  rtx len = ops[2];
  rtx off = ops[3];
  rtx addr_reg;

  /* Only a full word at a byte boundary can use lcb/lce.  */
  if (INTVAL (len) != BITS_PER_WORD
      || (INTVAL (off) % BITS_PER_UNIT) != 0)
    return false;

  gcc_assert (GET_MODE_SIZE (GET_MODE (dst)) == GET_MODE_SIZE (SImode));

  /* lcb primes the cache-line buffer; lce completes the load into DST.  */
  addr_reg = copy_addr_to_reg (XEXP (src, 0));
  emit_insn (gen_move_lcb (addr_reg, addr_reg));
  emit_insn (gen_move_lce (addr_reg, addr_reg, dst));

  return true;
}
892 | |
/* Emit scb/sce insns to implement an unaligned word store (insv
   expansion).  OPS is {dst-mem, bit-length, bit-offset, src} — note
   the operand order differs from score_unaligned_load.  Returns false
   unless the insertion is exactly one byte-aligned word.  */
bool
score_unaligned_store (rtx *ops)
{
  rtx dst = ops[0];
  rtx len = ops[1];
  rtx off = ops[2];
  rtx src = ops[3];
  rtx addr_reg;

  if (INTVAL(len) != BITS_PER_WORD
      || (INTVAL(off) % BITS_PER_UNIT) != 0)
    return false;

  gcc_assert (GET_MODE_SIZE (GET_MODE (src)) == GET_MODE_SIZE (SImode));

  /* scb starts the unaligned store of SRC; sce flushes the tail.  */
  addr_reg = copy_addr_to_reg (XEXP (dst, 0));
  emit_insn (gen_move_scb (addr_reg, addr_reg, src));
  emit_insn (gen_move_sce (addr_reg, addr_reg));

  return true;
}
915 | |
/* If length is short, generate move insns straight: load the whole
   block into LENGTH/UNITS_PER_WORD pseudo registers, then store them
   out, handling any sub-word remainder with move_by_pieces.  Unaligned
   endpoints use the lcb/lcw/lce (load) and scb/scw/sce (store) insn
   sequences instead of plain word moves.  */
static void
score_block_move_straight (rtx dst, rtx src, HOST_WIDE_INT length)
{
  HOST_WIDE_INT leftover;
  int i, reg_count;
  rtx *regs;

  /* Split LENGTH into whole words plus a sub-word remainder.  */
  leftover = length % UNITS_PER_WORD;
  length -= leftover;
  reg_count = length / UNITS_PER_WORD;

  regs = XALLOCAVEC (rtx, reg_count);
  for (i = 0; i < reg_count; i++)
    regs[i] = gen_reg_rtx (SImode);

  /* Load from src to regs.  */
  if (MEM_ALIGN (src) >= BITS_PER_WORD)
    {
      HOST_WIDE_INT offset = 0;
      for (i = 0; i < reg_count; offset += UNITS_PER_WORD, i++)
        emit_move_insn (regs[i], adjust_address (src, SImode, offset));
    }
  else if (reg_count >= 1)
    {
      /* Unaligned source: lcb primes, lcw fetches each middle word,
         lce fetches the final word (regs[i] == regs[reg_count - 1]
         after the loop).  */
      rtx src_reg = copy_addr_to_reg (XEXP (src, 0));

      emit_insn (gen_move_lcb (src_reg, src_reg));
      for (i = 0; i < (reg_count - 1); i++)
        emit_insn (gen_move_lcw (src_reg, src_reg, regs[i]));
      emit_insn (gen_move_lce (src_reg, src_reg, regs[i]));
    }

  /* Store regs to dest.  */
  if (MEM_ALIGN (dst) >= BITS_PER_WORD)
    {
      HOST_WIDE_INT offset = 0;
      for (i = 0; i < reg_count; offset += UNITS_PER_WORD, i++)
        emit_move_insn (adjust_address (dst, SImode, offset), regs[i]);
    }
  else if (reg_count >= 1)
    {
      /* Unaligned destination: scb writes the first word, scw the
         middle words, sce flushes the tail.  */
      rtx dst_reg = copy_addr_to_reg (XEXP (dst, 0));

      emit_insn (gen_move_scb (dst_reg, dst_reg, regs[0]));
      for (i = 1; i < reg_count; i++)
        emit_insn (gen_move_scw (dst_reg, dst_reg, regs[i]));
      emit_insn (gen_move_sce (dst_reg, dst_reg));
    }

  /* Mop up any left-over bytes.  */
  if (leftover > 0)
    {
      src = adjust_address (src, BLKmode, length);
      dst = adjust_address (dst, BLKmode, length);
      move_by_pieces (dst, src, leftover,
                      MIN (MEM_ALIGN (src), MEM_ALIGN (dst)), 0);
    }
}
975 | |
976 /* Generate loop head when dst or src is unaligned. */ | |
977 static void | |
978 score_block_move_loop_head (rtx dst_reg, HOST_WIDE_INT dst_align, | |
979 rtx src_reg, HOST_WIDE_INT src_align, | |
980 HOST_WIDE_INT length) | |
981 { | |
982 bool src_unaligned = (src_align < BITS_PER_WORD); | |
983 bool dst_unaligned = (dst_align < BITS_PER_WORD); | |
984 | |
985 rtx temp = gen_reg_rtx (SImode); | |
986 | |
987 gcc_assert (length == UNITS_PER_WORD); | |
988 | |
989 if (src_unaligned) | |
990 { | |
991 emit_insn (gen_move_lcb (src_reg, src_reg)); | |
992 emit_insn (gen_move_lcw (src_reg, src_reg, temp)); | |
993 } | |
994 else | |
995 emit_insn (gen_move_lw_a (src_reg, | |
996 src_reg, gen_int_mode (4, SImode), temp)); | |
997 | |
998 if (dst_unaligned) | |
999 emit_insn (gen_move_scb (dst_reg, dst_reg, temp)); | |
1000 else | |
1001 emit_insn (gen_move_sw_a (dst_reg, | |
1002 dst_reg, gen_int_mode (4, SImode), temp)); | |
1003 } | |
1004 | |
1005 /* Generate loop body, copy length bytes per iteration. */ | |
1006 static void | |
1007 score_block_move_loop_body (rtx dst_reg, HOST_WIDE_INT dst_align, | |
1008 rtx src_reg, HOST_WIDE_INT src_align, | |
1009 HOST_WIDE_INT length) | |
1010 { | |
1011 int reg_count = length / UNITS_PER_WORD; | |
1012 rtx *regs = XALLOCAVEC (rtx, reg_count); | |
1013 int i; | |
1014 bool src_unaligned = (src_align < BITS_PER_WORD); | |
1015 bool dst_unaligned = (dst_align < BITS_PER_WORD); | |
1016 | |
1017 for (i = 0; i < reg_count; i++) | |
1018 regs[i] = gen_reg_rtx (SImode); | |
1019 | |
1020 if (src_unaligned) | |
1021 { | |
1022 for (i = 0; i < reg_count; i++) | |
1023 emit_insn (gen_move_lcw (src_reg, src_reg, regs[i])); | |
1024 } | |
1025 else | |
1026 { | |
1027 for (i = 0; i < reg_count; i++) | |
1028 emit_insn (gen_move_lw_a (src_reg, | |
1029 src_reg, gen_int_mode (4, SImode), regs[i])); | |
1030 } | |
1031 | |
1032 if (dst_unaligned) | |
1033 { | |
1034 for (i = 0; i < reg_count; i++) | |
1035 emit_insn (gen_move_scw (dst_reg, dst_reg, regs[i])); | |
1036 } | |
1037 else | |
1038 { | |
1039 for (i = 0; i < reg_count; i++) | |
1040 emit_insn (gen_move_sw_a (dst_reg, | |
1041 dst_reg, gen_int_mode (4, SImode), regs[i])); | |
1042 } | |
1043 } | |
1044 | |
/* Generate loop foot, copy the leftover bytes.  LENGTH is whatever
   remains after the main loop; whole words go through the normal
   loop body, then an sce closes any unaligned store sequence, and
   the final sub-word bytes are copied individually.  */
static void
score_block_move_loop_foot (rtx dst_reg, HOST_WIDE_INT dst_align,
                            rtx src_reg, HOST_WIDE_INT src_align,
                            HOST_WIDE_INT length)
{
  bool src_unaligned = (src_align < BITS_PER_WORD);
  bool dst_unaligned = (dst_align < BITS_PER_WORD);

  HOST_WIDE_INT leftover;

  leftover = length % UNITS_PER_WORD;
  length -= leftover;

  /* Copy the whole-word part with the usual loop body.  */
  if (length > 0)
    score_block_move_loop_body (dst_reg, dst_align,
                                src_reg, src_align, length);

  /* An unaligned scb/scw store sequence must be terminated by sce.  */
  if (dst_unaligned)
    emit_insn (gen_move_sce (dst_reg, dst_reg));

  if (leftover > 0)
    {
      /* NOTE(review): the adj values look like a base-register
         pre-adjustment applied by the *_b patterns before the access;
         -4 apparently rewinds over the word the lcw/scw sequence has
         already advanced past when that side was unaligned.  Confirm
         against the lhu_b/sh_b/lbu_b/sb_b patterns in the .md file.  */
      HOST_WIDE_INT src_adj = src_unaligned ? -4 : 0;
      HOST_WIDE_INT dst_adj = dst_unaligned ? -4 : 0;
      rtx temp;

      gcc_assert (leftover < UNITS_PER_WORD);

      /* Move a halfword first when at least two bytes remain and both
         sides are halfword-aligned.  */
      if (leftover >= UNITS_PER_WORD / 2
          && src_align >= BITS_PER_WORD / 2
          && dst_align >= BITS_PER_WORD / 2)
        {
          temp = gen_reg_rtx (HImode);
          emit_insn (gen_move_lhu_b (src_reg, src_reg,
                                     gen_int_mode (src_adj, SImode), temp));
          emit_insn (gen_move_sh_b (dst_reg, dst_reg,
                                    gen_int_mode (dst_adj, SImode), temp));
          leftover -= UNITS_PER_WORD / 2;
          /* Subsequent accesses step forward past the halfword.  */
          src_adj = UNITS_PER_WORD / 2;
          dst_adj = UNITS_PER_WORD / 2;
        }

      /* Copy the remaining bytes one at a time; after the first byte
         the adjustment settles to a plain +1 step.  */
      while (leftover > 0)
        {
          temp = gen_reg_rtx (QImode);
          emit_insn (gen_move_lbu_b (src_reg, src_reg,
                                     gen_int_mode (src_adj, SImode), temp));
          emit_insn (gen_move_sb_b (dst_reg, dst_reg,
                                    gen_int_mode (dst_adj, SImode), temp));
          leftover--;
          src_adj = 1;
          dst_adj = 1;
        }
    }
}
1101 | |
1102 #define MIN_MOVE_REGS 3 | |
1103 #define MIN_MOVE_BYTES (MIN_MOVE_REGS * UNITS_PER_WORD) | |
1104 #define MAX_MOVE_REGS 4 | |
1105 #define MAX_MOVE_BYTES (MAX_MOVE_REGS * UNITS_PER_WORD) | |
1106 | |
/* The length is large, generate a loop if necessary.
   The loop consists of a head, a body and a foot: the head primes an
   unaligned copy, the body copies loop_mov_bytes per iteration until
   dst_reg reaches final_dst, and the foot copies the remainder.  */
static void
score_block_move_loop (rtx dst, rtx src, HOST_WIDE_INT length)
{
  HOST_WIDE_INT src_align = MEM_ALIGN (src);
  HOST_WIDE_INT dst_align = MEM_ALIGN (dst);
  HOST_WIDE_INT loop_mov_bytes;
  HOST_WIDE_INT iteration = 0;
  HOST_WIDE_INT head_length = 0, leftover;
  rtx label, src_reg, dst_reg, final_dst;

  bool gen_loop_head = (src_align < BITS_PER_WORD
                        || dst_align < BITS_PER_WORD);

  /* An unaligned copy needs a one-word head to prime lcb/scb.  */
  if (gen_loop_head)
    head_length += UNITS_PER_WORD;

  /* Pick the largest per-iteration size (MAX_MOVE_BYTES down to
     MIN_MOVE_BYTES) that still yields more than one iteration.  */
  for (loop_mov_bytes = MAX_MOVE_BYTES;
       loop_mov_bytes >= MIN_MOVE_BYTES;
       loop_mov_bytes -= UNITS_PER_WORD)
    {
      iteration = (length - head_length) / loop_mov_bytes;
      if (iteration > 1)
        break;
    }
  if (iteration <= 1)
    {
      /* Not enough data to amortize a loop; expand straight-line.  */
      score_block_move_straight (dst, src, length);
      return;
    }

  /* LENGTH now covers the head plus the looped iterations; the
     remainder goes to the foot.  */
  leftover = (length - head_length) % loop_mov_bytes;
  length -= leftover;

  src_reg = copy_addr_to_reg (XEXP (src, 0));
  dst_reg = copy_addr_to_reg (XEXP (dst, 0));
  /* final_dst is where dst_reg will point after the head and all
     iterations have advanced it; it is the loop exit condition.  */
  final_dst = expand_simple_binop (Pmode, PLUS, dst_reg, GEN_INT (length),
                                   0, 0, OPTAB_WIDEN);

  if (gen_loop_head)
    score_block_move_loop_head (dst_reg, dst_align,
                                src_reg, src_align, head_length);

  label = gen_label_rtx ();
  emit_label (label);

  score_block_move_loop_body (dst_reg, dst_align,
                              src_reg, src_align, loop_mov_bytes);

  /* Branch back until dst_reg reaches final_dst.  */
  emit_insn (gen_cmpsi (dst_reg, final_dst));
  emit_jump_insn (gen_bne (label));

  score_block_move_loop_foot (dst_reg, dst_align,
                              src_reg, src_align, leftover);
}
1163 | |
1164 /* Generate block move, for misc.md: "movmemsi". */ | |
1165 bool | |
1166 score_block_move (rtx *ops) | |
1167 { | |
1168 rtx dst = ops[0]; | |
1169 rtx src = ops[1]; | |
1170 rtx length = ops[2]; | |
1171 | |
1172 if (TARGET_LITTLE_ENDIAN | |
1173 && (MEM_ALIGN (src) < BITS_PER_WORD || MEM_ALIGN (dst) < BITS_PER_WORD) | |
1174 && INTVAL (length) >= UNITS_PER_WORD) | |
1175 return false; | |
1176 | |
1177 if (GET_CODE (length) == CONST_INT) | |
1178 { | |
1179 if (INTVAL (length) <= 2 * MAX_MOVE_BYTES) | |
1180 { | |
1181 score_block_move_straight (dst, src, INTVAL (length)); | |
1182 return true; | |
1183 } | |
1184 else if (optimize && | |
1185 !(flag_unroll_loops || flag_unroll_all_loops)) | |
1186 { | |
1187 score_block_move_loop (dst, src, INTVAL (length)); | |
1188 return true; | |
1189 } | |
1190 } | |
1191 return false; | |
1192 } | |
1193 | |
/* The target hook vector; TARGET_INITIALIZER collects the TARGET_*
   hook macros (presumably defined earlier in this file — not visible
   in this chunk).  */
struct gcc_target targetm = TARGET_INITIALIZER;