comparison gcc/config/vax/vax.c @ 55:77e2b8dfacca gcc-4.4.5

update it from 4.4.3 to 4.5.0
author ryoma <e075725@ie.u-ryukyu.ac.jp>
date Fri, 12 Feb 2010 23:39:51 +0900
parents 58ad6c70ea60
children b7f97abdc517
comparison of 52:c156f1bd5cd9 and 55:77e2b8dfacca
1 /* Subroutines for insn-output.c for VAX. 1 /* Subroutines for insn-output.c for VAX.
2 Copyright (C) 1987, 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002, 2 Copyright (C) 1987, 1994, 1995, 1997, 1998, 1999, 2000, 2001, 2002,
3 2004, 2005, 2006, 2007, 2008 3 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc. 4 Free Software Foundation, Inc.
5 5
6 This file is part of GCC. 6 This file is part of GCC.
7 7
8 GCC is free software; you can redistribute it and/or modify 8 GCC is free software; you can redistribute it and/or modify
22 #include "config.h" 22 #include "config.h"
23 #include "system.h" 23 #include "system.h"
24 #include "coretypes.h" 24 #include "coretypes.h"
25 #include "tm.h" 25 #include "tm.h"
26 #include "rtl.h" 26 #include "rtl.h"
27 #include "df.h"
27 #include "tree.h" 28 #include "tree.h"
28 #include "regs.h" 29 #include "regs.h"
29 #include "hard-reg-set.h" 30 #include "hard-reg-set.h"
30 #include "real.h" 31 #include "real.h"
31 #include "insn-config.h" 32 #include "insn-config.h"
37 #include "expr.h" 38 #include "expr.h"
38 #include "optabs.h" 39 #include "optabs.h"
39 #include "flags.h" 40 #include "flags.h"
40 #include "debug.h" 41 #include "debug.h"
41 #include "toplev.h" 42 #include "toplev.h"
43 #include "tm-preds.h"
44 #include "tm-constrs.h"
42 #include "tm_p.h" 45 #include "tm_p.h"
43 #include "target.h" 46 #include "target.h"
44 #include "target-def.h" 47 #include "target-def.h"
45 48
49 static bool vax_legitimate_address_p (enum machine_mode, rtx, bool);
46 static void vax_output_function_prologue (FILE *, HOST_WIDE_INT); 50 static void vax_output_function_prologue (FILE *, HOST_WIDE_INT);
47 static void vax_file_start (void); 51 static void vax_file_start (void);
48 static void vax_init_libfuncs (void); 52 static void vax_init_libfuncs (void);
49 static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, 53 static void vax_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
50 HOST_WIDE_INT, tree); 54 HOST_WIDE_INT, tree);
51 static int vax_address_cost_1 (rtx); 55 static int vax_address_cost_1 (rtx);
52 static int vax_address_cost (rtx, bool); 56 static int vax_address_cost (rtx, bool);
53 static bool vax_rtx_costs (rtx, int, int, int *, bool); 57 static bool vax_rtx_costs (rtx, int, int, int *, bool);
54 static rtx vax_struct_value_rtx (tree, int); 58 static rtx vax_struct_value_rtx (tree, int);
59 static rtx vax_builtin_setjmp_frame_value (void);
60 static void vax_asm_trampoline_template (FILE *);
61 static void vax_trampoline_init (rtx, tree, rtx);
55 62
56 /* Initialize the GCC target structure. */ 63 /* Initialize the GCC target structure. */
57 #undef TARGET_ASM_ALIGNED_HI_OP 64 #undef TARGET_ASM_ALIGNED_HI_OP
58 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t" 65 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
59 66
84 #undef TARGET_PROMOTE_PROTOTYPES 91 #undef TARGET_PROMOTE_PROTOTYPES
85 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true 92 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true
86 93
87 #undef TARGET_STRUCT_VALUE_RTX 94 #undef TARGET_STRUCT_VALUE_RTX
88 #define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx 95 #define TARGET_STRUCT_VALUE_RTX vax_struct_value_rtx
96
97 #undef TARGET_BUILTIN_SETJMP_FRAME_VALUE
98 #define TARGET_BUILTIN_SETJMP_FRAME_VALUE vax_builtin_setjmp_frame_value
99
100 #undef TARGET_LEGITIMATE_ADDRESS_P
101 #define TARGET_LEGITIMATE_ADDRESS_P vax_legitimate_address_p
102
103 #undef TARGET_FRAME_POINTER_REQUIRED
104 #define TARGET_FRAME_POINTER_REQUIRED hook_bool_void_true
105
106 #undef TARGET_ASM_TRAMPOLINE_TEMPLATE
107 #define TARGET_ASM_TRAMPOLINE_TEMPLATE vax_asm_trampoline_template
108 #undef TARGET_TRAMPOLINE_INIT
109 #define TARGET_TRAMPOLINE_INIT vax_trampoline_init
89 110
90 struct gcc_target targetm = TARGET_INITIALIZER; 111 struct gcc_target targetm = TARGET_INITIALIZER;
91 112
92 /* Set global variables as needed for the options enabled. */ 113 /* Set global variables as needed for the options enabled. */
93 114
159 ELF, avoid the user's namespace. */ 180 ELF, avoid the user's namespace. */
160 181
161 static void 182 static void
162 vax_init_libfuncs (void) 183 vax_init_libfuncs (void)
163 { 184 {
164 set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv"); 185 if (TARGET_BSD_DIVMOD)
165 set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem"); 186 {
187 set_optab_libfunc (udiv_optab, SImode, TARGET_ELF ? "*__udiv" : "*udiv");
188 set_optab_libfunc (umod_optab, SImode, TARGET_ELF ? "*__urem" : "*urem");
189 }
166 } 190 }
167 191
168 /* This is like nonimmediate_operand with a restriction on the type of MEM. */ 192 /* This is like nonimmediate_operand with a restriction on the type of MEM. */
169 193
170 void 194 static void
171 split_quadword_operands (rtx * operands, rtx * low, int n ATTRIBUTE_UNUSED) 195 split_quadword_operands (rtx insn, enum rtx_code code, rtx * operands,
196 rtx * low, int n)
172 { 197 {
173 int i; 198 int i;
174 /* Split operands. */ 199
175 200 for (i = 0; i < n; i++)
176 low[0] = low[1] = low[2] = 0; 201 low[i] = 0;
177 for (i = 0; i < 3; i++) 202
178 { 203 for (i = 0; i < n; i++)
179 if (low[i]) 204 {
180 /* it's already been figured out */; 205 if (MEM_P (operands[i])
181 else if (MEM_P (operands[i]) 206 && (GET_CODE (XEXP (operands[i], 0)) == PRE_DEC
182 && (GET_CODE (XEXP (operands[i], 0)) == POST_INC)) 207 || GET_CODE (XEXP (operands[i], 0)) == POST_INC))
183 { 208 {
184 rtx addr = XEXP (operands[i], 0); 209 rtx addr = XEXP (operands[i], 0);
185 operands[i] = low[i] = gen_rtx_MEM (SImode, addr); 210 operands[i] = low[i] = gen_rtx_MEM (SImode, addr);
186 if (which_alternative == 0 && i == 0) 211 }
187 { 212 else if (optimize_size && MEM_P (operands[i])
188 addr = XEXP (operands[i], 0); 213 && REG_P (XEXP (operands[i], 0))
189 operands[i+1] = low[i+1] = gen_rtx_MEM (SImode, addr); 214 && (code != MINUS || operands[1] != const0_rtx)
190 } 215 && find_regno_note (insn, REG_DEAD,
216 REGNO (XEXP (operands[i], 0))))
217 {
218 low[i] = gen_rtx_MEM (SImode,
219 gen_rtx_POST_INC (Pmode,
220 XEXP (operands[i], 0)));
221 operands[i] = gen_rtx_MEM (SImode, XEXP (operands[i], 0));
191 } 222 }
192 else 223 else
193 { 224 {
194 low[i] = operand_subword (operands[i], 0, 0, DImode); 225 low[i] = operand_subword (operands[i], 0, 0, DImode);
195 operands[i] = operand_subword (operands[i], 1, 0, DImode); 226 operands[i] = operand_subword (operands[i], 1, 0, DImode);
198 } 229 }
199 230
200 void 231 void
201 print_operand_address (FILE * file, rtx addr) 232 print_operand_address (FILE * file, rtx addr)
202 { 233 {
234 rtx orig = addr;
203 rtx reg1, breg, ireg; 235 rtx reg1, breg, ireg;
204 rtx offset; 236 rtx offset;
205 237
206 retry: 238 retry:
207 switch (GET_CODE (addr)) 239 switch (GET_CODE (addr))
342 } 374 }
343 375
344 /* If REG1 is nonzero, figure out if it is a base or index register. */ 376 /* If REG1 is nonzero, figure out if it is a base or index register. */
345 if (reg1) 377 if (reg1)
346 { 378 {
347 if (breg != 0 || (offset && MEM_P (offset))) 379 if (breg
380 || (flag_pic && GET_CODE (addr) == SYMBOL_REF)
381 || (offset
382 && (MEM_P (offset)
383 || (flag_pic && symbolic_operand (offset, SImode)))))
348 { 384 {
349 gcc_assert (!ireg); 385 gcc_assert (!ireg);
350 ireg = reg1; 386 ireg = reg1;
351 } 387 }
352 else 388 else
353 breg = reg1; 389 breg = reg1;
354 } 390 }
355 391
356 if (offset != 0) 392 if (offset != 0)
357 output_address (offset); 393 {
394 if (flag_pic && symbolic_operand (offset, SImode))
395 {
396 if (breg && ireg)
397 {
398 debug_rtx (orig);
399 output_operand_lossage ("symbol used with both base and indexed registers");
400 }
401
402 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
403 if (flag_pic > 1 && GET_CODE (offset) == CONST
404 && GET_CODE (XEXP (XEXP (offset, 0), 0)) == SYMBOL_REF
405 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (offset, 0), 0)))
406 {
407 debug_rtx (orig);
408 output_operand_lossage ("symbol with offset used in PIC mode");
409 }
410 #endif
411
412 /* symbol(reg) isn't PIC, but symbol[reg] is. */
413 if (breg)
414 {
415 ireg = breg;
416 breg = 0;
417 }
418
419 }
420
421 output_address (offset);
422 }
358 423
359 if (breg != 0) 424 if (breg != 0)
360 fprintf (file, "(%s)", reg_names[REGNO (breg)]); 425 fprintf (file, "(%s)", reg_names[REGNO (breg)]);
361 426
362 if (ireg != 0) 427 if (ireg != 0)
370 435
371 default: 436 default:
372 output_addr_const (file, addr); 437 output_addr_const (file, addr);
373 } 438 }
374 } 439 }
440
441 void
442 print_operand (FILE *file, rtx x, int code)
443 {
444 if (code == '#')
445 fputc (ASM_DOUBLE_CHAR, file);
446 else if (code == '|')
447 fputs (REGISTER_PREFIX, file);
448 else if (code == 'c')
449 fputs (cond_name (x), file);
450 else if (code == 'C')
451 fputs (rev_cond_name (x), file);
452 else if (code == 'D' && CONST_INT_P (x) && INTVAL (x) < 0)
453 fprintf (file, "$" NEG_HWI_PRINT_HEX16, INTVAL (x));
454 else if (code == 'P' && CONST_INT_P (x))
455 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, INTVAL (x) + 1);
456 else if (code == 'N' && CONST_INT_P (x))
457 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, ~ INTVAL (x));
458 /* rotl instruction cannot deal with negative arguments. */
459 else if (code == 'R' && CONST_INT_P (x))
460 fprintf (file, "$" HOST_WIDE_INT_PRINT_DEC, 32 - INTVAL (x));
461 else if (code == 'H' && CONST_INT_P (x))
462 fprintf (file, "$%d", (int) (0xffff & ~ INTVAL (x)));
463 else if (code == 'h' && CONST_INT_P (x))
464 fprintf (file, "$%d", (short) - INTVAL (x));
465 else if (code == 'B' && CONST_INT_P (x))
466 fprintf (file, "$%d", (int) (0xff & ~ INTVAL (x)));
467 else if (code == 'b' && CONST_INT_P (x))
468 fprintf (file, "$%d", (int) (0xff & - INTVAL (x)));
469 else if (code == 'M' && CONST_INT_P (x))
470 fprintf (file, "$%d", ~((1 << INTVAL (x)) - 1));
471 else if (REG_P (x))
472 fprintf (file, "%s", reg_names[REGNO (x)]);
473 else if (MEM_P (x))
474 output_address (XEXP (x, 0));
475 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == SFmode)
476 {
477 char dstr[30];
478 real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
479 sizeof (dstr), 0, 1);
480 fprintf (file, "$0f%s", dstr);
481 }
482 else if (GET_CODE (x) == CONST_DOUBLE && GET_MODE (x) == DFmode)
483 {
484 char dstr[30];
485 real_to_decimal (dstr, CONST_DOUBLE_REAL_VALUE (x),
486 sizeof (dstr), 0, 1);
487 fprintf (file, "$0%c%s", ASM_DOUBLE_CHAR, dstr);
488 }
489 else
490 {
491 if (flag_pic > 1 && symbolic_operand (x, SImode))
492 {
493 debug_rtx (x);
494 output_operand_lossage ("symbol used as immediate operand");
495 }
496 putc ('$', file);
497 output_addr_const (file, x);
498 }
499 }
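As an aside (not part of the changeset), the effect of a few of the single-letter output codes handled above can be reproduced with plain host arithmetic; the register names r0/r1 and the sample constants below are placeholders.

#include <stdio.h>

/* Sketch of three print_operand codes: %N prints ~value (for mcoml),
   %H prints the 16-bit complement (for mcomw), and %R prints 32 - n so a
   right rotate can be expressed with rotl's left rotate.  */
int main (void)
{
  long i = -6;                                      /* SImode constant */
  printf ("mcoml $%ld,r0\n", ~i);                   /* mcoml $5,r0  materializes -6 */

  long h = -64;                                     /* HImode constant */
  printf ("mcomw $%d,r0\n", (int) (0xffff & ~h));   /* mcomw $63,r0 materializes 0xffc0 */

  long r = 8;                                       /* rotate right by 8 bits */
  printf ("rotl $%ld,r1,r0\n", 32 - r);             /* rotl $24,r1,r0 */
  return 0;
}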
375 500
501 const char *
502 cond_name (rtx op)
503 {
504 switch (GET_CODE (op))
505 {
506 case NE:
507 return "neq";
508 case EQ:
509 return "eql";
510 case GE:
511 return "geq";
512 case GT:
513 return "gtr";
514 case LE:
515 return "leq";
516 case LT:
517 return "lss";
518 case GEU:
519 return "gequ";
520 case GTU:
521 return "gtru";
522 case LEU:
523 return "lequ";
524 case LTU:
525 return "lssu";
526
527 default:
528 gcc_unreachable ();
529 }
530 }
531
376 const char * 532 const char *
377 rev_cond_name (rtx op) 533 rev_cond_name (rtx op)
378 { 534 {
379 switch (GET_CODE (op)) 535 switch (GET_CODE (op))
380 { 536 {
402 default: 558 default:
403 gcc_unreachable (); 559 gcc_unreachable ();
404 } 560 }
405 } 561 }
406 562
407 int 563 static bool
408 vax_float_literal(rtx c) 564 vax_float_literal (rtx c)
409 { 565 {
410 enum machine_mode mode; 566 enum machine_mode mode;
411 REAL_VALUE_TYPE r, s; 567 REAL_VALUE_TYPE r, s;
412 int i; 568 int i;
413 569
414 if (GET_CODE (c) != CONST_DOUBLE) 570 if (GET_CODE (c) != CONST_DOUBLE)
415 return 0; 571 return false;
416 572
417 mode = GET_MODE (c); 573 mode = GET_MODE (c);
418 574
419 if (c == const_tiny_rtx[(int) mode][0] 575 if (c == const_tiny_rtx[(int) mode][0]
420 || c == const_tiny_rtx[(int) mode][1] 576 || c == const_tiny_rtx[(int) mode][1]
421 || c == const_tiny_rtx[(int) mode][2]) 577 || c == const_tiny_rtx[(int) mode][2])
422 return 1; 578 return true;
423 579
424 REAL_VALUE_FROM_CONST_DOUBLE (r, c); 580 REAL_VALUE_FROM_CONST_DOUBLE (r, c);
425 581
426 for (i = 0; i < 7; i++) 582 for (i = 0; i < 7; i++)
427 { 583 {
428 int x = 1 << i; 584 int x = 1 << i;
429 bool ok; 585 bool ok;
430 REAL_VALUE_FROM_INT (s, x, 0, mode); 586 REAL_VALUE_FROM_INT (s, x, 0, mode);
431 587
432 if (REAL_VALUES_EQUAL (r, s)) 588 if (REAL_VALUES_EQUAL (r, s))
433 return 1; 589 return true;
434 ok = exact_real_inverse (mode, &s); 590 ok = exact_real_inverse (mode, &s);
435 gcc_assert (ok); 591 gcc_assert (ok);
436 if (REAL_VALUES_EQUAL (r, s)) 592 if (REAL_VALUES_EQUAL (r, s))
437 return 1; 593 return true;
438 } 594 }
439 return 0; 595 return false;
440 } 596 }
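For reference (an editorial sketch, not part of the diff), the constants the routine above accepts as VAX short-literal floats are 0.0, 1.0 and 2.0 from const_tiny_rtx plus the powers of two up to 64 and their reciprocals; a quick host program lists them.

#include <stdio.h>

/* List the values vax_float_literal returns true for: 0, 1, 2 and
   2^k together with 1/2^k for k = 0..6.  */
int main (void)
{
  printf ("0 1 2\n");
  for (int k = 0; k <= 6; k++)
    printf ("%g %g\n", (double) (1 << k), 1.0 / (double) (1 << k));
  return 0;
}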
441 597
442 598
443 /* Return the cost in cycles of a memory address, relative to register 599 /* Return the cost in cycles of a memory address, relative to register
444 indirect. 600 indirect.
548 so that decrement-and-branch insns can be formed more easily (if 704 so that decrement-and-branch insns can be formed more easily (if
549 the value -1 is copied to a register some decrement-and-branch 705 the value -1 is copied to a register some decrement-and-branch
550 patterns will not match). */ 706 patterns will not match). */
551 case CONST_INT: 707 case CONST_INT:
552 if (INTVAL (x) == 0) 708 if (INTVAL (x) == 0)
553 return true; 709 {
710 *total = 0;
711 return true;
712 }
554 if (outer_code == AND) 713 if (outer_code == AND)
555 { 714 {
556 *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2; 715 *total = ((unsigned HOST_WIDE_INT) ~INTVAL (x) <= 077) ? 1 : 2;
557 return true; 716 return true;
558 } 717 }
559 if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077 718 if ((unsigned HOST_WIDE_INT) INTVAL (x) <= 077
560 || (outer_code == COMPARE 719 || (outer_code == COMPARE
561 && INTVAL (x) == -1) 720 && INTVAL (x) == -1)
575 734
576 case CONST_DOUBLE: 735 case CONST_DOUBLE:
577 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT) 736 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
578 *total = vax_float_literal (x) ? 5 : 8; 737 *total = vax_float_literal (x) ? 5 : 8;
579 else 738 else
580 *total = ((CONST_DOUBLE_HIGH (x) == 0 739 *total = ((CONST_DOUBLE_HIGH (x) == 0
581 && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64) 740 && (unsigned HOST_WIDE_INT) CONST_DOUBLE_LOW (x) < 64)
582 || (outer_code == PLUS 741 || (outer_code == PLUS
583 && CONST_DOUBLE_HIGH (x) == -1 742 && CONST_DOUBLE_HIGH (x) == -1
584 && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64)) 743 && (unsigned HOST_WIDE_INT)-CONST_DOUBLE_LOW (x) < 64))
585 ? 2 : 5; 744 ? 2 : 5;
781 *total += (GET_MODE (x) == DFmode) ? 3 : 2; 940 *total += (GET_MODE (x) == DFmode) ? 3 : 2;
782 } 941 }
783 else 942 else
784 { 943 {
785 if (CONST_DOUBLE_HIGH (op) != 0 944 if (CONST_DOUBLE_HIGH (op) != 0
786 || (unsigned)CONST_DOUBLE_LOW (op) > 63) 945 || (unsigned HOST_WIDE_INT)CONST_DOUBLE_LOW (op) > 63)
787 *total += 2; 946 *total += 2;
788 } 947 }
789 break; 948 break;
790 case MEM: 949 case MEM:
791 *total += 1; /* 2 on VAX 2 */ 950 *total += 1; /* 2 on VAX 2 */
810 jmp FUNCTION+2 #jump beyond FUNCTION's entry mask 969 jmp FUNCTION+2 #jump beyond FUNCTION's entry mask
811 */ 970 */
812 971
813 static void 972 static void
814 vax_output_mi_thunk (FILE * file, 973 vax_output_mi_thunk (FILE * file,
815 tree thunk ATTRIBUTE_UNUSED, 974 tree thunk ATTRIBUTE_UNUSED,
816 HOST_WIDE_INT delta, 975 HOST_WIDE_INT delta,
817 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED, 976 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
818 tree function) 977 tree function)
819 { 978 {
820 fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta); 979 fprintf (file, "\t.word 0x0ffc\n\taddl2 $" HOST_WIDE_INT_PRINT_DEC, delta);
821 asm_fprintf (file, ",4(%Rap)\n"); 980 asm_fprintf (file, ",4(%Rap)\n");
822 fprintf (file, "\tjmp "); 981 fprintf (file, "\tjmp ");
823 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0)); 982 assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
827 static rtx 986 static rtx
828 vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED, 987 vax_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
829 int incoming ATTRIBUTE_UNUSED) 988 int incoming ATTRIBUTE_UNUSED)
830 { 989 {
831 return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM); 990 return gen_rtx_REG (Pmode, VAX_STRUCT_VALUE_REGNUM);
991 }
992
993 static rtx
994 vax_builtin_setjmp_frame_value (void)
995 {
996 return hard_frame_pointer_rtx;
832 } 997 }
833 998
834 /* Worker function for NOTICE_UPDATE_CC. */ 999 /* Worker function for NOTICE_UPDATE_CC. */
835 1000
836 void 1001 void
904 1069
905 const char * 1070 const char *
906 vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands, 1071 vax_output_int_move (rtx insn ATTRIBUTE_UNUSED, rtx *operands,
907 enum machine_mode mode) 1072 enum machine_mode mode)
908 { 1073 {
1074 rtx hi[3], lo[3];
1075 const char *pattern_hi, *pattern_lo;
1076
909 switch (mode) 1077 switch (mode)
910 { 1078 {
1079 case DImode:
1080 if (operands[1] == const0_rtx)
1081 return "clrq %0";
1082 if (TARGET_QMATH && optimize_size
1083 && (CONST_INT_P (operands[1])
1084 || GET_CODE (operands[1]) == CONST_DOUBLE))
1085 {
1086 unsigned HOST_WIDE_INT hval, lval;
1087 int n;
1088
1089 if (GET_CODE (operands[1]) == CONST_DOUBLE)
1090 {
1091 gcc_assert (HOST_BITS_PER_WIDE_INT != 64);
1092
1093 /* Make sure only the low 32 bits are valid. */
1094 lval = CONST_DOUBLE_LOW (operands[1]) & 0xffffffff;
1095 hval = CONST_DOUBLE_HIGH (operands[1]) & 0xffffffff;
1096 }
1097 else
1098 {
1099 lval = INTVAL (operands[1]);
1100 hval = 0;
1101 }
1102
 1103 /* Here we check whether the 64bit value is really a 6bit value
 1104 shifted left by some arbitrary amount. If so, we can use ashq to
 1105 shift it to the correct value, saving 7 bytes (1 addr-mode byte +
 1106 8 bytes - 1 shift byte - 1 short literal byte). */
1107 if (lval != 0
1108 && (n = exact_log2 (lval & (- lval))) != -1
1109 && (lval >> n) < 64)
1110 {
1111 lval >>= n;
1112
1113 #if HOST_BITS_PER_WIDE_INT == 32
 1114 /* On 32bit platforms, if the 6 bits didn't overflow into the
 1115 upper 32bit value, that value had better be 0. If we have
 1116 overflowed, make sure it wasn't by too much. */
1117 if (hval != 0)
1118 {
1119 if (n <= 26 || hval >= ((unsigned)1 << (n - 26)))
1120 n = 0; /* failure */
1121 else
1122 lval |= hval << (32 - n);
1123 }
1124 #endif
1125 /* If n is 0, then ashq is not the best way to emit this. */
1126 if (n > 0)
1127 {
1128 operands[1] = GEN_INT (lval);
1129 operands[2] = GEN_INT (n);
1130 return "ashq %2,%1,%0";
1131 }
1132 #if HOST_BITS_PER_WIDE_INT == 32
1133 }
 1134 /* On 32bit platforms, if the low 32bit value is 0, check the
 1135 upper 32bit value. */
1136 else if (hval != 0
1137 && (n = exact_log2 (hval & (- hval)) - 1) != -1
1138 && (hval >> n) < 64)
1139 {
1140 operands[1] = GEN_INT (hval >> n);
1141 operands[2] = GEN_INT (n + 32);
1142 return "ashq %2,%1,%0";
1143 #endif
1144 }
1145 }
1146
1147 if (TARGET_QMATH
1148 && (!MEM_P (operands[0])
1149 || GET_CODE (XEXP (operands[0], 0)) == PRE_DEC
1150 || GET_CODE (XEXP (operands[0], 0)) == POST_INC
1151 || !illegal_addsub_di_memory_operand (operands[0], DImode))
1152 && ((CONST_INT_P (operands[1])
1153 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
1154 || GET_CODE (operands[1]) == CONST_DOUBLE))
1155 {
1156 hi[0] = operands[0];
1157 hi[1] = operands[1];
1158
1159 split_quadword_operands (insn, SET, hi, lo, 2);
1160
1161 pattern_lo = vax_output_int_move (NULL, lo, SImode);
1162 pattern_hi = vax_output_int_move (NULL, hi, SImode);
1163
 1164 /* If the patterns are just movl/movl or pushl/pushl then a movq will
 1165 be shorter (1 opcode byte + 1 addrmode byte + 8 immediate value
 1166 bytes vs. 2 opcode bytes + 2 addrmode bytes + 8 immediate
 1167 value bytes). */
1168 if ((!strncmp (pattern_lo, "movl", 4)
1169 && !strncmp (pattern_hi, "movl", 4))
1170 || (!strncmp (pattern_lo, "pushl", 5)
1171 && !strncmp (pattern_hi, "pushl", 5)))
1172 return "movq %1,%0";
1173
1174 if (MEM_P (operands[0])
1175 && GET_CODE (XEXP (operands[0], 0)) == PRE_DEC)
1176 {
1177 output_asm_insn (pattern_hi, hi);
1178 operands[0] = lo[0];
1179 operands[1] = lo[1];
1180 operands[2] = lo[2];
1181 return pattern_lo;
1182 }
1183 else
1184 {
1185 output_asm_insn (pattern_lo, lo);
1186 operands[0] = hi[0];
1187 operands[1] = hi[1];
1188 operands[2] = hi[2];
1189 return pattern_hi;
1190 }
1191 }
1192 return "movq %1,%0";
1193
911 case SImode: 1194 case SImode:
912 if (GET_CODE (operands[1]) == SYMBOL_REF || GET_CODE (operands[1]) == CONST) 1195 if (symbolic_operand (operands[1], SImode))
913 { 1196 {
914 if (push_operand (operands[0], SImode)) 1197 if (push_operand (operands[0], SImode))
915 return "pushab %a1"; 1198 return "pushab %a1";
916 return "movab %a1,%0"; 1199 return "movab %a1,%0";
917 } 1200 }
1201
918 if (operands[1] == const0_rtx) 1202 if (operands[1] == const0_rtx)
919 return "clrl %0"; 1203 {
1204 if (push_operand (operands[1], SImode))
1205 return "pushl %1";
1206 return "clrl %0";
1207 }
1208
920 if (CONST_INT_P (operands[1]) 1209 if (CONST_INT_P (operands[1])
921 && (unsigned) INTVAL (operands[1]) >= 64) 1210 && (unsigned HOST_WIDE_INT) INTVAL (operands[1]) >= 64)
922 { 1211 {
923 int i = INTVAL (operands[1]); 1212 HOST_WIDE_INT i = INTVAL (operands[1]);
924 if ((unsigned)(~i) < 64) 1213 int n;
1214 if ((unsigned HOST_WIDE_INT)(~i) < 64)
925 return "mcoml %N1,%0"; 1215 return "mcoml %N1,%0";
926 if ((unsigned)i < 0x100) 1216 if ((unsigned HOST_WIDE_INT)i < 0x100)
927 return "movzbl %1,%0"; 1217 return "movzbl %1,%0";
928 if (i >= -0x80 && i < 0) 1218 if (i >= -0x80 && i < 0)
929 return "cvtbl %1,%0"; 1219 return "cvtbl %1,%0";
930 if ((unsigned)i < 0x10000) 1220 if (optimize_size
1221 && (n = exact_log2 (i & (-i))) != -1
1222 && ((unsigned HOST_WIDE_INT)i >> n) < 64)
1223 {
1224 operands[1] = GEN_INT ((unsigned HOST_WIDE_INT)i >> n);
1225 operands[2] = GEN_INT (n);
1226 return "ashl %2,%1,%0";
1227 }
1228 if ((unsigned HOST_WIDE_INT)i < 0x10000)
931 return "movzwl %1,%0"; 1229 return "movzwl %1,%0";
932 if (i >= -0x8000 && i < 0) 1230 if (i >= -0x8000 && i < 0)
933 return "cvtwl %1,%0"; 1231 return "cvtwl %1,%0";
934 } 1232 }
935 if (push_operand (operands[0], SImode)) 1233 if (push_operand (operands[0], SImode))
937 return "movl %1,%0"; 1235 return "movl %1,%0";
938 1236
939 case HImode: 1237 case HImode:
940 if (CONST_INT_P (operands[1])) 1238 if (CONST_INT_P (operands[1]))
941 { 1239 {
942 int i = INTVAL (operands[1]); 1240 HOST_WIDE_INT i = INTVAL (operands[1]);
943 if (i == 0) 1241 if (i == 0)
944 return "clrw %0"; 1242 return "clrw %0";
945 else if ((unsigned int)i < 64) 1243 else if ((unsigned HOST_WIDE_INT)i < 64)
946 return "movw %1,%0"; 1244 return "movw %1,%0";
947 else if ((unsigned int)~i < 64) 1245 else if ((unsigned HOST_WIDE_INT)~i < 64)
948 return "mcomw %H1,%0"; 1246 return "mcomw %H1,%0";
949 else if ((unsigned int)i < 256) 1247 else if ((unsigned HOST_WIDE_INT)i < 256)
950 return "movzbw %1,%0"; 1248 return "movzbw %1,%0";
1249 else if (i >= -0x80 && i < 0)
1250 return "cvtbw %1,%0";
951 } 1251 }
952 return "movw %1,%0"; 1252 return "movw %1,%0";
953 1253
954 case QImode: 1254 case QImode:
955 if (CONST_INT_P (operands[1])) 1255 if (CONST_INT_P (operands[1]))
956 { 1256 {
957 int i = INTVAL (operands[1]); 1257 HOST_WIDE_INT i = INTVAL (operands[1]);
958 if (i == 0) 1258 if (i == 0)
959 return "clrb %0"; 1259 return "clrb %0";
960 else if ((unsigned int)~i < 64) 1260 else if ((unsigned HOST_WIDE_INT)~i < 64)
961 return "mcomb %B1,%0"; 1261 return "mcomb %B1,%0";
962 } 1262 }
963 return "movb %1,%0"; 1263 return "movb %1,%0";
964 1264
965 default: 1265 default:
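The DImode case of vax_output_int_move above looks for a 64-bit constant that is really a 6-bit literal shifted left, so the whole quadword can be rebuilt with one ashq. A minimal host-side sketch of that test, assuming a 64-bit value and an invented helper name:

#include <stdint.h>
#include <stdio.h>

/* Report whether V is a 6-bit literal shifted left, mirroring the
   lval/hval test in vax_output_int_move.  */
static int short_literal_shift (uint64_t v, unsigned *lit, unsigned *shift)
{
  if (v == 0)
    return 0;
  unsigned n = __builtin_ctzll (v);  /* lowest set bit, like exact_log2 (v & -v) */
  if ((v >> n) >= 64)                /* the remaining bits must fit in 6 bits */
    return 0;
  *lit = (unsigned) (v >> n);
  *shift = n;
  return 1;
}

int main (void)
{
  unsigned lit, shift;
  if (short_literal_shift (0x340000000ULL, &lit, &shift))
    printf ("ashq $%u,$%u,r0\n", shift, lit);   /* prints: ashq $30,$13,r0 */
  return 0;
}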
980 Compromise by using movab only when it is shorter than the add 1280 Compromise by using movab only when it is shorter than the add
981 or the base register in the address is one of sp, ap, and fp, 1281 or the base register in the address is one of sp, ap, and fp,
982 which are not modified very often. */ 1282 which are not modified very often. */
983 1283
984 const char * 1284 const char *
985 vax_output_int_add (rtx insn ATTRIBUTE_UNUSED, rtx *operands, 1285 vax_output_int_add (rtx insn, rtx *operands, enum machine_mode mode)
986 enum machine_mode mode)
987 { 1286 {
988 switch (mode) 1287 switch (mode)
989 { 1288 {
1289 case DImode:
1290 {
1291 rtx low[3];
1292 const char *pattern;
1293 int carry = 1;
1294 bool sub;
1295
1296 if (TARGET_QMATH && 0)
1297 debug_rtx (insn);
1298
1299 split_quadword_operands (insn, PLUS, operands, low, 3);
1300
1301 if (TARGET_QMATH)
1302 {
1303 gcc_assert (rtx_equal_p (operands[0], operands[1]));
 1304 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1305 gcc_assert (!flag_pic || !external_memory_operand (low[2], SImode));
1306 gcc_assert (!flag_pic || !external_memory_operand (low[0], SImode));
1307 #endif
1308
1309 /* No reason to add a 0 to the low part and thus no carry, so just
1310 emit the appropriate add/sub instruction. */
1311 if (low[2] == const0_rtx)
1312 return vax_output_int_add (NULL, operands, SImode);
1313
1314 /* Are we doing addition or subtraction? */
1315 sub = CONST_INT_P (operands[2]) && INTVAL (operands[2]) < 0;
1316
 1317 /* We can't use vax_output_int_add since some of the patterns don't
1318 modify the carry bit. */
1319 if (sub)
1320 {
1321 if (low[2] == constm1_rtx)
1322 pattern = "decl %0";
1323 else
1324 pattern = "subl2 $%n2,%0";
1325 }
1326 else
1327 {
1328 if (low[2] == const1_rtx)
1329 pattern = "incl %0";
1330 else
1331 pattern = "addl2 %2,%0";
1332 }
1333 output_asm_insn (pattern, low);
1334
1335 /* In 2's complement, -n = ~n + 1. Since we are dealing with
 1336 two 32bit parts, we complement each and then add one to the
 1337 low part. We know that the low part can't overflow since
 1338 its value can never be 0. */
1339 if (sub)
1340 return "sbwc %N2,%0";
1341 return "adwc %2,%0";
1342 }
1343
1344 /* Add low parts. */
1345 if (rtx_equal_p (operands[0], operands[1]))
1346 {
1347 if (low[2] == const0_rtx)
1348 /* Should examine operand, punt if not POST_INC. */
1349 pattern = "tstl %0", carry = 0;
1350 else if (low[2] == const1_rtx)
1351 pattern = "incl %0";
1352 else
1353 pattern = "addl2 %2,%0";
1354 }
1355 else
1356 {
1357 if (low[2] == const0_rtx)
1358 pattern = "movl %1,%0", carry = 0;
1359 else
1360 pattern = "addl3 %2,%1,%0";
1361 }
1362 if (pattern)
1363 output_asm_insn (pattern, low);
1364 if (!carry)
1365 /* If CARRY is 0, we don't have any carry value to worry about. */
1366 return get_insn_template (CODE_FOR_addsi3, insn);
1367 /* %0 = C + %1 + %2 */
1368 if (!rtx_equal_p (operands[0], operands[1]))
1369 output_asm_insn ((operands[1] == const0_rtx
1370 ? "clrl %0"
1371 : "movl %1,%0"), operands);
1372 return "adwc %2,%0";
1373 }
1374
990 case SImode: 1375 case SImode:
991 if (rtx_equal_p (operands[0], operands[1])) 1376 if (rtx_equal_p (operands[0], operands[1]))
992 { 1377 {
993 if (operands[2] == const1_rtx) 1378 if (operands[2] == const1_rtx)
994 return "incl %0"; 1379 return "incl %0";
995 if (operands[2] == constm1_rtx) 1380 if (operands[2] == constm1_rtx)
996 return "decl %0"; 1381 return "decl %0";
997 if (CONST_INT_P (operands[2]) 1382 if (CONST_INT_P (operands[2])
998 && (unsigned) (- INTVAL (operands[2])) < 64) 1383 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
999 return "subl2 $%n2,%0"; 1384 return "subl2 $%n2,%0";
1000 if (CONST_INT_P (operands[2]) 1385 if (CONST_INT_P (operands[2])
1001 && (unsigned) INTVAL (operands[2]) >= 64 1386 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1002 && REG_P (operands[1]) 1387 && REG_P (operands[1])
1003 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768) 1388 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1004 || REGNO (operands[1]) > 11)) 1389 || REGNO (operands[1]) > 11))
1005 return "movab %c2(%1),%0"; 1390 return "movab %c2(%1),%0";
1391 if (REG_P (operands[0]) && symbolic_operand (operands[2], SImode))
1392 return "movab %a2[%0],%0";
1006 return "addl2 %2,%0"; 1393 return "addl2 %2,%0";
1007 } 1394 }
1008 1395
1009 if (rtx_equal_p (operands[0], operands[2])) 1396 if (rtx_equal_p (operands[0], operands[2]))
1010 return "addl2 %1,%0"; 1397 {
1398 if (REG_P (operands[0]) && symbolic_operand (operands[1], SImode))
1399 return "movab %a1[%0],%0";
1400 return "addl2 %1,%0";
1401 }
1011 1402
1012 if (CONST_INT_P (operands[2]) 1403 if (CONST_INT_P (operands[2])
1013 && INTVAL (operands[2]) < 32767 1404 && INTVAL (operands[2]) < 32767
1014 && INTVAL (operands[2]) > -32768 1405 && INTVAL (operands[2]) > -32768
1015 && REG_P (operands[1]) 1406 && REG_P (operands[1])
1016 && push_operand (operands[0], SImode)) 1407 && push_operand (operands[0], SImode))
1017 return "pushab %c2(%1)"; 1408 return "pushab %c2(%1)";
1018 1409
1019 if (CONST_INT_P (operands[2]) 1410 if (CONST_INT_P (operands[2])
1020 && (unsigned) (- INTVAL (operands[2])) < 64) 1411 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1021 return "subl3 $%n2,%1,%0"; 1412 return "subl3 $%n2,%1,%0";
1022 1413
1023 if (CONST_INT_P (operands[2]) 1414 if (CONST_INT_P (operands[2])
1024 && (unsigned) INTVAL (operands[2]) >= 64 1415 && (unsigned HOST_WIDE_INT) INTVAL (operands[2]) >= 64
1025 && REG_P (operands[1]) 1416 && REG_P (operands[1])
1026 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768) 1417 && ((INTVAL (operands[2]) < 32767 && INTVAL (operands[2]) > -32768)
1027 || REGNO (operands[1]) > 11)) 1418 || REGNO (operands[1]) > 11))
1028 return "movab %c2(%1),%0"; 1419 return "movab %c2(%1),%0";
1029 1420
1030 /* Add this if using gcc on a VAX 3xxx: 1421 /* Add this if using gcc on a VAX 3xxx:
1031 if (REG_P (operands[1]) && REG_P (operands[2])) 1422 if (REG_P (operands[1]) && REG_P (operands[2]))
1032 return "movab (%1)[%2],%0"; 1423 return "movab (%1)[%2],%0";
1033 */ 1424 */
1425
1426 if (REG_P (operands[1]) && symbolic_operand (operands[2], SImode))
1427 {
1428 if (push_operand (operands[0], SImode))
1429 return "pushab %a2[%1]";
1430 return "movab %a2[%1],%0";
1431 }
1432
1433 if (REG_P (operands[2]) && symbolic_operand (operands[1], SImode))
1434 {
1435 if (push_operand (operands[0], SImode))
1436 return "pushab %a1[%2]";
1437 return "movab %a1[%2],%0";
1438 }
1439
1440 if (flag_pic && REG_P (operands[0])
1441 && symbolic_operand (operands[2], SImode))
1442 return "movab %a2,%0;addl2 %1,%0";
1443
1444 if (flag_pic
1445 && (symbolic_operand (operands[1], SImode)
1446 || symbolic_operand (operands[1], SImode)))
1447 debug_rtx (insn);
1448
1034 return "addl3 %1,%2,%0"; 1449 return "addl3 %1,%2,%0";
1035 1450
1036 case HImode: 1451 case HImode:
1037 if (rtx_equal_p (operands[0], operands[1])) 1452 if (rtx_equal_p (operands[0], operands[1]))
1038 { 1453 {
1039 if (operands[2] == const1_rtx) 1454 if (operands[2] == const1_rtx)
1040 return "incw %0"; 1455 return "incw %0";
1041 if (operands[2] == constm1_rtx) 1456 if (operands[2] == constm1_rtx)
1042 return "decw %0"; 1457 return "decw %0";
1043 if (CONST_INT_P (operands[2]) 1458 if (CONST_INT_P (operands[2])
1044 && (unsigned) (- INTVAL (operands[2])) < 64) 1459 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1045 return "subw2 $%n2,%0"; 1460 return "subw2 $%n2,%0";
1046 return "addw2 %2,%0"; 1461 return "addw2 %2,%0";
1047 } 1462 }
1048 if (rtx_equal_p (operands[0], operands[2])) 1463 if (rtx_equal_p (operands[0], operands[2]))
1049 return "addw2 %1,%0"; 1464 return "addw2 %1,%0";
1050 if (CONST_INT_P (operands[2]) 1465 if (CONST_INT_P (operands[2])
1051 && (unsigned) (- INTVAL (operands[2])) < 64) 1466 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1052 return "subw3 $%n2,%1,%0"; 1467 return "subw3 $%n2,%1,%0";
1053 return "addw3 %1,%2,%0"; 1468 return "addw3 %1,%2,%0";
1054 1469
1055 case QImode: 1470 case QImode:
1056 if (rtx_equal_p (operands[0], operands[1])) 1471 if (rtx_equal_p (operands[0], operands[1]))
1058 if (operands[2] == const1_rtx) 1473 if (operands[2] == const1_rtx)
1059 return "incb %0"; 1474 return "incb %0";
1060 if (operands[2] == constm1_rtx) 1475 if (operands[2] == constm1_rtx)
1061 return "decb %0"; 1476 return "decb %0";
1062 if (CONST_INT_P (operands[2]) 1477 if (CONST_INT_P (operands[2])
1063 && (unsigned) (- INTVAL (operands[2])) < 64) 1478 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1064 return "subb2 $%n2,%0"; 1479 return "subb2 $%n2,%0";
1065 return "addb2 %2,%0"; 1480 return "addb2 %2,%0";
1066 } 1481 }
1067 if (rtx_equal_p (operands[0], operands[2])) 1482 if (rtx_equal_p (operands[0], operands[2]))
1068 return "addb2 %1,%0"; 1483 return "addb2 %1,%0";
1069 if (CONST_INT_P (operands[2]) 1484 if (CONST_INT_P (operands[2])
1070 && (unsigned) (- INTVAL (operands[2])) < 64) 1485 && (unsigned HOST_WIDE_INT) (- INTVAL (operands[2])) < 64)
1071 return "subb3 $%n2,%1,%0"; 1486 return "subb3 $%n2,%1,%0";
1072 return "addb3 %1,%2,%0"; 1487 return "addb3 %1,%2,%0";
1073 1488
1074 default: 1489 default:
1075 gcc_unreachable (); 1490 gcc_unreachable ();
1076 } 1491 }
1077 } 1492 }
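As a rough model (not part of the change), the carry handling in the DImode case above amounts to an unsigned 32-bit add whose overflow feeds the adwc on the high word; the constants below are arbitrary.

#include <stdint.h>
#include <stdio.h>

/* Model of the addl2 + adwc pair: a 64-bit add done as two 32-bit adds. */
int main (void)
{
  uint32_t a_lo = 0xffffffff, a_hi = 0x00000001;   /* a = 0x1ffffffff */
  uint32_t b_lo = 0x00000002, b_hi = 0x00000000;   /* b = 2 */

  uint32_t lo = a_lo + b_lo;           /* addl2 %2,%0 on the low words  */
  uint32_t carry = lo < a_lo;          /* C bit produced by the low add */
  uint32_t hi = a_hi + b_hi + carry;   /* adwc %2,%0 on the high words  */

  printf ("0x%08x%08x\n", hi, lo);     /* prints 0x0000000200000001 */
  return 0;
}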
1078 1493
1079 /* Output a conditional branch. */
1080 const char * 1494 const char *
1081 vax_output_conditional_branch (enum rtx_code code) 1495 vax_output_int_subtract (rtx insn, rtx *operands, enum machine_mode mode)
1082 { 1496 {
1083 switch (code) 1497 switch (mode)
1084 { 1498 {
1085 case EQ: return "jeql %l0"; 1499 case DImode:
1086 case NE: return "jneq %l0"; 1500 {
1087 case GT: return "jgtr %l0"; 1501 rtx low[3];
1088 case LT: return "jlss %l0"; 1502 const char *pattern;
1089 case GTU: return "jgtru %l0"; 1503 int carry = 1;
1090 case LTU: return "jlssu %l0"; 1504
1091 case GE: return "jgeq %l0"; 1505 if (TARGET_QMATH && 0)
1092 case LE: return "jleq %l0"; 1506 debug_rtx (insn);
1093 case GEU: return "jgequ %l0"; 1507
1094 case LEU: return "jlequ %l0"; 1508 split_quadword_operands (insn, MINUS, operands, low, 3);
1095 default: 1509
1096 gcc_unreachable (); 1510 if (TARGET_QMATH)
1097 } 1511 {
1098 } 1512 if (operands[1] == const0_rtx && low[1] == const0_rtx)
1099 1513 {
1100 /* 1 if X is an rtx for a constant that is a valid address. */ 1514 /* Negation is tricky. It's basically complement and increment.
1101 1515 Negate hi, then lo, and subtract the carry back. */
1102 int 1516 if ((MEM_P (low[0]) && GET_CODE (XEXP (low[0], 0)) == POST_INC)
1517 || (MEM_P (operands[0])
1518 && GET_CODE (XEXP (operands[0], 0)) == POST_INC))
1519 fatal_insn ("illegal operand detected", insn);
1520 output_asm_insn ("mnegl %2,%0", operands);
1521 output_asm_insn ("mnegl %2,%0", low);
1522 return "sbwc $0,%0";
1523 }
1524 gcc_assert (rtx_equal_p (operands[0], operands[1]));
1525 gcc_assert (rtx_equal_p (low[0], low[1]));
1526 if (low[2] == const1_rtx)
1527 output_asm_insn ("decl %0", low);
1528 else
1529 output_asm_insn ("subl2 %2,%0", low);
1530 return "sbwc %2,%0";
1531 }
1532
1533 /* Subtract low parts. */
1534 if (rtx_equal_p (operands[0], operands[1]))
1535 {
1536 if (low[2] == const0_rtx)
1537 pattern = 0, carry = 0;
1538 else if (low[2] == constm1_rtx)
1539 pattern = "decl %0";
1540 else
1541 pattern = "subl2 %2,%0";
1542 }
1543 else
1544 {
1545 if (low[2] == constm1_rtx)
1546 pattern = "decl %0";
1547 else if (low[2] == const0_rtx)
1548 pattern = get_insn_template (CODE_FOR_movsi, insn), carry = 0;
1549 else
1550 pattern = "subl3 %2,%1,%0";
1551 }
1552 if (pattern)
1553 output_asm_insn (pattern, low);
1554 if (carry)
1555 {
1556 if (!rtx_equal_p (operands[0], operands[1]))
1557 return "movl %1,%0;sbwc %2,%0";
1558 return "sbwc %2,%0";
1559 /* %0 = %2 - %1 - C */
1560 }
1561 return get_insn_template (CODE_FOR_subsi3, insn);
1562 }
1563
1564 default:
1565 gcc_unreachable ();
1566 }
1567 }
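The negation path above (mnegl on each half followed by sbwc $0) works because -x = ~x + 1 spread over two words; a small model of the borrow it propagates, with an arbitrary sample value:

#include <stdint.h>
#include <stdio.h>

/* Model of "mnegl hi; mnegl lo; sbwc $0,hi": negate a 64-bit value held
   as two 32-bit words, subtracting the borrow out of the low negate.  */
int main (void)
{
  uint32_t x_lo = 0x00000001, x_hi = 0x00000002;   /* x = 0x200000001 */

  uint32_t hi = -x_hi;                 /* mnegl on the high word */
  uint32_t lo = -x_lo;                 /* mnegl on the low word  */
  hi -= (x_lo != 0);                   /* sbwc $0: borrow if the low word was nonzero */

  printf ("0x%08x%08x\n", hi, lo);     /* prints 0xfffffffdffffffff, i.e. -x */
  return 0;
}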
1568
1569 /* True if X is an rtx for a constant that is a valid address. */
1570
1571 bool
1103 legitimate_constant_address_p (rtx x) 1572 legitimate_constant_address_p (rtx x)
1104 { 1573 {
1105 return (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF 1574 if (GET_CODE (x) == LABEL_REF || GET_CODE (x) == SYMBOL_REF
1106 || CONST_INT_P (x) || GET_CODE (x) == CONST 1575 || CONST_INT_P (x) || GET_CODE (x) == HIGH)
1107 || GET_CODE (x) == HIGH); 1576 return true;
1108 } 1577 if (GET_CODE (x) != CONST)
1109 1578 return false;
1110 /* Nonzero if the constant value X is a legitimate general operand. 1579 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1580 if (flag_pic
1581 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1582 && !SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0)))
1583 return false;
1584 #endif
1585 return true;
1586 }
1587
1588 /* True if the constant value X is a legitimate general operand.
1111 It is given that X satisfies CONSTANT_P or is a CONST_DOUBLE. */ 1589 It is given that X satisfies CONSTANT_P or is a CONST_DOUBLE. */
1112 1590
1113 int 1591 bool
1114 legitimate_constant_p (rtx x ATTRIBUTE_UNUSED) 1592 legitimate_constant_p (rtx x ATTRIBUTE_UNUSED)
1115 { 1593 {
1116 return 1; 1594 return true;
1117 } 1595 }
1118 1596
1119 /* The other macros defined here are used only in legitimate_address_p (). */ 1597 /* The other macros defined here are used only in legitimate_address_p (). */
1120 1598
1121 /* Nonzero if X is a hard reg that can be used as an index 1599 /* Nonzero if X is a hard reg that can be used as an index
1131 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS 1609 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1132 1610
1133 /* Re-definition of CONSTANT_ADDRESS_P, which is true only when there 1611 /* Re-definition of CONSTANT_ADDRESS_P, which is true only when there
1134 are no SYMBOL_REFs for external symbols present. */ 1612 are no SYMBOL_REFs for external symbols present. */
1135 1613
1136 static int 1614 static bool
1137 indirectable_constant_address_p (rtx x) 1615 indirectable_constant_address_p (rtx x, bool indirect)
1138 { 1616 {
1139 if (!CONSTANT_ADDRESS_P (x)) 1617 if (GET_CODE (x) == SYMBOL_REF)
1140 return 0; 1618 return !flag_pic || SYMBOL_REF_LOCAL_P (x) || !indirect;
1141 if (GET_CODE (x) == CONST && GET_CODE (XEXP ((x), 0)) == PLUS) 1619
1142 x = XEXP (XEXP (x, 0), 0); 1620 if (GET_CODE (x) == CONST)
1143 if (GET_CODE (x) == SYMBOL_REF && !SYMBOL_REF_LOCAL_P (x)) 1621 return !flag_pic
1144 return 0; 1622 || GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
1145 1623 || SYMBOL_REF_LOCAL_P (XEXP (XEXP (x, 0), 0));
1146 return 1; 1624
1625 return CONSTANT_ADDRESS_P (x);
1147 } 1626 }
1148 1627
1149 #else /* not NO_EXTERNAL_INDIRECT_ADDRESS */ 1628 #else /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1150 1629
1151 static int 1630 static bool
1152 indirectable_constant_address_p (rtx x) 1631 indirectable_constant_address_p (rtx x, bool indirect ATTRIBUTE_UNUSED)
1153 { 1632 {
1154 return CONSTANT_ADDRESS_P (x); 1633 return CONSTANT_ADDRESS_P (x);
1155 } 1634 }
1156 1635
1157 #endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */ 1636 #endif /* not NO_EXTERNAL_INDIRECT_ADDRESS */
1158 1637
1159 /* Nonzero if X is an address which can be indirected. External symbols 1638 /* True if X is an address which can be indirected. External symbols
1160 could be in a sharable image library, so we disallow those. */ 1639 could be in a sharable image library, so we disallow those. */
1161 1640
1162 static int 1641 static bool
1163 indirectable_address_p(rtx x, int strict) 1642 indirectable_address_p (rtx x, bool strict, bool indirect)
1164 { 1643 {
1165 if (indirectable_constant_address_p (x)) 1644 if (indirectable_constant_address_p (x, indirect)
1166 return 1; 1645 || BASE_REGISTER_P (x, strict))
1167 if (BASE_REGISTER_P (x, strict)) 1646 return true;
1168 return 1; 1647 if (GET_CODE (x) != PLUS
1169 if (GET_CODE (x) == PLUS 1648 || !BASE_REGISTER_P (XEXP (x, 0), strict)
1170 && BASE_REGISTER_P (XEXP (x, 0), strict) 1649 || (flag_pic && !CONST_INT_P (XEXP (x, 1))))
1171 && indirectable_constant_address_p (XEXP (x, 1))) 1650 return false;
1172 return 1; 1651 return indirectable_constant_address_p (XEXP (x, 1), indirect);
1173 return 0; 1652 }
1174 } 1653
1175 1654 /* Return true if x is a valid address not using indexing.
1176 /* Return 1 if x is a valid address not using indexing.
1177 (This much is the easy part.) */ 1655 (This much is the easy part.) */
1178 static int 1656 static bool
1179 nonindexed_address_p (rtx x, int strict) 1657 nonindexed_address_p (rtx x, bool strict)
1180 { 1658 {
1181 rtx xfoo0; 1659 rtx xfoo0;
1182 if (REG_P (x)) 1660 if (REG_P (x))
1183 { 1661 {
1184 extern rtx *reg_equiv_mem; 1662 extern rtx *reg_equiv_mem;
1185 if (!reload_in_progress 1663 if (! reload_in_progress
1186 || reg_equiv_mem[REGNO (x)] == 0 1664 || reg_equiv_mem[REGNO (x)] == 0
1187 || indirectable_address_p (reg_equiv_mem[REGNO (x)], strict)) 1665 || indirectable_address_p (reg_equiv_mem[REGNO (x)], strict, false))
1188 return 1; 1666 return true;
1189 } 1667 }
1190 if (indirectable_constant_address_p (x)) 1668 if (indirectable_constant_address_p (x, false))
1191 return 1; 1669 return true;
1192 if (indirectable_address_p (x, strict)) 1670 if (indirectable_address_p (x, strict, false))
1193 return 1; 1671 return true;
1194 xfoo0 = XEXP (x, 0); 1672 xfoo0 = XEXP (x, 0);
1195 if (MEM_P (x) && indirectable_address_p (xfoo0, strict)) 1673 if (MEM_P (x) && indirectable_address_p (xfoo0, strict, true))
1196 return 1; 1674 return true;
1197 if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC) 1675 if ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_INC)
1198 && BASE_REGISTER_P (xfoo0, strict)) 1676 && BASE_REGISTER_P (xfoo0, strict))
1199 return 1; 1677 return true;
1200 return 0; 1678 return false;
1201 } 1679 }
1202 1680
1203 /* 1 if PROD is either a reg times size of mode MODE and MODE is less 1681 /* True if PROD is either a reg times size of mode MODE and MODE is less
 1204 than or equal to 8 bytes, or just a reg if MODE is one byte. */ 1682 than or equal to 8 bytes, or just a reg if MODE is one byte. */
1205 1683
1206 static int 1684 static bool
1207 index_term_p (rtx prod, enum machine_mode mode, int strict) 1685 index_term_p (rtx prod, enum machine_mode mode, bool strict)
1208 { 1686 {
1209 rtx xfoo0, xfoo1; 1687 rtx xfoo0, xfoo1;
1210 1688
1211 if (GET_MODE_SIZE (mode) == 1) 1689 if (GET_MODE_SIZE (mode) == 1)
1212 return BASE_REGISTER_P (prod, strict); 1690 return BASE_REGISTER_P (prod, strict);
1213 1691
1214 if (GET_CODE (prod) != MULT || GET_MODE_SIZE (mode) > 8) 1692 if (GET_CODE (prod) != MULT || GET_MODE_SIZE (mode) > 8)
1215 return 0; 1693 return false;
1216 1694
1217 xfoo0 = XEXP (prod, 0); 1695 xfoo0 = XEXP (prod, 0);
1218 xfoo1 = XEXP (prod, 1); 1696 xfoo1 = XEXP (prod, 1);
1219 1697
1220 if (CONST_INT_P (xfoo0) 1698 if (CONST_INT_P (xfoo0)
1221 && INTVAL (xfoo0) == (int)GET_MODE_SIZE (mode) 1699 && INTVAL (xfoo0) == (int)GET_MODE_SIZE (mode)
1222 && INDEX_REGISTER_P (xfoo1, strict)) 1700 && INDEX_REGISTER_P (xfoo1, strict))
1223 return 1; 1701 return true;
1224 1702
1225 if (CONST_INT_P (xfoo1) 1703 if (CONST_INT_P (xfoo1)
1226 && INTVAL (xfoo1) == (int)GET_MODE_SIZE (mode) 1704 && INTVAL (xfoo1) == (int)GET_MODE_SIZE (mode)
1227 && INDEX_REGISTER_P (xfoo0, strict)) 1705 && INDEX_REGISTER_P (xfoo0, strict))
1228 return 1; 1706 return true;
1229 1707
1230 return 0; 1708 return false;
1231 } 1709 }
1232 1710
1233 /* Return 1 if X is the sum of a register 1711 /* Return true if X is the sum of a register
1234 and a valid index term for mode MODE. */ 1712 and a valid index term for mode MODE. */
1235 static int 1713 static bool
1236 reg_plus_index_p (rtx x, enum machine_mode mode, int strict) 1714 reg_plus_index_p (rtx x, enum machine_mode mode, bool strict)
1237 { 1715 {
1238 rtx xfoo0, xfoo1; 1716 rtx xfoo0, xfoo1;
1239 1717
1240 if (GET_CODE (x) != PLUS) 1718 if (GET_CODE (x) != PLUS)
1241 return 0; 1719 return false;
1242 1720
1243 xfoo0 = XEXP (x, 0); 1721 xfoo0 = XEXP (x, 0);
1244 xfoo1 = XEXP (x, 1); 1722 xfoo1 = XEXP (x, 1);
1245 1723
1246 if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict)) 1724 if (BASE_REGISTER_P (xfoo0, strict) && index_term_p (xfoo1, mode, strict))
1247 return 1; 1725 return true;
1248 1726
1249 if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict)) 1727 if (BASE_REGISTER_P (xfoo1, strict) && index_term_p (xfoo0, mode, strict))
1250 return 1; 1728 return true;
1251 1729
1252 return 0; 1730 return false;
1253 } 1731 }
1254 1732
1255 /* legitimate_address_p returns 1 if it recognizes an RTL expression "x" 1733 /* Return true if xfoo0 and xfoo1 constitute a valid indexed address. */
1734 static bool
1735 indexable_address_p (rtx xfoo0, rtx xfoo1, enum machine_mode mode, bool strict)
1736 {
1737 if (!CONSTANT_ADDRESS_P (xfoo0))
1738 return false;
1739 if (BASE_REGISTER_P (xfoo1, strict))
1740 return !flag_pic || mode == QImode;
1741 if (flag_pic && symbolic_operand (xfoo0, SImode))
1742 return false;
1743 return reg_plus_index_p (xfoo1, mode, strict);
1744 }
1745
1746 /* legitimate_address_p returns true if it recognizes an RTL expression "x"
1256 that is a valid memory address for an instruction. 1747 that is a valid memory address for an instruction.
1257 The MODE argument is the machine mode for the MEM expression 1748 The MODE argument is the machine mode for the MEM expression
1258 that wants to use this address. */ 1749 that wants to use this address. */
1259 int 1750 bool
1260 legitimate_address_p (enum machine_mode mode, rtx x, int strict) 1751 vax_legitimate_address_p (enum machine_mode mode, rtx x, bool strict)
1261 { 1752 {
1262 rtx xfoo0, xfoo1; 1753 rtx xfoo0, xfoo1;
1263 1754
1264 if (nonindexed_address_p (x, strict)) 1755 if (nonindexed_address_p (x, strict))
1265 return 1; 1756 return true;
1266 1757
1267 if (GET_CODE (x) != PLUS) 1758 if (GET_CODE (x) != PLUS)
1268 return 0; 1759 return false;
1269 1760
1270 /* Handle <address>[index] represented with index-sum outermost */ 1761 /* Handle <address>[index] represented with index-sum outermost */
1271 1762
1272 xfoo0 = XEXP (x, 0); 1763 xfoo0 = XEXP (x, 0);
1273 xfoo1 = XEXP (x, 1); 1764 xfoo1 = XEXP (x, 1);
1274 1765
1275 if (index_term_p (xfoo0, mode, strict) 1766 if (index_term_p (xfoo0, mode, strict)
1276 && nonindexed_address_p (xfoo1, strict)) 1767 && nonindexed_address_p (xfoo1, strict))
1277 return 1; 1768 return true;
1278 1769
1279 if (index_term_p (xfoo1, mode, strict) 1770 if (index_term_p (xfoo1, mode, strict)
1280 && nonindexed_address_p (xfoo0, strict)) 1771 && nonindexed_address_p (xfoo0, strict))
1281 return 1; 1772 return true;
1282 1773
1283 /* Handle offset(reg)[index] with offset added outermost */ 1774 /* Handle offset(reg)[index] with offset added outermost */
1284 1775
1285 if (indirectable_constant_address_p (xfoo0) 1776 if (indexable_address_p (xfoo0, xfoo1, mode, strict)
1286 && (BASE_REGISTER_P (xfoo1, strict) 1777 || indexable_address_p (xfoo1, xfoo0, mode, strict))
1287 || reg_plus_index_p (xfoo1, mode, strict))) 1778 return true;
1288 return 1; 1779
1289 1780 return false;
1290 if (indirectable_constant_address_p (xfoo1) 1781 }
1291 && (BASE_REGISTER_P (xfoo0, strict) 1782
1292 || reg_plus_index_p (xfoo0, mode, strict))) 1783 /* Return true if x (a legitimate address expression) has an effect that
1293 return 1;
1294
1295 return 0;
1296 }
1297
1298 /* Return 1 if x (a legitimate address expression) has an effect that
1299 depends on the machine mode it is used for. On the VAX, the predecrement 1784 depends on the machine mode it is used for. On the VAX, the predecrement
1300 and postincrement address depend thus (the amount of decrement or 1785 and postincrement address depend thus (the amount of decrement or
1301 increment being the length of the operand) and all indexed address depend 1786 increment being the length of the operand) and all indexed address depend
1302 thus (because the index scale factor is the length of the operand). */ 1787 thus (because the index scale factor is the length of the operand). */
1303 1788
1304 int 1789 bool
1305 vax_mode_dependent_address_p (rtx x) 1790 vax_mode_dependent_address_p (rtx x)
1306 { 1791 {
1307 rtx xfoo0, xfoo1; 1792 rtx xfoo0, xfoo1;
1308 1793
1309 /* Auto-increment cases are now dealt with generically in recog.c. */ 1794 /* Auto-increment cases are now dealt with generically in recog.c. */
1310
1311 if (GET_CODE (x) != PLUS) 1795 if (GET_CODE (x) != PLUS)
1312 return 0; 1796 return false;
1313 1797
1314 xfoo0 = XEXP (x, 0); 1798 xfoo0 = XEXP (x, 0);
1315 xfoo1 = XEXP (x, 1); 1799 xfoo1 = XEXP (x, 1);
1316 1800
1317 if (CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1)) 1801 if (CONST_INT_P (xfoo0) && REG_P (xfoo1))
1318 return 0; 1802 return false;
1319 if (CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0)) 1803 if (CONST_INT_P (xfoo1) && REG_P (xfoo0))
1320 return 0; 1804 return false;
1321 1805 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo0) && REG_P (xfoo1))
1322 return 1; 1806 return false;
1323 } 1807 if (!flag_pic && CONSTANT_ADDRESS_P (xfoo1) && REG_P (xfoo0))
1808 return false;
1809
1810 return true;
1811 }
1812
1813 static rtx
1814 fixup_mathdi_operand (rtx x, enum machine_mode mode)
1815 {
1816 if (illegal_addsub_di_memory_operand (x, mode))
1817 {
1818 rtx addr = XEXP (x, 0);
1819 rtx temp = gen_reg_rtx (Pmode);
1820 rtx offset = 0;
1821 #ifdef NO_EXTERNAL_INDIRECT_ADDRESS
1822 if (GET_CODE (addr) == CONST && flag_pic)
1823 {
1824 offset = XEXP (XEXP (addr, 0), 1);
1825 addr = XEXP (XEXP (addr, 0), 0);
1826 }
1827 #endif
1828 emit_move_insn (temp, addr);
1829 if (offset)
1830 temp = gen_rtx_PLUS (Pmode, temp, offset);
1831 x = gen_rtx_MEM (DImode, temp);
1832 }
1833 return x;
1834 }
1835
1836 void
1837 vax_expand_addsub_di_operands (rtx * operands, enum rtx_code code)
1838 {
1839 int hi_only = operand_subword (operands[2], 0, 0, DImode) == const0_rtx;
1840 rtx temp;
1841
1842 rtx (*gen_old_insn)(rtx, rtx, rtx);
1843 rtx (*gen_si_insn)(rtx, rtx, rtx);
1844 rtx (*gen_insn)(rtx, rtx, rtx);
1845
1846 if (code == PLUS)
1847 {
1848 gen_old_insn = gen_adddi3_old;
1849 gen_si_insn = gen_addsi3;
1850 gen_insn = gen_adcdi3;
1851 }
1852 else if (code == MINUS)
1853 {
1854 gen_old_insn = gen_subdi3_old;
1855 gen_si_insn = gen_subsi3;
1856 gen_insn = gen_sbcdi3;
1857 }
1858 else
1859 gcc_unreachable ();
1860
1861 /* If this is addition (thus operands are commutative) and if there is one
 1862 addend that duplicates the destination, we want that addend to be the
1863 first addend. */
1864 if (code == PLUS
1865 && rtx_equal_p (operands[0], operands[2])
1866 && !rtx_equal_p (operands[1], operands[2]))
1867 {
1868 temp = operands[2];
1869 operands[2] = operands[1];
1870 operands[1] = temp;
1871 }
1872
1873 if (!TARGET_QMATH)
1874 {
1875 emit_insn ((*gen_old_insn) (operands[0], operands[1], operands[2]));
1876 }
1877 else if (hi_only)
1878 {
1879 if (!rtx_equal_p (operands[0], operands[1])
1880 && (REG_P (operands[0]) && MEM_P (operands[1])))
1881 {
1882 emit_move_insn (operands[0], operands[1]);
1883 operands[1] = operands[0];
1884 }
1885
1886 operands[0] = fixup_mathdi_operand (operands[0], DImode);
1887 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1888 operands[2] = fixup_mathdi_operand (operands[2], DImode);
1889
1890 if (!rtx_equal_p (operands[0], operands[1]))
1891 emit_move_insn (operand_subword (operands[0], 0, 0, DImode),
1892 operand_subword (operands[1], 0, 0, DImode));
1893
1894 emit_insn ((*gen_si_insn) (operand_subword (operands[0], 1, 0, DImode),
1895 operand_subword (operands[1], 1, 0, DImode),
1896 operand_subword (operands[2], 1, 0, DImode)));
1897 }
1898 else
1899 {
 1900 /* If we are adding the same value together, that's really a multiply by 2,
1901 and that's just a left shift of 1. */
1902 if (rtx_equal_p (operands[1], operands[2]))
1903 {
1904 gcc_assert (code != MINUS);
1905 emit_insn (gen_ashldi3 (operands[0], operands[1], const1_rtx));
1906 return;
1907 }
1908
1909 operands[0] = fixup_mathdi_operand (operands[0], DImode);
1910
1911 /* If an operand is the same as operand[0], use the operand[0] rtx
 1912 because fixup will return an equivalent rtx but not an equal one. */
1913
1914 if (rtx_equal_p (operands[0], operands[1]))
1915 operands[1] = operands[0];
1916 else
1917 operands[1] = fixup_mathdi_operand (operands[1], DImode);
1918
1919 if (rtx_equal_p (operands[0], operands[2]))
1920 operands[2] = operands[0];
1921 else
1922 operands[2] = fixup_mathdi_operand (operands[2], DImode);
1923
 1924 /* If we are not subtracting from ourselves [d = a - b], then because the
 1925 carry ops are two operand only, we would need to do a move prior to
 1926 the subtract. And if d == b, we would need a temp as well, since
 1927 otherwise [d = a, d -= d] would end up as 0. Instead we rewrite d = a - b
1928 into d = -b, d += a. Since -b can never overflow, even if b == d,
1929 no temp is needed.
1930
1931 If we are doing addition, since the carry ops are two operand, if
1932 we aren't adding to ourselves, move the first addend to the
1933 destination first. */
1934
1935 gcc_assert (operands[1] != const0_rtx || code == MINUS);
1936 if (!rtx_equal_p (operands[0], operands[1]) && operands[1] != const0_rtx)
1937 {
1938 if (code == MINUS && CONSTANT_P (operands[1]))
1939 {
1940 temp = gen_reg_rtx (DImode);
1941 emit_insn (gen_sbcdi3 (operands[0], const0_rtx, operands[2]));
1942 code = PLUS;
1943 gen_insn = gen_adcdi3;
1944 operands[2] = operands[1];
1945 operands[1] = operands[0];
1946 }
1947 else
1948 emit_move_insn (operands[0], operands[1]);
1949 }
1950
1951 /* Subtracting a constant will have been rewritten to an addition of the
1952 negative of that constant before we get here. */
1953 gcc_assert (!CONSTANT_P (operands[2]) || code == PLUS);
1954 emit_insn ((*gen_insn) (operands[0], operands[1], operands[2]));
1955 }
1956 }
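The rewrite described in the comment above, turning d = a - b into d = -b; d += a so no temporary is needed even when d aliases b, can be sanity-checked with a quick host model (editorial sketch only, arbitrary values):

#include <stdint.h>
#include <stdio.h>

/* Check that d = a - b and "d = -b; d += a" agree even when d aliases b. */
int main (void)
{
  uint64_t a = 0x123456789abcdef0ULL;
  uint64_t d = 0x0fedcba987654321ULL;   /* d is also b, i.e. d == b */

  uint64_t expected = a - d;

  d = -d;                               /* d = -b */
  d += a;                               /* d += a, giving a - b */

  printf ("%s\n", d == expected ? "match" : "mismatch");   /* prints match */
  return 0;
}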
1957
1958 bool
1959 adjacent_operands_p (rtx lo, rtx hi, enum machine_mode mode)
1960 {
1961 HOST_WIDE_INT lo_offset;
1962 HOST_WIDE_INT hi_offset;
1963
1964 if (GET_CODE (lo) != GET_CODE (hi))
1965 return false;
1966
1967 if (REG_P (lo))
1968 return mode == SImode && REGNO (lo) + 1 == REGNO (hi);
1969 if (CONST_INT_P (lo))
1970 return INTVAL (hi) == 0 && 0 <= INTVAL (lo) && INTVAL (lo) < 64;
1971 if (CONST_INT_P (lo))
1972 return mode != SImode;
1973
1974 if (!MEM_P (lo))
1975 return false;
1976
1977 if (MEM_VOLATILE_P (lo) || MEM_VOLATILE_P (hi))
1978 return false;
1979
1980 lo = XEXP (lo, 0);
1981 hi = XEXP (hi, 0);
1982
1983 if (GET_CODE (lo) == POST_INC /* || GET_CODE (lo) == PRE_DEC */)
1984 return rtx_equal_p (lo, hi);
1985
1986 switch (GET_CODE (lo))
1987 {
1988 case REG:
1989 case SYMBOL_REF:
1990 lo_offset = 0;
1991 break;
1992 case CONST:
1993 lo = XEXP (lo, 0);
1994 /* FALLTHROUGH */
1995 case PLUS:
1996 if (!CONST_INT_P (XEXP (lo, 1)))
1997 return false;
1998 lo_offset = INTVAL (XEXP (lo, 1));
1999 lo = XEXP (lo, 0);
2000 break;
2001 default:
2002 return false;
2003 }
2004
2005 switch (GET_CODE (hi))
2006 {
2007 case REG:
2008 case SYMBOL_REF:
2009 hi_offset = 0;
2010 break;
2011 case CONST:
2012 hi = XEXP (hi, 0);
2013 /* FALLTHROUGH */
2014 case PLUS:
2015 if (!CONST_INT_P (XEXP (hi, 1)))
2016 return false;
2017 hi_offset = INTVAL (XEXP (hi, 1));
2018 hi = XEXP (hi, 0);
2019 break;
2020 default:
2021 return false;
2022 }
2023
2024 if (GET_CODE (lo) == MULT || GET_CODE (lo) == PLUS)
2025 return false;
2026
2027 return rtx_equal_p (lo, hi)
2028 && hi_offset - lo_offset == GET_MODE_SIZE (mode);
2029 }
2030
2031 /* Output assembler code for a block containing the constant parts
2032 of a trampoline, leaving space for the variable parts. */
2033
2034 /* On the VAX, the trampoline contains an entry mask and two instructions:
2035 .word NN
 2036 movl $STATIC,r0 (store the function's static chain)
2037 jmp *$FUNCTION (jump to function code at address FUNCTION) */
2038
2039 static void
2040 vax_asm_trampoline_template (FILE *f ATTRIBUTE_UNUSED)
2041 {
2042 assemble_aligned_integer (2, const0_rtx);
2043 assemble_aligned_integer (2, GEN_INT (0x8fd0));
2044 assemble_aligned_integer (4, const0_rtx);
2045 assemble_aligned_integer (1, GEN_INT (0x50 + STATIC_CHAIN_REGNUM));
2046 assemble_aligned_integer (2, GEN_INT (0x9f17));
2047 assemble_aligned_integer (4, const0_rtx);
2048 }
2049
2050 /* We copy the register-mask from the function's pure code
2051 to the start of the trampoline. */
2052
2053 static void
2054 vax_trampoline_init (rtx m_tramp, tree fndecl, rtx cxt)
2055 {
2056 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
2057 rtx mem;
2058
2059 emit_block_move (m_tramp, assemble_trampoline_template (),
2060 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
2061
2062 mem = adjust_address (m_tramp, HImode, 0);
2063 emit_move_insn (mem, gen_const_mem (HImode, fnaddr));
2064
2065 mem = adjust_address (m_tramp, SImode, 4);
2066 emit_move_insn (mem, cxt);
2067 mem = adjust_address (m_tramp, SImode, 11);
2068 emit_move_insn (mem, plus_constant (fnaddr, 2));
2069 emit_insn (gen_sync_istream ());
2070 }
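For orientation (an editorial note, not in the changeset), the offsets 0, 4 and 11 patched by vax_trampoline_init correspond to the slots left in the template just above. A sketch that lays out the 15-byte image on a little-endian host, with made-up mask, chain and address values:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Build the trampoline image described by vax_asm_trampoline_template:
   entry mask, "movl $chain,r0", "jmp @#(FUNCTION+2)", then patch the three
   slots the way vax_trampoline_init does.  Assumes a little-endian host so
   the byte order matches the VAX encoding, and that the static chain
   register is r0 as the comment above states.  */
int main (void)
{
  uint8_t t[15] = {
    0x00, 0x00,              /* entry mask, patched at offset 0              */
    0xd0, 0x8f,              /* movl opcode + immediate-longword mode        */
    0x00, 0x00, 0x00, 0x00,  /* static chain value, patched at offset 4      */
    0x50,                    /* register mode byte for r0 (0x50 + regno)     */
    0x17, 0x9f,              /* jmp opcode + absolute-longword mode          */
    0x00, 0x00, 0x00, 0x00,  /* FUNCTION+2, patched at offset 11             */
  };
  uint16_t mask = 0x0ffc;              /* example entry mask   */
  uint32_t chain = 0x7fffe000;         /* example static chain */
  uint32_t target = 0x00010000 + 2;    /* example FUNCTION + 2 */

  memcpy (t + 0, &mask, sizeof mask);
  memcpy (t + 4, &chain, sizeof chain);
  memcpy (t + 11, &target, sizeof target);

  for (int i = 0; i < 15; i++)
    printf ("%02x%c", t[i], i == 14 ? '\n' : ' ');
  return 0;
}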
2071