comparison gcc/config/pa/pa.c @ 67:f6334be47118

update gcc from gcc-4.6-20100522 to gcc-4.6-20110318
author nobuyasu <dimolto@cr.ie.u-ryukyu.ac.jp>
date Tue, 22 Mar 2011 17:18:12 +0900
parents b7f97abdc517
children 04ced10e8804
comparing revisions 65:65488c3d617d and 67:f6334be47118

37 #include "expr.h" 37 #include "expr.h"
38 #include "optabs.h" 38 #include "optabs.h"
39 #include "reload.h" 39 #include "reload.h"
40 #include "integrate.h" 40 #include "integrate.h"
41 #include "function.h" 41 #include "function.h"
42 #include "toplev.h" 42 #include "diagnostic-core.h"
43 #include "ggc.h" 43 #include "ggc.h"
44 #include "recog.h" 44 #include "recog.h"
45 #include "predict.h" 45 #include "predict.h"
46 #include "tm_p.h" 46 #include "tm_p.h"
47 #include "target.h" 47 #include "target.h"
48 #include "target-def.h" 48 #include "target-def.h"
49 #include "langhooks.h"
49 #include "df.h" 50 #include "df.h"
50 51
51 /* Return nonzero if there is a bypass for the output of 52 /* Return nonzero if there is a bypass for the output of
52 OUT_INSN and the fp store IN_INSN. */ 53 OUT_INSN and the fp store IN_INSN. */
53 int 54 int
81 #else 82 #else
82 #define DO_FRAME_NOTES 0 83 #define DO_FRAME_NOTES 0
83 #endif 84 #endif
84 #endif 85 #endif
85 86
87 static void pa_option_override (void);
86 static void copy_reg_pointer (rtx, rtx); 88 static void copy_reg_pointer (rtx, rtx);
87 static void fix_range (const char *); 89 static void fix_range (const char *);
88 static bool pa_handle_option (size_t, const char *, int); 90 static bool pa_handle_option (size_t, const char *, int);
91 static int hppa_register_move_cost (enum machine_mode mode, reg_class_t,
92 reg_class_t);
89 static int hppa_address_cost (rtx, bool); 93 static int hppa_address_cost (rtx, bool);
90 static bool hppa_rtx_costs (rtx, int, int, int *, bool); 94 static bool hppa_rtx_costs (rtx, int, int, int *, bool);
91 static inline rtx force_mode (enum machine_mode, rtx); 95 static inline rtx force_mode (enum machine_mode, rtx);
92 static void pa_reorg (void); 96 static void pa_reorg (void);
93 static void pa_combine_instructions (void); 97 static void pa_combine_instructions (void);
101 static void store_reg (int, HOST_WIDE_INT, int); 105 static void store_reg (int, HOST_WIDE_INT, int);
102 static void store_reg_modify (int, int, HOST_WIDE_INT); 106 static void store_reg_modify (int, int, HOST_WIDE_INT);
103 static void load_reg (int, HOST_WIDE_INT, int); 107 static void load_reg (int, HOST_WIDE_INT, int);
104 static void set_reg_plus_d (int, int, HOST_WIDE_INT, int); 108 static void set_reg_plus_d (int, int, HOST_WIDE_INT, int);
105 static rtx pa_function_value (const_tree, const_tree, bool); 109 static rtx pa_function_value (const_tree, const_tree, bool);
110 static rtx pa_libcall_value (enum machine_mode, const_rtx);
111 static bool pa_function_value_regno_p (const unsigned int);
106 static void pa_output_function_prologue (FILE *, HOST_WIDE_INT); 112 static void pa_output_function_prologue (FILE *, HOST_WIDE_INT);
107 static void update_total_code_bytes (unsigned int); 113 static void update_total_code_bytes (unsigned int);
108 static void pa_output_function_epilogue (FILE *, HOST_WIDE_INT); 114 static void pa_output_function_epilogue (FILE *, HOST_WIDE_INT);
109 static int pa_adjust_cost (rtx, rtx, rtx, int); 115 static int pa_adjust_cost (rtx, rtx, rtx, int);
110 static int pa_adjust_priority (rtx, int); 116 static int pa_adjust_priority (rtx, int);
122 #if !defined(USE_COLLECT2) 128 #if !defined(USE_COLLECT2)
123 static void pa_asm_out_constructor (rtx, int); 129 static void pa_asm_out_constructor (rtx, int);
124 static void pa_asm_out_destructor (rtx, int); 130 static void pa_asm_out_destructor (rtx, int);
125 #endif 131 #endif
126 static void pa_init_builtins (void); 132 static void pa_init_builtins (void);
133 static rtx pa_expand_builtin (tree, rtx, rtx, enum machine_mode mode, int);
127 static rtx hppa_builtin_saveregs (void); 134 static rtx hppa_builtin_saveregs (void);
128 static void hppa_va_start (tree, rtx); 135 static void hppa_va_start (tree, rtx);
129 static tree hppa_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *); 136 static tree hppa_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
130 static bool pa_scalar_mode_supported_p (enum machine_mode); 137 static bool pa_scalar_mode_supported_p (enum machine_mode);
131 static bool pa_commutative_p (const_rtx x, int outer_code); 138 static bool pa_commutative_p (const_rtx x, int outer_code);
144 static void output_deferred_plabels (void); 151 static void output_deferred_plabels (void);
145 static void output_deferred_profile_counters (void) ATTRIBUTE_UNUSED; 152 static void output_deferred_profile_counters (void) ATTRIBUTE_UNUSED;
146 #ifdef ASM_OUTPUT_EXTERNAL_REAL 153 #ifdef ASM_OUTPUT_EXTERNAL_REAL
147 static void pa_hpux_file_end (void); 154 static void pa_hpux_file_end (void);
148 #endif 155 #endif
149 #ifdef HPUX_LONG_DOUBLE_LIBRARY 156 #if HPUX_LONG_DOUBLE_LIBRARY
150 static void pa_hpux_init_libfuncs (void); 157 static void pa_hpux_init_libfuncs (void);
151 #endif 158 #endif
152 static rtx pa_struct_value_rtx (tree, int); 159 static rtx pa_struct_value_rtx (tree, int);
153 static bool pa_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode, 160 static bool pa_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
154 const_tree, bool); 161 const_tree, bool);
155 static int pa_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode, 162 static int pa_arg_partial_bytes (CUMULATIVE_ARGS *, enum machine_mode,
156 tree, bool); 163 tree, bool);
164 static void pa_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
165 const_tree, bool);
166 static rtx pa_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
167 const_tree, bool);
168 static unsigned int pa_function_arg_boundary (enum machine_mode, const_tree);
157 static struct machine_function * pa_init_machine_status (void); 169 static struct machine_function * pa_init_machine_status (void);
158 static enum reg_class pa_secondary_reload (bool, rtx, enum reg_class, 170 static reg_class_t pa_secondary_reload (bool, rtx, reg_class_t,
159 enum machine_mode, 171 enum machine_mode,
160 secondary_reload_info *); 172 secondary_reload_info *);
161 static void pa_extra_live_on_entry (bitmap); 173 static void pa_extra_live_on_entry (bitmap);
162 static enum machine_mode pa_promote_function_mode (const_tree, 174 static enum machine_mode pa_promote_function_mode (const_tree,
163 enum machine_mode, int *, 175 enum machine_mode, int *,
164 const_tree, int); 176 const_tree, int);
165 177
166 static void pa_asm_trampoline_template (FILE *); 178 static void pa_asm_trampoline_template (FILE *);
167 static void pa_trampoline_init (rtx, tree, rtx); 179 static void pa_trampoline_init (rtx, tree, rtx);
168 static rtx pa_trampoline_adjust_address (rtx); 180 static rtx pa_trampoline_adjust_address (rtx);
169 static rtx pa_delegitimize_address (rtx); 181 static rtx pa_delegitimize_address (rtx);
182 static bool pa_print_operand_punct_valid_p (unsigned char);
183 static rtx pa_internal_arg_pointer (void);
184 static bool pa_can_eliminate (const int, const int);
185 static void pa_conditional_register_usage (void);
186 static enum machine_mode pa_c_mode_for_suffix (char);
187 static section *pa_function_section (tree, enum node_frequency, bool, bool);
170 188
171 /* The following extra sections are only used for SOM. */ 189 /* The following extra sections are only used for SOM. */
172 static GTY(()) section *som_readonly_data_section; 190 static GTY(()) section *som_readonly_data_section;
173 static GTY(()) section *som_one_only_readonly_data_section; 191 static GTY(()) section *som_one_only_readonly_data_section;
174 static GTY(()) section *som_one_only_data_section; 192 static GTY(()) section *som_one_only_data_section;
207 }; 225 };
208 static GTY((length ("n_deferred_plabels"))) struct deferred_plabel * 226 static GTY((length ("n_deferred_plabels"))) struct deferred_plabel *
209 deferred_plabels; 227 deferred_plabels;
210 static size_t n_deferred_plabels = 0; 228 static size_t n_deferred_plabels = 0;
211 229
230 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
231 static const struct default_options pa_option_optimization_table[] =
232 {
233 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
234 { OPT_LEVELS_NONE, 0, NULL, 0 }
235 };
236
212 237
213 /* Initialize the GCC target structure. */ 238 /* Initialize the GCC target structure. */
239
240 #undef TARGET_OPTION_OVERRIDE
241 #define TARGET_OPTION_OVERRIDE pa_option_override
242 #undef TARGET_OPTION_OPTIMIZATION_TABLE
243 #define TARGET_OPTION_OPTIMIZATION_TABLE pa_option_optimization_table
214 244
215 #undef TARGET_ASM_ALIGNED_HI_OP 245 #undef TARGET_ASM_ALIGNED_HI_OP
216 #define TARGET_ASM_ALIGNED_HI_OP "\t.half\t" 246 #define TARGET_ASM_ALIGNED_HI_OP "\t.half\t"
217 #undef TARGET_ASM_ALIGNED_SI_OP 247 #undef TARGET_ASM_ALIGNED_SI_OP
218 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t" 248 #define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
232 #undef TARGET_ASM_FUNCTION_EPILOGUE 262 #undef TARGET_ASM_FUNCTION_EPILOGUE
233 #define TARGET_ASM_FUNCTION_EPILOGUE pa_output_function_epilogue 263 #define TARGET_ASM_FUNCTION_EPILOGUE pa_output_function_epilogue
234 264
235 #undef TARGET_FUNCTION_VALUE 265 #undef TARGET_FUNCTION_VALUE
236 #define TARGET_FUNCTION_VALUE pa_function_value 266 #define TARGET_FUNCTION_VALUE pa_function_value
267 #undef TARGET_LIBCALL_VALUE
268 #define TARGET_LIBCALL_VALUE pa_libcall_value
269 #undef TARGET_FUNCTION_VALUE_REGNO_P
270 #define TARGET_FUNCTION_VALUE_REGNO_P pa_function_value_regno_p
237 271
238 #undef TARGET_LEGITIMIZE_ADDRESS 272 #undef TARGET_LEGITIMIZE_ADDRESS
239 #define TARGET_LEGITIMIZE_ADDRESS hppa_legitimize_address 273 #define TARGET_LEGITIMIZE_ADDRESS hppa_legitimize_address
240 274
241 #undef TARGET_SCHED_ADJUST_COST 275 #undef TARGET_SCHED_ADJUST_COST
266 #define TARGET_ASM_FILE_END pa_hpux_file_end 300 #define TARGET_ASM_FILE_END pa_hpux_file_end
267 #else 301 #else
268 #define TARGET_ASM_FILE_END output_deferred_plabels 302 #define TARGET_ASM_FILE_END output_deferred_plabels
269 #endif 303 #endif
270 304
305 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
306 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P pa_print_operand_punct_valid_p
307
271 #if !defined(USE_COLLECT2) 308 #if !defined(USE_COLLECT2)
272 #undef TARGET_ASM_CONSTRUCTOR 309 #undef TARGET_ASM_CONSTRUCTOR
273 #define TARGET_ASM_CONSTRUCTOR pa_asm_out_constructor 310 #define TARGET_ASM_CONSTRUCTOR pa_asm_out_constructor
274 #undef TARGET_ASM_DESTRUCTOR 311 #undef TARGET_ASM_DESTRUCTOR
275 #define TARGET_ASM_DESTRUCTOR pa_asm_out_destructor 312 #define TARGET_ASM_DESTRUCTOR pa_asm_out_destructor
281 #define TARGET_HANDLE_OPTION pa_handle_option 318 #define TARGET_HANDLE_OPTION pa_handle_option
282 319
283 #undef TARGET_INIT_BUILTINS 320 #undef TARGET_INIT_BUILTINS
284 #define TARGET_INIT_BUILTINS pa_init_builtins 321 #define TARGET_INIT_BUILTINS pa_init_builtins
285 322
323 #undef TARGET_EXPAND_BUILTIN
324 #define TARGET_EXPAND_BUILTIN pa_expand_builtin
325
326 #undef TARGET_REGISTER_MOVE_COST
327 #define TARGET_REGISTER_MOVE_COST hppa_register_move_cost
286 #undef TARGET_RTX_COSTS 328 #undef TARGET_RTX_COSTS
287 #define TARGET_RTX_COSTS hppa_rtx_costs 329 #define TARGET_RTX_COSTS hppa_rtx_costs
288 #undef TARGET_ADDRESS_COST 330 #undef TARGET_ADDRESS_COST
289 #define TARGET_ADDRESS_COST hppa_address_cost 331 #define TARGET_ADDRESS_COST hppa_address_cost
290 332
291 #undef TARGET_MACHINE_DEPENDENT_REORG 333 #undef TARGET_MACHINE_DEPENDENT_REORG
292 #define TARGET_MACHINE_DEPENDENT_REORG pa_reorg 334 #define TARGET_MACHINE_DEPENDENT_REORG pa_reorg
293 335
294 #ifdef HPUX_LONG_DOUBLE_LIBRARY 336 #if HPUX_LONG_DOUBLE_LIBRARY
295 #undef TARGET_INIT_LIBFUNCS 337 #undef TARGET_INIT_LIBFUNCS
296 #define TARGET_INIT_LIBFUNCS pa_hpux_init_libfuncs 338 #define TARGET_INIT_LIBFUNCS pa_hpux_init_libfuncs
297 #endif 339 #endif
298 340
299 #undef TARGET_PROMOTE_FUNCTION_MODE 341 #undef TARGET_PROMOTE_FUNCTION_MODE
311 #define TARGET_PASS_BY_REFERENCE pa_pass_by_reference 353 #define TARGET_PASS_BY_REFERENCE pa_pass_by_reference
312 #undef TARGET_CALLEE_COPIES 354 #undef TARGET_CALLEE_COPIES
313 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true 355 #define TARGET_CALLEE_COPIES hook_bool_CUMULATIVE_ARGS_mode_tree_bool_true
314 #undef TARGET_ARG_PARTIAL_BYTES 356 #undef TARGET_ARG_PARTIAL_BYTES
315 #define TARGET_ARG_PARTIAL_BYTES pa_arg_partial_bytes 357 #define TARGET_ARG_PARTIAL_BYTES pa_arg_partial_bytes
358 #undef TARGET_FUNCTION_ARG
359 #define TARGET_FUNCTION_ARG pa_function_arg
360 #undef TARGET_FUNCTION_ARG_ADVANCE
361 #define TARGET_FUNCTION_ARG_ADVANCE pa_function_arg_advance
362 #undef TARGET_FUNCTION_ARG_BOUNDARY
363 #define TARGET_FUNCTION_ARG_BOUNDARY pa_function_arg_boundary
316 364
317 #undef TARGET_EXPAND_BUILTIN_SAVEREGS 365 #undef TARGET_EXPAND_BUILTIN_SAVEREGS
318 #define TARGET_EXPAND_BUILTIN_SAVEREGS hppa_builtin_saveregs 366 #define TARGET_EXPAND_BUILTIN_SAVEREGS hppa_builtin_saveregs
319 #undef TARGET_EXPAND_BUILTIN_VA_START 367 #undef TARGET_EXPAND_BUILTIN_VA_START
320 #define TARGET_EXPAND_BUILTIN_VA_START hppa_va_start 368 #define TARGET_EXPAND_BUILTIN_VA_START hppa_va_start
339 #define TARGET_TRAMPOLINE_INIT pa_trampoline_init 387 #define TARGET_TRAMPOLINE_INIT pa_trampoline_init
340 #undef TARGET_TRAMPOLINE_ADJUST_ADDRESS 388 #undef TARGET_TRAMPOLINE_ADJUST_ADDRESS
341 #define TARGET_TRAMPOLINE_ADJUST_ADDRESS pa_trampoline_adjust_address 389 #define TARGET_TRAMPOLINE_ADJUST_ADDRESS pa_trampoline_adjust_address
342 #undef TARGET_DELEGITIMIZE_ADDRESS 390 #undef TARGET_DELEGITIMIZE_ADDRESS
343 #define TARGET_DELEGITIMIZE_ADDRESS pa_delegitimize_address 391 #define TARGET_DELEGITIMIZE_ADDRESS pa_delegitimize_address
392 #undef TARGET_INTERNAL_ARG_POINTER
393 #define TARGET_INTERNAL_ARG_POINTER pa_internal_arg_pointer
394 #undef TARGET_CAN_ELIMINATE
395 #define TARGET_CAN_ELIMINATE pa_can_eliminate
396 #undef TARGET_CONDITIONAL_REGISTER_USAGE
397 #define TARGET_CONDITIONAL_REGISTER_USAGE pa_conditional_register_usage
398 #undef TARGET_C_MODE_FOR_SUFFIX
399 #define TARGET_C_MODE_FOR_SUFFIX pa_c_mode_for_suffix
400 #undef TARGET_ASM_FUNCTION_SECTION
401 #define TARGET_ASM_FUNCTION_SECTION pa_function_section
344 402
345 struct gcc_target targetm = TARGET_INITIALIZER; 403 struct gcc_target targetm = TARGET_INITIALIZER;
346 404
347 /* Parse the -mfixed-range= option string. */ 405 /* Parse the -mfixed-range= option string. */
348 406
484 default: 542 default:
485 return true; 543 return true;
486 } 544 }
487 } 545 }
488 546
489 void 547 /* Implement the TARGET_OPTION_OVERRIDE hook. */
490 override_options (void) 548
549 static void
550 pa_option_override (void)
491 { 551 {
492 /* Unconditional branches in the delay slot are not compatible with dwarf2 552 /* Unconditional branches in the delay slot are not compatible with dwarf2
493 call frame information. There is no benefit in using this optimization 553 call frame information. There is no benefit in using this optimization
494 on PA8000 and later processors. */ 554 on PA8000 and later processors. */
495 if (pa_cpu >= PROCESSOR_8000 555 if (pa_cpu >= PROCESSOR_8000
496 || (! USING_SJLJ_EXCEPTIONS && flag_exceptions) 556 || (targetm.except_unwind_info (&global_options) == UI_DWARF2
557 && flag_exceptions)
497 || flag_unwind_tables) 558 || flag_unwind_tables)
498 target_flags &= ~MASK_JUMP_IN_DELAY; 559 target_flags &= ~MASK_JUMP_IN_DELAY;
499 560
500 if (flag_pic && TARGET_PORTABLE_RUNTIME) 561 if (flag_pic && TARGET_PORTABLE_RUNTIME)
501 { 562 {
517 /* We only support the "big PIC" model now. And we always generate PIC 578 /* We only support the "big PIC" model now. And we always generate PIC
518 code when in 64bit mode. */ 579 code when in 64bit mode. */
519 if (flag_pic == 1 || TARGET_64BIT) 580 if (flag_pic == 1 || TARGET_64BIT)
520 flag_pic = 2; 581 flag_pic = 2;
521 582
583 /* Disable -freorder-blocks-and-partition as we don't support hot and
584 cold partitioning. */
585 if (flag_reorder_blocks_and_partition)
586 {
587 inform (input_location,
588 "-freorder-blocks-and-partition does not work "
589 "on this architecture");
590 flag_reorder_blocks_and_partition = 0;
591 flag_reorder_blocks = 1;
592 }
593
522 /* We can't guarantee that .dword is available for 32-bit targets. */ 594 /* We can't guarantee that .dword is available for 32-bit targets. */
523 if (UNITS_PER_WORD == 4) 595 if (UNITS_PER_WORD == 4)
524 targetm.asm_out.aligned_op.di = NULL; 596 targetm.asm_out.aligned_op.di = NULL;
525 597
526 /* The unaligned ops are only available when using GAS. */ 598 /* The unaligned ops are only available when using GAS. */
531 targetm.asm_out.unaligned_op.di = NULL; 603 targetm.asm_out.unaligned_op.di = NULL;
532 } 604 }
533 605
534 init_machine_status = pa_init_machine_status; 606 init_machine_status = pa_init_machine_status;
535 } 607 }
608
609 enum pa_builtins
610 {
611 PA_BUILTIN_COPYSIGNQ,
612 PA_BUILTIN_FABSQ,
613 PA_BUILTIN_INFQ,
614 PA_BUILTIN_HUGE_VALQ,
615 PA_BUILTIN_max
616 };
617
618 static GTY(()) tree pa_builtins[(int) PA_BUILTIN_max];
536 619
537 static void 620 static void
538 pa_init_builtins (void) 621 pa_init_builtins (void)
539 { 622 {
540 #ifdef DONT_HAVE_FPUTC_UNLOCKED 623 #ifdef DONT_HAVE_FPUTC_UNLOCKED
547 if (built_in_decls [BUILT_IN_FINITE]) 630 if (built_in_decls [BUILT_IN_FINITE])
548 set_user_assembler_name (built_in_decls [BUILT_IN_FINITE], "_Isfinite"); 631 set_user_assembler_name (built_in_decls [BUILT_IN_FINITE], "_Isfinite");
549 if (built_in_decls [BUILT_IN_FINITEF]) 632 if (built_in_decls [BUILT_IN_FINITEF])
550 set_user_assembler_name (built_in_decls [BUILT_IN_FINITEF], "_Isfinitef"); 633 set_user_assembler_name (built_in_decls [BUILT_IN_FINITEF], "_Isfinitef");
551 #endif 634 #endif
635
636 if (HPUX_LONG_DOUBLE_LIBRARY)
637 {
638 tree decl, ftype;
639
640 /* Under HPUX, the __float128 type is a synonym for "long double". */
641 (*lang_hooks.types.register_builtin_type) (long_double_type_node,
642 "__float128");
643
644 /* TFmode support builtins. */
645 ftype = build_function_type_list (long_double_type_node,
646 long_double_type_node,
647 NULL_TREE);
648 decl = add_builtin_function ("__builtin_fabsq", ftype,
649 PA_BUILTIN_FABSQ, BUILT_IN_MD,
650 "_U_Qfabs", NULL_TREE);
651 TREE_READONLY (decl) = 1;
652 pa_builtins[PA_BUILTIN_FABSQ] = decl;
653
654 ftype = build_function_type_list (long_double_type_node,
655 long_double_type_node,
656 long_double_type_node,
657 NULL_TREE);
658 decl = add_builtin_function ("__builtin_copysignq", ftype,
659 PA_BUILTIN_COPYSIGNQ, BUILT_IN_MD,
660 "_U_Qfcopysign", NULL_TREE);
661 TREE_READONLY (decl) = 1;
662 pa_builtins[PA_BUILTIN_COPYSIGNQ] = decl;
663
664 ftype = build_function_type (long_double_type_node, void_list_node);
665 decl = add_builtin_function ("__builtin_infq", ftype,
666 PA_BUILTIN_INFQ, BUILT_IN_MD,
667 NULL, NULL_TREE);
668 pa_builtins[PA_BUILTIN_INFQ] = decl;
669
670 decl = add_builtin_function ("__builtin_huge_valq", ftype,
671 PA_BUILTIN_HUGE_VALQ, BUILT_IN_MD,
672 NULL, NULL_TREE);
673 pa_builtins[PA_BUILTIN_HUGE_VALQ] = decl;
674 }
675 }
676
677 static rtx
678 pa_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
679 enum machine_mode mode ATTRIBUTE_UNUSED,
680 int ignore ATTRIBUTE_UNUSED)
681 {
682 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
683 unsigned int fcode = DECL_FUNCTION_CODE (fndecl);
684
685 switch (fcode)
686 {
687 case PA_BUILTIN_FABSQ:
688 case PA_BUILTIN_COPYSIGNQ:
689 return expand_call (exp, target, ignore);
690
691 case PA_BUILTIN_INFQ:
692 case PA_BUILTIN_HUGE_VALQ:
693 {
694 enum machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
695 REAL_VALUE_TYPE inf;
696 rtx tmp;
697
698 real_inf (&inf);
699 tmp = CONST_DOUBLE_FROM_REAL_VALUE (inf, target_mode);
700
701 tmp = validize_mem (force_const_mem (target_mode, tmp));
702
703 if (target == 0)
704 target = gen_reg_rtx (target_mode);
705
706 emit_move_insn (target, tmp);
707 return target;
708 }
709
710 default:
711 gcc_unreachable ();
712 }
713
714 return NULL_RTX;
552 } 715 }
553 716
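A minimal usage sketch for the quad-float builtins registered and expanded above (hypothetical user code, not part of the changeset; it assumes a target where HPUX_LONG_DOUBLE_LIBRARY is true, so __float128 aliases long double, and that the 'q' literal suffix is enabled by pa_c_mode_for_suffix later in this change):

/* Hypothetical usage of the new builtins; not part of the patch.  */
__float128 sketch_inf (void)
{
  return __builtin_infq ();   /* expanded inline to an Inf constant by pa_expand_builtin */
}

__float128 sketch_flip_sign (__float128 x)
{
  /* These expand as calls to the HP-UX helpers _U_Qfabs and _U_Qfcopysign
     named in pa_init_builtins above.  */
  return __builtin_copysignq (__builtin_fabsq (x), -1.0q);
}
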
554 /* Function to init struct machine_function. 717 /* Function to init struct machine_function.
555 This will be called, via a pointer variable, 718 This will be called, via a pointer variable,
556 from push_function_context. */ 719 from push_function_context. */
557 720
558 static struct machine_function * 721 static struct machine_function *
559 pa_init_machine_status (void) 722 pa_init_machine_status (void)
560 { 723 {
561 return GGC_CNEW (machine_function); 724 return ggc_alloc_cleared_machine_function ();
562 } 725 }
563 726
564 /* If FROM is a probable pointer register, mark TO as a probable 727 /* If FROM is a probable pointer register, mark TO as a probable
565 pointer register with the same pointer alignment as FROM. */ 728 pointer register with the same pointer alignment as FROM. */
566 729
1283 } 1446 }
1284 1447
1285 return orig; 1448 return orig;
1286 } 1449 }
1287 1450
1451 /* Implement the TARGET_REGISTER_MOVE_COST hook.
1452
1453 Compute extra cost of moving data between one register class
1454 and another.
1455
1456 Make moves from SAR so expensive they should never happen. We used to
1457 have 0xffff here, but that generates overflow in rare cases.
1458
1459 Copies involving a FP register and a non-FP register are relatively
1460 expensive because they must go through memory.
1461
1462 Other copies are reasonably cheap. */
1463
1464 static int
1465 hppa_register_move_cost (enum machine_mode mode ATTRIBUTE_UNUSED,
1466 reg_class_t from, reg_class_t to)
1467 {
1468 if (from == SHIFT_REGS)
1469 return 0x100;
1470 else if ((FP_REG_CLASS_P (from) && ! FP_REG_CLASS_P (to))
1471 || (FP_REG_CLASS_P (to) && ! FP_REG_CLASS_P (from)))
1472 return 16;
1473 else
1474 return 2;
1475 }
1476
1288 /* For the HPPA, REG and REG+CONST is cost 0 1477 /* For the HPPA, REG and REG+CONST is cost 0
1289 and addresses involving symbolic constants are cost 2. 1478 and addresses involving symbolic constants are cost 2.
1290 1479
1291 PIC addresses are very expensive. 1480 PIC addresses are very expensive.
1292 1481
1591 /* Handle secondary reloads for loads of FP registers from constant 1780 /* Handle secondary reloads for loads of FP registers from constant
1592 expressions by forcing the constant into memory. 1781 expressions by forcing the constant into memory.
1593 1782
1594 Use scratch_reg to hold the address of the memory location. 1783 Use scratch_reg to hold the address of the memory location.
1595 1784
1596 The proper fix is to change PREFERRED_RELOAD_CLASS to return 1785 The proper fix is to change TARGET_PREFERRED_RELOAD_CLASS to return
1597 NO_REGS when presented with a const_int and a register class 1786 NO_REGS when presented with a const_int and a register class
1598 containing only FP registers. Doing so unfortunately creates 1787 containing only FP registers. Doing so unfortunately creates
1599 more problems than it solves. Fix this for 2.5. */ 1788 more problems than it solves. Fix this for 2.5. */
1600 else if (scratch_reg 1789 else if (scratch_reg
1601 && CONSTANT_P (operand1) 1790 && CONSTANT_P (operand1)
1694 { 1883 {
1695 if (REG_POINTER (operand1) 1884 if (REG_POINTER (operand1)
1696 && !REG_POINTER (operand0) 1885 && !REG_POINTER (operand0)
1697 && !HARD_REGISTER_P (operand0)) 1886 && !HARD_REGISTER_P (operand0))
1698 copy_reg_pointer (operand0, operand1); 1887 copy_reg_pointer (operand0, operand1);
1699 else if (REG_POINTER (operand0)
1700 && !REG_POINTER (operand1)
1701 && !HARD_REGISTER_P (operand1))
1702 copy_reg_pointer (operand1, operand0);
1703 } 1888 }
1704 1889
1705 /* When MEMs are broken out, the REG_POINTER flag doesn't 1890 /* When MEMs are broken out, the REG_POINTER flag doesn't
1706 get set. In some cases, we can set the REG_POINTER flag 1891 get set. In some cases, we can set the REG_POINTER flag
1707 from the declaration for the MEM. */ 1892 from the declaration for the MEM. */
3678 local_fsize = (size + UNITS_PER_WORD - 1) & ~(UNITS_PER_WORD - 1); 3863 local_fsize = (size + UNITS_PER_WORD - 1) & ~(UNITS_PER_WORD - 1);
3679 if (local_fsize || frame_pointer_needed) 3864 if (local_fsize || frame_pointer_needed)
3680 local_fsize += STARTING_FRAME_OFFSET; 3865 local_fsize += STARTING_FRAME_OFFSET;
3681 3866
3682 actual_fsize = compute_frame_size (size, &save_fregs); 3867 actual_fsize = compute_frame_size (size, &save_fregs);
3868 if (flag_stack_usage)
3869 current_function_static_stack_size = actual_fsize;
3683 3870
3684 /* Compute a few things we will use often. */ 3871 /* Compute a few things we will use often. */
3685 tmpreg = gen_rtx_REG (word_mode, 1); 3872 tmpreg = gen_rtx_REG (word_mode, 1);
3686 3873
3687 /* Save RP first. The calling conventions manual states RP will 3874 /* Save RP first. The calling conventions manual states RP will
3704 new stack pointer, then store away the saved old frame pointer 3891 new stack pointer, then store away the saved old frame pointer
3705 into the stack at sp and at the same time update the stack 3892 into the stack at sp and at the same time update the stack
3706 pointer by actual_fsize bytes. Two versions, first 3893 pointer by actual_fsize bytes. Two versions, first
3707 handles small (<8k) frames. The second handles large (>=8k) 3894 handles small (<8k) frames. The second handles large (>=8k)
3708 frames. */ 3895 frames. */
3709 insn = emit_move_insn (tmpreg, frame_pointer_rtx); 3896 insn = emit_move_insn (tmpreg, hard_frame_pointer_rtx);
3710 if (DO_FRAME_NOTES) 3897 if (DO_FRAME_NOTES)
3711 RTX_FRAME_RELATED_P (insn) = 1; 3898 RTX_FRAME_RELATED_P (insn) = 1;
3712 3899
3713 insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx); 3900 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
3714 if (DO_FRAME_NOTES) 3901 if (DO_FRAME_NOTES)
3715 RTX_FRAME_RELATED_P (insn) = 1; 3902 RTX_FRAME_RELATED_P (insn) = 1;
3716 3903
3717 if (VAL_14_BITS_P (actual_fsize)) 3904 if (VAL_14_BITS_P (actual_fsize))
3718 store_reg_modify (STACK_POINTER_REGNUM, 1, actual_fsize); 3905 store_reg_modify (STACK_POINTER_REGNUM, 1, actual_fsize);
3752 { 3939 {
3753 rtx addr = gen_rtx_PLUS (word_mode, stack_pointer_rtx, 3940 rtx addr = gen_rtx_PLUS (word_mode, stack_pointer_rtx,
3754 GEN_INT (TARGET_64BIT ? -8 : -4)); 3941 GEN_INT (TARGET_64BIT ? -8 : -4));
3755 3942
3756 emit_move_insn (gen_rtx_MEM (word_mode, addr), 3943 emit_move_insn (gen_rtx_MEM (word_mode, addr),
3757 frame_pointer_rtx); 3944 hard_frame_pointer_rtx);
3758 } 3945 }
3759 else 3946 else
3760 emit_insn (gen_blockage ()); 3947 emit_insn (gen_blockage ());
3761 } 3948 }
3762 /* no frame pointer needed. */ 3949 /* no frame pointer needed. */
3795 { 3982 {
3796 regno = EH_RETURN_DATA_REGNO (i); 3983 regno = EH_RETURN_DATA_REGNO (i);
3797 if (regno == INVALID_REGNUM) 3984 if (regno == INVALID_REGNUM)
3798 break; 3985 break;
3799 3986
3800 store_reg (regno, offset, FRAME_POINTER_REGNUM); 3987 store_reg (regno, offset, HARD_FRAME_POINTER_REGNUM);
3801 offset += UNITS_PER_WORD; 3988 offset += UNITS_PER_WORD;
3802 } 3989 }
3803 } 3990 }
3804 3991
3805 for (i = 18; i >= 4; i--) 3992 for (i = 18; i >= 4; i--)
3806 if (df_regs_ever_live_p (i) && ! call_used_regs[i]) 3993 if (df_regs_ever_live_p (i) && ! call_used_regs[i])
3807 { 3994 {
3808 store_reg (i, offset, FRAME_POINTER_REGNUM); 3995 store_reg (i, offset, HARD_FRAME_POINTER_REGNUM);
3809 offset += UNITS_PER_WORD; 3996 offset += UNITS_PER_WORD;
3810 gr_saved++; 3997 gr_saved++;
3811 } 3998 }
3812 /* Account for %r3 which is saved in a special place. */ 3999 /* Account for %r3 which is saved in a special place. */
3813 gr_saved++; 4000 gr_saved++;
3892 4079
3893 /* First get the frame or stack pointer to the start of the FP register 4080 /* First get the frame or stack pointer to the start of the FP register
3894 save area. */ 4081 save area. */
3895 if (frame_pointer_needed) 4082 if (frame_pointer_needed)
3896 { 4083 {
3897 set_reg_plus_d (1, FRAME_POINTER_REGNUM, offset, 0); 4084 set_reg_plus_d (1, HARD_FRAME_POINTER_REGNUM, offset, 0);
3898 base = frame_pointer_rtx; 4085 base = hard_frame_pointer_rtx;
3899 } 4086 }
3900 else 4087 else
3901 { 4088 {
3902 set_reg_plus_d (1, STACK_POINTER_REGNUM, offset, 0); 4089 set_reg_plus_d (1, STACK_POINTER_REGNUM, offset, 0);
3903 base = stack_pointer_rtx; 4090 base = stack_pointer_rtx;
4091 if (rp_saved) 4278 if (rp_saved)
4092 { 4279 {
4093 ret_off = TARGET_64BIT ? -16 : -20; 4280 ret_off = TARGET_64BIT ? -16 : -20;
4094 if (frame_pointer_needed) 4281 if (frame_pointer_needed)
4095 { 4282 {
4096 load_reg (2, ret_off, FRAME_POINTER_REGNUM); 4283 load_reg (2, ret_off, HARD_FRAME_POINTER_REGNUM);
4097 ret_off = 0; 4284 ret_off = 0;
4098 } 4285 }
4099 else 4286 else
4100 { 4287 {
4101 /* No frame pointer, and stack is smaller than 8k. */ 4288 /* No frame pointer, and stack is smaller than 8k. */
4122 { 4309 {
4123 regno = EH_RETURN_DATA_REGNO (i); 4310 regno = EH_RETURN_DATA_REGNO (i);
4124 if (regno == INVALID_REGNUM) 4311 if (regno == INVALID_REGNUM)
4125 break; 4312 break;
4126 4313
4127 load_reg (regno, offset, FRAME_POINTER_REGNUM); 4314 load_reg (regno, offset, HARD_FRAME_POINTER_REGNUM);
4128 offset += UNITS_PER_WORD; 4315 offset += UNITS_PER_WORD;
4129 } 4316 }
4130 } 4317 }
4131 4318
4132 for (i = 18; i >= 4; i--) 4319 for (i = 18; i >= 4; i--)
4133 if (df_regs_ever_live_p (i) && ! call_used_regs[i]) 4320 if (df_regs_ever_live_p (i) && ! call_used_regs[i])
4134 { 4321 {
4135 load_reg (i, offset, FRAME_POINTER_REGNUM); 4322 load_reg (i, offset, HARD_FRAME_POINTER_REGNUM);
4136 offset += UNITS_PER_WORD; 4323 offset += UNITS_PER_WORD;
4137 } 4324 }
4138 } 4325 }
4139 else 4326 else
4140 { 4327 {
4189 /* FP register restores. */ 4376 /* FP register restores. */
4190 if (save_fregs) 4377 if (save_fregs)
4191 { 4378 {
4192 /* Adjust the register to index off of. */ 4379 /* Adjust the register to index off of. */
4193 if (frame_pointer_needed) 4380 if (frame_pointer_needed)
4194 set_reg_plus_d (1, FRAME_POINTER_REGNUM, offset, 0); 4381 set_reg_plus_d (1, HARD_FRAME_POINTER_REGNUM, offset, 0);
4195 else 4382 else
4196 set_reg_plus_d (1, STACK_POINTER_REGNUM, offset, 0); 4383 set_reg_plus_d (1, STACK_POINTER_REGNUM, offset, 0);
4197 4384
4198 /* Actually do the restores now. */ 4385 /* Actually do the restores now. */
4199 for (i = FP_SAVED_REG_LAST; i >= FP_SAVED_REG_FIRST; i -= FP_REG_STEP) 4386 for (i = FP_SAVED_REG_LAST; i >= FP_SAVED_REG_FIRST; i -= FP_REG_STEP)
4217 pointer is initially set to fp + 64 to avoid a race condition. */ 4404 pointer is initially set to fp + 64 to avoid a race condition. */
4218 if (frame_pointer_needed) 4405 if (frame_pointer_needed)
4219 { 4406 {
4220 rtx delta = GEN_INT (-64); 4407 rtx delta = GEN_INT (-64);
4221 4408
4222 set_reg_plus_d (STACK_POINTER_REGNUM, FRAME_POINTER_REGNUM, 64, 0); 4409 set_reg_plus_d (STACK_POINTER_REGNUM, HARD_FRAME_POINTER_REGNUM, 64, 0);
4223 emit_insn (gen_pre_load (frame_pointer_rtx, stack_pointer_rtx, delta)); 4410 emit_insn (gen_pre_load (hard_frame_pointer_rtx,
4411 stack_pointer_rtx, delta));
4224 } 4412 }
4225 /* If we were deferring a callee register restore, do it now. */ 4413 /* If we were deferring a callee register restore, do it now. */
4226 else if (merge_sp_adjust_with_load) 4414 else if (merge_sp_adjust_with_load)
4227 { 4415 {
4228 rtx delta = GEN_INT (-actual_fsize); 4416 rtx delta = GEN_INT (-actual_fsize);
4789 return 4; 4977 return 4;
4790 else 4978 else
4791 return 0; 4979 return 0;
4792 } 4980 }
4793 return 0; 4981 return 0;
4982 }
4983
4984 /* Implement the TARGET_PRINT_OPERAND_PUNCT_VALID_P hook. */
4985
4986 static bool
4987 pa_print_operand_punct_valid_p (unsigned char code)
4988 {
4989 if (code == '@'
4990 || code == '#'
4991 || code == '*'
4992 || code == '^')
4993 return true;
4994
4995 return false;
4794 } 4996 }
4795 4997
4796 /* Print operand X (an rtx) in assembler syntax to file FILE. 4998 /* Print operand X (an rtx) in assembler syntax to file FILE.
4797 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified. 4999 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
4798 For `%' followed by punctuation, CODE is the punctuation and X is null. */ 5000 For `%' followed by punctuation, CODE is the punctuation and X is null. */
5373 if (deferred_plabels == NULL || i == n_deferred_plabels) 5575 if (deferred_plabels == NULL || i == n_deferred_plabels)
5374 { 5576 {
5375 tree id; 5577 tree id;
5376 5578
5377 if (deferred_plabels == 0) 5579 if (deferred_plabels == 0)
5378 deferred_plabels = (struct deferred_plabel *) 5580 deferred_plabels = ggc_alloc_deferred_plabel ();
5379 ggc_alloc (sizeof (struct deferred_plabel));
5380 else 5581 else
5381 deferred_plabels = (struct deferred_plabel *) 5582 deferred_plabels = GGC_RESIZEVEC (struct deferred_plabel,
5382 ggc_realloc (deferred_plabels, 5583 deferred_plabels,
5383 ((n_deferred_plabels + 1) 5584 n_deferred_plabels + 1);
5384 * sizeof (struct deferred_plabel)));
5385 5585
5386 i = n_deferred_plabels++; 5586 i = n_deferred_plabels++;
5387 deferred_plabels[i].internal_label = gen_label_rtx (); 5587 deferred_plabels[i].internal_label = gen_label_rtx ();
5388 deferred_plabels[i].symbol = symbol; 5588 deferred_plabels[i].symbol = symbol;
5389 5589
5419 assemble_integer (deferred_plabels[i].symbol, 5619 assemble_integer (deferred_plabels[i].symbol,
5420 TARGET_64BIT ? 8 : 4, TARGET_64BIT ? 64 : 32, 1); 5620 TARGET_64BIT ? 8 : 4, TARGET_64BIT ? 64 : 32, 1);
5421 } 5621 }
5422 } 5622 }
5423 5623
5424 #ifdef HPUX_LONG_DOUBLE_LIBRARY 5624 #if HPUX_LONG_DOUBLE_LIBRARY
5425 /* Initialize optabs to point to HPUX long double emulation routines. */ 5625 /* Initialize optabs to point to HPUX long double emulation routines. */
5426 static void 5626 static void
5427 pa_hpux_init_libfuncs (void) 5627 pa_hpux_init_libfuncs (void)
5428 { 5628 {
5429 set_optab_libfunc (add_optab, TFmode, "_U_Qfadd"); 5629 set_optab_libfunc (add_optab, TFmode, "_U_Qfadd");
5681 } 5881 }
5682 } 5882 }
5683 fputc ('\n', asm_out_file); 5883 fputc ('\n', asm_out_file);
5684 } 5884 }
5685 5885
5686 static enum reg_class 5886 static reg_class_t
5687 pa_secondary_reload (bool in_p, rtx x, enum reg_class rclass, 5887 pa_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
5688 enum machine_mode mode, secondary_reload_info *sri) 5888 enum machine_mode mode, secondary_reload_info *sri)
5689 { 5889 {
5690 int is_symbolic, regno; 5890 int regno;
5891 enum reg_class rclass = (enum reg_class) rclass_i;
5691 5892
5692 /* Handle the easy stuff first. */ 5893 /* Handle the easy stuff first. */
5693 if (rclass == R1_REGS) 5894 if (rclass == R1_REGS)
5694 return NO_REGS; 5895 return NO_REGS;
5695 5896
5716 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)) 5917 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
5717 { 5918 {
5718 sri->icode = (mode == SImode ? CODE_FOR_reload_insi_r1 5919 sri->icode = (mode == SImode ? CODE_FOR_reload_insi_r1
5719 : CODE_FOR_reload_indi_r1); 5920 : CODE_FOR_reload_indi_r1);
5720 return NO_REGS; 5921 return NO_REGS;
5922 }
5923
5924 /* Secondary reloads of symbolic operands require %r1 as a scratch
5925 register when we're generating PIC code and when the operand isn't
5926 readonly. */
5927 if (symbolic_expression_p (x))
5928 {
5929 if (GET_CODE (x) == HIGH)
5930 x = XEXP (x, 0);
5931
5932 if (flag_pic || !read_only_operand (x, VOIDmode))
5933 {
5934 gcc_assert (mode == SImode || mode == DImode);
5935 sri->icode = (mode == SImode ? CODE_FOR_reload_insi_r1
5936 : CODE_FOR_reload_indi_r1);
5937 return NO_REGS;
5938 }
5721 } 5939 }
5722 5940
5723 /* Profiling showed the PA port spends about 1.3% of its compilation 5941 /* Profiling showed the PA port spends about 1.3% of its compilation
5724 time in true_regnum from calls inside pa_secondary_reload_class. */ 5942 time in true_regnum from calls inside pa_secondary_reload_class. */
5725 if (regno >= FIRST_PSEUDO_REGISTER || GET_CODE (x) == SUBREG) 5943 if (regno >= FIRST_PSEUDO_REGISTER || GET_CODE (x) == SUBREG)
5759 } 5977 }
5760 5978
5761 /* Request a secondary reload with a general scratch register 5979 /* Request a secondary reload with a general scratch register
5762 for everthing else. ??? Could symbolic operands be handled 5980 for everthing else. ??? Could symbolic operands be handled
5763 directly when generating non-pic PA 2.0 code? */ 5981 directly when generating non-pic PA 2.0 code? */
5764 sri->icode = in_p ? reload_in_optab[mode] : reload_out_optab[mode]; 5982 sri->icode = (in_p
5983 ? direct_optab_handler (reload_in_optab, mode)
5984 : direct_optab_handler (reload_out_optab, mode));
5765 return NO_REGS; 5985 return NO_REGS;
5766 } 5986 }
5767 5987
5768 /* We need a secondary register (GPR) for copies between the SAR 5988 /* We need a secondary register (GPR) for copies between the SAR
5769 and anything other than a general register. */ 5989 and anything other than a general register. */
5770 if (rclass == SHIFT_REGS && (regno <= 0 || regno >= 32)) 5990 if (rclass == SHIFT_REGS && (regno <= 0 || regno >= 32))
5771 { 5991 {
5772 sri->icode = in_p ? reload_in_optab[mode] : reload_out_optab[mode]; 5992 sri->icode = (in_p
5993 ? direct_optab_handler (reload_in_optab, mode)
5994 : direct_optab_handler (reload_out_optab, mode));
5773 return NO_REGS; 5995 return NO_REGS;
5774 } 5996 }
5775 5997
5776 /* A SAR<->FP register copy requires a secondary register (GPR) as 5998 /* A SAR<->FP register copy requires a secondary register (GPR) as
5777 well as secondary memory. */ 5999 well as secondary memory. */
5778 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER 6000 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER
5779 && (REGNO_REG_CLASS (regno) == SHIFT_REGS 6001 && (REGNO_REG_CLASS (regno) == SHIFT_REGS
5780 && FP_REG_CLASS_P (rclass))) 6002 && FP_REG_CLASS_P (rclass)))
5781 { 6003 sri->icode = (in_p
5782 sri->icode = in_p ? reload_in_optab[mode] : reload_out_optab[mode]; 6004 ? direct_optab_handler (reload_in_optab, mode)
5783 return NO_REGS; 6005 : direct_optab_handler (reload_out_optab, mode));
5784 }
5785
5786 /* Secondary reloads of symbolic operands require %r1 as a scratch
5787 register when we're generating PIC code and when the operand isn't
5788 readonly. */
5789 if (GET_CODE (x) == HIGH)
5790 x = XEXP (x, 0);
5791
5792 /* Profiling has showed GCC spends about 2.6% of its compilation
5793 time in symbolic_operand from calls inside pa_secondary_reload_class.
5794 So, we use an inline copy to avoid useless work. */
5795 switch (GET_CODE (x))
5796 {
5797 rtx op;
5798
5799 case SYMBOL_REF:
5800 is_symbolic = !SYMBOL_REF_TLS_MODEL (x);
5801 break;
5802 case LABEL_REF:
5803 is_symbolic = 1;
5804 break;
5805 case CONST:
5806 op = XEXP (x, 0);
5807 is_symbolic = (GET_CODE (op) == PLUS
5808 && ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
5809 && !SYMBOL_REF_TLS_MODEL (XEXP (op, 0)))
5810 || GET_CODE (XEXP (op, 0)) == LABEL_REF)
5811 && GET_CODE (XEXP (op, 1)) == CONST_INT);
5812 break;
5813 default:
5814 is_symbolic = 0;
5815 break;
5816 }
5817
5818 if (is_symbolic && (flag_pic || !read_only_operand (x, VOIDmode)))
5819 {
5820 gcc_assert (mode == SImode || mode == DImode);
5821 sri->icode = (mode == SImode ? CODE_FOR_reload_insi_r1
5822 : CODE_FOR_reload_indi_r1);
5823 }
5824 6006
5825 return NO_REGS; 6007 return NO_REGS;
5826 } 6008 }
5827 6009
5828 /* Implement TARGET_EXTRA_LIVE_ON_ENTRY. The argument pointer 6010 /* Implement TARGET_EXTRA_LIVE_ON_ENTRY. The argument pointer
5843 rtx 6025 rtx
5844 pa_eh_return_handler_rtx (void) 6026 pa_eh_return_handler_rtx (void)
5845 { 6027 {
5846 rtx tmp; 6028 rtx tmp;
5847 6029
5848 tmp = gen_rtx_PLUS (word_mode, frame_pointer_rtx, 6030 tmp = gen_rtx_PLUS (word_mode, hard_frame_pointer_rtx,
5849 TARGET_64BIT ? GEN_INT (-16) : GEN_INT (-20)); 6031 TARGET_64BIT ? GEN_INT (-16) : GEN_INT (-20));
5850 tmp = gen_rtx_MEM (word_mode, tmp); 6032 tmp = gen_rtx_MEM (word_mode, tmp);
5851 tmp->volatil = 1; 6033 tmp->volatil = 1;
5852 return tmp; 6034 return tmp;
5853 } 6035 }
5929 static rtx 6111 static rtx
5930 hppa_builtin_saveregs (void) 6112 hppa_builtin_saveregs (void)
5931 { 6113 {
5932 rtx offset, dest; 6114 rtx offset, dest;
5933 tree fntype = TREE_TYPE (current_function_decl); 6115 tree fntype = TREE_TYPE (current_function_decl);
5934 int argadj = ((!(TYPE_ARG_TYPES (fntype) != 0 6116 int argadj = ((!stdarg_p (fntype))
5935 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5936 != void_type_node)))
5937 ? UNITS_PER_WORD : 0); 6117 ? UNITS_PER_WORD : 0);
5938 6118
5939 if (argadj) 6119 if (argadj)
5940 offset = plus_constant (crtl->args.arg_offset_rtx, argadj); 6120 offset = plus_constant (crtl->args.arg_offset_rtx, argadj);
5941 else 6121 else
6032 6212
6033 u = fold_convert (sizetype, size_in_bytes (type)); 6213 u = fold_convert (sizetype, size_in_bytes (type));
6034 u = fold_build1 (NEGATE_EXPR, sizetype, u); 6214 u = fold_build1 (NEGATE_EXPR, sizetype, u);
6035 t = build2 (POINTER_PLUS_EXPR, valist_type, valist, u); 6215 t = build2 (POINTER_PLUS_EXPR, valist_type, valist, u);
6036 6216
6037 /* Copied from va-pa.h, but we probably don't need to align to 6217 /* Align to 4 or 8 byte boundary depending on argument size. */
6038 word size, since we generate and preserve that invariant. */ 6218
6039 u = size_int (size > 4 ? -8 : -4); 6219 u = build_int_cst (TREE_TYPE (t), (HOST_WIDE_INT)(size > 4 ? -8 : -4));
6040 t = fold_convert (sizetype, t); 6220 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t, u);
6041 t = build2 (BIT_AND_EXPR, sizetype, t, u);
6042 t = fold_convert (valist_type, t); 6221 t = fold_convert (valist_type, t);
6043 6222
6044 t = build2 (MODIFY_EXPR, valist_type, valist, t); 6223 t = build2 (MODIFY_EXPR, valist_type, valist, t);
6045 6224
6046 ofs = (8 - size) % 4; 6225 ofs = (8 - size) % 4;
6106 gcc_unreachable (); 6285 gcc_unreachable ();
6107 } 6286 }
6108 } 6287 }
6109 6288
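A minimal sketch of the pointer arithmetic that the new va_arg tree code above builds for the downward-growing argument area (the helper name is illustrative and not from the patch):

/* Equivalent of the NEGATE_EXPR/POINTER_PLUS_EXPR/BIT_AND_EXPR sequence:
   step VALIST down by SIZE bytes, then round down to a 4- or 8-byte
   boundary -- AND with -4 or -8 clears the low address bits.  */
static char *
va_arg_slot_sketch (char *valist, unsigned HOST_WIDE_INT size)
{
  unsigned HOST_WIDE_INT addr = (unsigned HOST_WIDE_INT) valist - size;

  addr &= (size > 4 ? -8 : -4);   /* e.g. 0x7f0014 & -8 == 0x7f0010 */
  return (char *) addr;
}
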
6110 /* Return TRUE if INSN, a jump insn, has an unfilled delay slot and 6289 /* Return TRUE if INSN, a jump insn, has an unfilled delay slot and
6111 it branches to the next real instruction. Otherwise, return FALSE. */ 6290 it branches into the delay slot. Otherwise, return FALSE. */
6112 6291
6113 static bool 6292 static bool
6114 branch_to_delay_slot_p (rtx insn) 6293 branch_to_delay_slot_p (rtx insn)
6115 { 6294 {
6295 rtx jump_insn;
6296
6116 if (dbr_sequence_length ()) 6297 if (dbr_sequence_length ())
6117 return FALSE; 6298 return FALSE;
6118 6299
6119 return next_real_insn (JUMP_LABEL (insn)) == next_real_insn (insn); 6300 jump_insn = next_active_insn (JUMP_LABEL (insn));
6120 } 6301 while (insn)
6121 6302 {
6122 /* Return TRUE if INSN, a jump insn, needs a nop in its delay slot. 6303 insn = next_active_insn (insn);
6304 if (jump_insn == insn)
6305 return TRUE;
6306
6307 /* We can't rely on the length of asms. So, we return FALSE when
6308 the branch is followed by an asm. */
6309 if (!insn
6310 || GET_CODE (PATTERN (insn)) == ASM_INPUT
6311 || extract_asm_operands (PATTERN (insn)) != NULL_RTX
6312 || get_attr_length (insn) > 0)
6313 break;
6314 }
6315
6316 return FALSE;
6317 }
6318
6319 /* Return TRUE if INSN, a forward jump insn, needs a nop in its delay slot.
6123 6320
6124 This occurs when INSN has an unfilled delay slot and is followed 6321 This occurs when INSN has an unfilled delay slot and is followed
6125 by an ASM_INPUT. Disaster can occur if the ASM_INPUT is empty and 6322 by an asm. Disaster can occur if the asm is empty and the jump
6126 the jump branches into the delay slot. So, we add a nop in the delay 6323 branches into the delay slot. So, we add a nop in the delay slot
6127 slot just to be safe. This messes up our instruction count, but we 6324 when this occurs. */
6128 don't know how big the ASM_INPUT insn is anyway. */
6129 6325
6130 static bool 6326 static bool
6131 branch_needs_nop_p (rtx insn) 6327 branch_needs_nop_p (rtx insn)
6132 { 6328 {
6133 rtx next_insn; 6329 rtx jump_insn;
6134 6330
6135 if (dbr_sequence_length ()) 6331 if (dbr_sequence_length ())
6136 return FALSE; 6332 return FALSE;
6137 6333
6138 next_insn = next_real_insn (insn); 6334 jump_insn = next_active_insn (JUMP_LABEL (insn));
6139 return GET_CODE (PATTERN (next_insn)) == ASM_INPUT; 6335 while (insn)
6336 {
6337 insn = next_active_insn (insn);
6338 if (!insn || jump_insn == insn)
6339 return TRUE;
6340
6341 if (!(GET_CODE (PATTERN (insn)) == ASM_INPUT
6342 || extract_asm_operands (PATTERN (insn)) != NULL_RTX)
6343 && get_attr_length (insn) > 0)
6344 break;
6345 }
6346
6347 return FALSE;
6348 }
6349
6350 /* Return TRUE if INSN, a forward jump insn, can use nullification
6351 to skip the following instruction. This avoids an extra cycle due
6352 to a mis-predicted branch when we fall through. */
6353
6354 static bool
6355 use_skip_p (rtx insn)
6356 {
6357 rtx jump_insn = next_active_insn (JUMP_LABEL (insn));
6358
6359 while (insn)
6360 {
6361 insn = next_active_insn (insn);
6362
6363 /* We can't rely on the length of asms, so we can't skip asms. */
6364 if (!insn
6365 || GET_CODE (PATTERN (insn)) == ASM_INPUT
6366 || extract_asm_operands (PATTERN (insn)) != NULL_RTX)
6367 break;
6368 if (get_attr_length (insn) == 4
6369 && jump_insn == next_active_insn (insn))
6370 return TRUE;
6371 if (get_attr_length (insn) > 0)
6372 break;
6373 }
6374
6375 return FALSE;
6140 } 6376 }
6141 6377
6142 /* This routine handles all the normal conditional branch sequences we 6378 /* This routine handles all the normal conditional branch sequences we
6143 might need to generate. It handles compare immediate vs compare 6379 might need to generate. It handles compare immediate vs compare
6144 register, nullification of delay slots, varying length branches, 6380 register, nullification of delay slots, varying length branches,
6148 6384
6149 const char * 6385 const char *
6150 output_cbranch (rtx *operands, int negated, rtx insn) 6386 output_cbranch (rtx *operands, int negated, rtx insn)
6151 { 6387 {
6152 static char buf[100]; 6388 static char buf[100];
6153 int useskip = 0; 6389 bool useskip;
6154 int nullify = INSN_ANNULLED_BRANCH_P (insn); 6390 int nullify = INSN_ANNULLED_BRANCH_P (insn);
6155 int length = get_attr_length (insn); 6391 int length = get_attr_length (insn);
6156 int xdelay; 6392 int xdelay;
6157 6393
6158 /* A conditional branch to the following instruction (e.g. the delay slot) 6394 /* A conditional branch to the following instruction (e.g. the delay slot)
6186 nullify = forward_branch_p (insn); 6422 nullify = forward_branch_p (insn);
6187 6423
6188 /* A forward branch over a single nullified insn can be done with a 6424 /* A forward branch over a single nullified insn can be done with a
6189 comclr instruction. This avoids a single cycle penalty due to 6425 comclr instruction. This avoids a single cycle penalty due to
6190 mis-predicted branch if we fall through (branch not taken). */ 6426 mis-predicted branch if we fall through (branch not taken). */
6191 if (length == 4 6427 useskip = (length == 4 && nullify) ? use_skip_p (insn) : FALSE;
6192 && next_real_insn (insn) != 0
6193 && get_attr_length (next_real_insn (insn)) == 4
6194 && JUMP_LABEL (insn) == next_nonnote_insn (next_real_insn (insn))
6195 && nullify)
6196 useskip = 1;
6197 6428
6198 switch (length) 6429 switch (length)
6199 { 6430 {
6200 /* All short conditional branches except backwards with an unfilled 6431 /* All short conditional branches except backwards with an unfilled
6201 delay slot. */ 6432 delay slot. */
6479 6710
6480 const char * 6711 const char *
6481 output_bb (rtx *operands ATTRIBUTE_UNUSED, int negated, rtx insn, int which) 6712 output_bb (rtx *operands ATTRIBUTE_UNUSED, int negated, rtx insn, int which)
6482 { 6713 {
6483 static char buf[100]; 6714 static char buf[100];
6484 int useskip = 0; 6715 bool useskip;
6485 int nullify = INSN_ANNULLED_BRANCH_P (insn); 6716 int nullify = INSN_ANNULLED_BRANCH_P (insn);
6486 int length = get_attr_length (insn); 6717 int length = get_attr_length (insn);
6487 int xdelay; 6718 int xdelay;
6488 6719
6489 /* A conditional branch to the following instruction (e.g. the delay slot) is 6720 /* A conditional branch to the following instruction (e.g. the delay slot) is
6505 nullify = forward_branch_p (insn); 6736 nullify = forward_branch_p (insn);
6506 6737
6507 /* A forward branch over a single nullified insn can be done with a 6738 /* A forward branch over a single nullified insn can be done with a
6508 extrs instruction. This avoids a single cycle penalty due to 6739 extrs instruction. This avoids a single cycle penalty due to
6509 mis-predicted branch if we fall through (branch not taken). */ 6740 mis-predicted branch if we fall through (branch not taken). */
6510 6741 useskip = (length == 4 && nullify) ? use_skip_p (insn) : FALSE;
6511 if (length == 4
6512 && next_real_insn (insn) != 0
6513 && get_attr_length (next_real_insn (insn)) == 4
6514 && JUMP_LABEL (insn) == next_nonnote_insn (next_real_insn (insn))
6515 && nullify)
6516 useskip = 1;
6517 6742
6518 switch (length) 6743 switch (length)
6519 { 6744 {
6520 6745
6521 /* All short conditional branches except backwards with an unfilled 6746 /* All short conditional branches except backwards with an unfilled
6670 6895
6671 const char * 6896 const char *
6672 output_bvb (rtx *operands ATTRIBUTE_UNUSED, int negated, rtx insn, int which) 6897 output_bvb (rtx *operands ATTRIBUTE_UNUSED, int negated, rtx insn, int which)
6673 { 6898 {
6674 static char buf[100]; 6899 static char buf[100];
6675 int useskip = 0; 6900 bool useskip;
6676 int nullify = INSN_ANNULLED_BRANCH_P (insn); 6901 int nullify = INSN_ANNULLED_BRANCH_P (insn);
6677 int length = get_attr_length (insn); 6902 int length = get_attr_length (insn);
6678 int xdelay; 6903 int xdelay;
6679 6904
6680 /* A conditional branch to the following instruction (e.g. the delay slot) is 6905 /* A conditional branch to the following instruction (e.g. the delay slot) is
6696 nullify = forward_branch_p (insn); 6921 nullify = forward_branch_p (insn);
6697 6922
6698 /* A forward branch over a single nullified insn can be done with a 6923 /* A forward branch over a single nullified insn can be done with a
6699 extrs instruction. This avoids a single cycle penalty due to 6924 extrs instruction. This avoids a single cycle penalty due to
6700 mis-predicted branch if we fall through (branch not taken). */ 6925 mis-predicted branch if we fall through (branch not taken). */
6701 6926 useskip = (length == 4 && nullify) ? use_skip_p (insn) : FALSE;
6702 if (length == 4
6703 && next_real_insn (insn) != 0
6704 && get_attr_length (next_real_insn (insn)) == 4
6705 && JUMP_LABEL (insn) == next_nonnote_insn (next_real_insn (insn))
6706 && nullify)
6707 useskip = 1;
6708 6927
6709 switch (length) 6928 switch (length)
6710 { 6929 {
6711 6930
6712 /* All short conditional branches except backwards with an unfilled 6931 /* All short conditional branches except backwards with an unfilled
9223 return values in TARGET_PROMOTE_FUNCTION_MODE will not work correctly. 9442 return values in TARGET_PROMOTE_FUNCTION_MODE will not work correctly.
9224 9443
9225 Small structures must be returned in a PARALLEL on PA64 in order 9444 Small structures must be returned in a PARALLEL on PA64 in order
9226 to match the HP Compiler ABI. */ 9445 to match the HP Compiler ABI. */
9227 9446
9228 rtx 9447 static rtx
9229 pa_function_value (const_tree valtype, 9448 pa_function_value (const_tree valtype,
9230 const_tree func ATTRIBUTE_UNUSED, 9449 const_tree func ATTRIBUTE_UNUSED,
9231 bool outgoing ATTRIBUTE_UNUSED) 9450 bool outgoing ATTRIBUTE_UNUSED)
9232 { 9451 {
9233 enum machine_mode valmode; 9452 enum machine_mode valmode;
9284 return gen_rtx_REG (valmode, 32); 9503 return gen_rtx_REG (valmode, 32);
9285 9504
9286 return gen_rtx_REG (valmode, 28); 9505 return gen_rtx_REG (valmode, 28);
9287 } 9506 }
9288 9507
9508 /* Implement the TARGET_LIBCALL_VALUE hook. */
9509
9510 static rtx
9511 pa_libcall_value (enum machine_mode mode,
9512 const_rtx fun ATTRIBUTE_UNUSED)
9513 {
9514 if (! TARGET_SOFT_FLOAT
9515 && (mode == SFmode || mode == DFmode))
9516 return gen_rtx_REG (mode, 32);
9517 else
9518 return gen_rtx_REG (mode, 28);
9519 }
9520
9521 /* Implement the TARGET_FUNCTION_VALUE_REGNO_P hook. */
9522
9523 static bool
9524 pa_function_value_regno_p (const unsigned int regno)
9525 {
9526 if (regno == 28
9527 || (! TARGET_SOFT_FLOAT && regno == 32))
9528 return true;
9529
9530 return false;
9531 }
9532
9533 /* Update the data in CUM to advance over an argument
9534 of mode MODE and data type TYPE.
9535 (TYPE is null for libcalls where that information may not be available.) */
9536
9537 static void
9538 pa_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
9539 const_tree type, bool named ATTRIBUTE_UNUSED)
9540 {
9541 int arg_size = FUNCTION_ARG_SIZE (mode, type);
9542
9543 cum->nargs_prototype--;
9544 cum->words += (arg_size
9545 + ((cum->words & 01)
9546 && type != NULL_TREE
9547 && arg_size > 1));
9548 }
9549
9289 /* Return the location of a parameter that is passed in a register or NULL 9550 /* Return the location of a parameter that is passed in a register or NULL
9290 if the parameter has any component that is passed in memory. 9551 if the parameter has any component that is passed in memory.
9291 9552
9292 This is new code and will be pushed to into the net sources after 9553 This is new code and will be pushed to into the net sources after
9293 further testing. 9554 further testing.
9294 9555
9295 ??? We might want to restructure this so that it looks more like other 9556 ??? We might want to restructure this so that it looks more like other
9296 ports. */ 9557 ports. */
9297 rtx 9558 static rtx
9298 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, tree type, 9559 pa_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
9299 int named ATTRIBUTE_UNUSED) 9560 const_tree type, bool named ATTRIBUTE_UNUSED)
9300 { 9561 {
9301 int max_arg_words = (TARGET_64BIT ? 8 : 4); 9562 int max_arg_words = (TARGET_64BIT ? 8 : 4);
9302 int alignment = 0; 9563 int alignment = 0;
9303 int arg_size; 9564 int arg_size;
9304 int fpr_reg_base; 9565 int fpr_reg_base;
9485 retval = gen_rtx_REG (mode, fpr_reg_base); 9746 retval = gen_rtx_REG (mode, fpr_reg_base);
9486 } 9747 }
9487 return retval; 9748 return retval;
9488 } 9749 }
9489 9750
9751 /* Arguments larger than one word are double word aligned. */
9752
9753 static unsigned int
9754 pa_function_arg_boundary (enum machine_mode mode, const_tree type)
9755 {
9756 bool singleword = (type
9757 ? (integer_zerop (TYPE_SIZE (type))
9758 || !TREE_CONSTANT (TYPE_SIZE (type))
9759 || int_size_in_bytes (type) <= UNITS_PER_WORD)
9760 : GET_MODE_SIZE (mode) <= UNITS_PER_WORD);
9761
9762 return singleword ? PARM_BOUNDARY : MAX_PARM_BOUNDARY;
9763 }
9490 9764
9491 /* If this arg would be passed totally in registers or totally on the stack, 9765 /* If this arg would be passed totally in registers or totally on the stack,
9492 then this routine should return zero. */ 9766 then this routine should return zero. */
9493 9767
9494 static int 9768 static int
10001 && XINT (XEXP (x, 1), 1) == UNSPEC_DLTIND14R) 10275 && XINT (XEXP (x, 1), 1) == UNSPEC_DLTIND14R)
10002 return gen_const_mem (Pmode, XVECEXP (XEXP (x, 1), 0, 0)); 10276 return gen_const_mem (Pmode, XVECEXP (XEXP (x, 1), 0, 0));
10003 return x; 10277 return x;
10004 } 10278 }
10005 10279
10280 static rtx
10281 pa_internal_arg_pointer (void)
10282 {
10283 /* The argument pointer and the hard frame pointer are the same in
10284 the 32-bit runtime, so we don't need a copy. */
10285 if (TARGET_64BIT)
10286 return copy_to_reg (virtual_incoming_args_rtx);
10287 else
10288 return virtual_incoming_args_rtx;
10289 }
10290
10291 /* Given FROM and TO register numbers, say whether this elimination is allowed.
10292 Frame pointer elimination is automatically handled. */
10293
10294 static bool
10295 pa_can_eliminate (const int from, const int to)
10296 {
10297 /* The argument cannot be eliminated in the 64-bit runtime. */
10298 if (TARGET_64BIT && from == ARG_POINTER_REGNUM)
10299 return false;
10300
10301 return (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM
10302 ? ! frame_pointer_needed
10303 : true);
10304 }
10305
10306 /* Define the offset between two registers, FROM to be eliminated and its
10307 replacement TO, at the start of a routine. */
10308 HOST_WIDE_INT
10309 pa_initial_elimination_offset (int from, int to)
10310 {
10311 HOST_WIDE_INT offset;
10312
10313 if ((from == HARD_FRAME_POINTER_REGNUM || from == FRAME_POINTER_REGNUM)
10314 && to == STACK_POINTER_REGNUM)
10315 offset = -compute_frame_size (get_frame_size (), 0);
10316 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
10317 offset = 0;
10318 else
10319 gcc_unreachable ();
10320
10321 return offset;
10322 }
10323
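The two elimination hooks above arbitrate register eliminations that the target header declares. As a sketch, the pairs pa_initial_elimination_offset actually handles correspond to an ELIMINABLE_REGS table of roughly this shape; the authoritative table lives in pa.h and may list further pairs (for instance the argument pointer, which pa_can_eliminate rejects on the 64-bit runtime):

/* Illustrative only -- see pa.h for the real definition.  */
#define ELIMINABLE_REGS                                        \
  {{ FRAME_POINTER_REGNUM,      STACK_POINTER_REGNUM },        \
   { FRAME_POINTER_REGNUM,      HARD_FRAME_POINTER_REGNUM },   \
   { HARD_FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }}
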
10324 static void
10325 pa_conditional_register_usage (void)
10326 {
10327 int i;
10328
10329 if (!TARGET_64BIT && !TARGET_PA_11)
10330 {
10331 for (i = 56; i <= FP_REG_LAST; i++)
10332 fixed_regs[i] = call_used_regs[i] = 1;
10333 for (i = 33; i < 56; i += 2)
10334 fixed_regs[i] = call_used_regs[i] = 1;
10335 }
10336 if (TARGET_DISABLE_FPREGS || TARGET_SOFT_FLOAT)
10337 {
10338 for (i = FP_REG_FIRST; i <= FP_REG_LAST; i++)
10339 fixed_regs[i] = call_used_regs[i] = 1;
10340 }
10341 if (flag_pic)
10342 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
10343 }
10344
10345 /* Target hook for c_mode_for_suffix. */
10346
10347 static enum machine_mode
10348 pa_c_mode_for_suffix (char suffix)
10349 {
10350 if (HPUX_LONG_DOUBLE_LIBRARY)
10351 {
10352 if (suffix == 'q')
10353 return TFmode;
10354 }
10355
10356 return VOIDmode;
10357 }
10358
10359 /* Target hook for function_section. */
10360
10361 static section *
10362 pa_function_section (tree decl, enum node_frequency freq,
10363 bool startup, bool exit)
10364 {
10365 /* Put functions in text section if target doesn't have named sections. */
10366 if (!targetm.have_named_sections)
10367 return text_section;
10368
10369 /* Force nested functions into the same section as the containing
10370 function. */
10371 if (decl
10372 && DECL_SECTION_NAME (decl) == NULL_TREE
10373 && DECL_CONTEXT (decl) != NULL_TREE
10374 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
10375 && DECL_SECTION_NAME (DECL_CONTEXT (decl)) == NULL_TREE)
10376 return function_section (DECL_CONTEXT (decl));
10377
10378 /* Otherwise, use the default function section. */
10379 return default_function_section (decl, freq, startup, exit);
10380 }
10381
10006 #include "gt-pa.h" 10382 #include "gt-pa.h"