comparison gcc/config/alpha/alpha.c @ 111:04ced10e8804

gcc 7
author kono
date Fri, 27 Oct 2017 22:46:09 +0900
parents f6334be47118
children 84e7813d76e9
comparing 68:561a7518be6b (parent) with 111:04ced10e8804
1 /* Subroutines used for code generation on the DEC Alpha. 1 /* Subroutines used for code generation on the DEC Alpha.
2 Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2 Copyright (C) 1992-2017 Free Software Foundation, Inc.
3 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
5 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu) 3 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 4
7 This file is part of GCC. 5 This file is part of GCC.
8 6
9 GCC is free software; you can redistribute it and/or modify 7 GCC is free software; you can redistribute it and/or modify
22 20
23 21
24 #include "config.h" 22 #include "config.h"
25 #include "system.h" 23 #include "system.h"
26 #include "coretypes.h" 24 #include "coretypes.h"
27 #include "tm.h" 25 #include "backend.h"
26 #include "target.h"
28 #include "rtl.h" 27 #include "rtl.h"
29 #include "tree.h" 28 #include "tree.h"
29 #include "stringpool.h"
30 #include "attribs.h"
31 #include "memmodel.h"
32 #include "gimple.h"
33 #include "df.h"
34 #include "predict.h"
35 #include "tm_p.h"
36 #include "ssa.h"
37 #include "expmed.h"
38 #include "optabs.h"
30 #include "regs.h" 39 #include "regs.h"
31 #include "hard-reg-set.h" 40 #include "emit-rtl.h"
32 #include "insn-config.h" 41 #include "recog.h"
33 #include "conditions.h" 42 #include "diagnostic-core.h"
43 #include "alias.h"
44 #include "fold-const.h"
45 #include "stor-layout.h"
46 #include "calls.h"
47 #include "varasm.h"
34 #include "output.h" 48 #include "output.h"
35 #include "insn-attr.h" 49 #include "insn-attr.h"
36 #include "flags.h" 50 #include "explow.h"
37 #include "recog.h"
38 #include "expr.h" 51 #include "expr.h"
39 #include "optabs.h"
40 #include "reload.h" 52 #include "reload.h"
41 #include "obstack.h"
42 #include "except.h" 53 #include "except.h"
43 #include "function.h" 54 #include "common/common-target.h"
44 #include "diagnostic-core.h"
45 #include "ggc.h"
46 #include "integrate.h"
47 #include "tm_p.h"
48 #include "target.h"
49 #include "target-def.h"
50 #include "debug.h" 55 #include "debug.h"
51 #include "langhooks.h" 56 #include "langhooks.h"
52 #include "splay-tree.h" 57 #include "cfgrtl.h"
53 #include "cfglayout.h" 58 #include "tree-pass.h"
54 #include "gimple.h" 59 #include "context.h"
55 #include "tree-flow.h" 60 #include "gimple-iterator.h"
61 #include "gimplify.h"
56 #include "tree-stdarg.h" 62 #include "tree-stdarg.h"
57 #include "tm-constrs.h" 63 #include "tm-constrs.h"
58 #include "df.h"
59 #include "libfuncs.h" 64 #include "libfuncs.h"
65 #include "params.h"
66 #include "builtins.h"
67 #include "rtl-iter.h"
68
69 /* This file should be included last. */
70 #include "target-def.h"
60 71
61 /* Specify which cpu to schedule for. */ 72 /* Specify which cpu to schedule for. */
62 enum processor_type alpha_tune; 73 enum processor_type alpha_tune;
63 74
64 /* Which cpu we're generating code for. */ 75 /* Which cpu we're generating code for. */
86 97
87 static int inside_function = FALSE; 98 static int inside_function = FALSE;
88 99
89 /* The number of cycles of latency we should assume on memory reads. */ 100 /* The number of cycles of latency we should assume on memory reads. */
90 101
91 int alpha_memory_latency = 3; 102 static int alpha_memory_latency = 3;
92 103
93 /* Whether the function needs the GP. */ 104 /* Whether the function needs the GP. */
94 105
95 static int alpha_function_needs_gp; 106 static int alpha_function_needs_gp;
96
97 /* The alias set for prologue/epilogue register save/restore. */
98
99 static GTY(()) alias_set_type alpha_sr_alias_set;
100 107
101 /* The assembler name of the current function. */ 108 /* The assembler name of the current function. */
102 109
103 static const char *alpha_fnname; 110 static const char *alpha_fnname;
104 111
182 COSTS_N_INSNS (1), /* int_cmov */ 189 COSTS_N_INSNS (1), /* int_cmov */
183 COSTS_N_INSNS (6), /* int_div */ 190 COSTS_N_INSNS (6), /* int_div */
184 }; 191 };
185 192
186 /* Get the number of args of a function in one of two ways. */ 193 /* Get the number of args of a function in one of two ways. */
187 #if TARGET_ABI_OPEN_VMS || TARGET_ABI_UNICOSMK 194 #if TARGET_ABI_OPEN_VMS
188 #define NUM_ARGS crtl->args.info.num_args 195 #define NUM_ARGS crtl->args.info.num_args
189 #else 196 #else
190 #define NUM_ARGS crtl->args.info 197 #define NUM_ARGS crtl->args.info
191 #endif 198 #endif
192 199
194 #define REG_RA 26 201 #define REG_RA 26
195 202
196 /* Declarations of static functions. */ 203 /* Declarations of static functions. */
197 static struct machine_function *alpha_init_machine_status (void); 204 static struct machine_function *alpha_init_machine_status (void);
198 static rtx alpha_emit_xfloating_compare (enum rtx_code *, rtx, rtx); 205 static rtx alpha_emit_xfloating_compare (enum rtx_code *, rtx, rtx);
206 static void alpha_handle_trap_shadows (void);
207 static void alpha_align_insns (void);
208 static void alpha_override_options_after_change (void);
199 209
200 #if TARGET_ABI_OPEN_VMS 210 #if TARGET_ABI_OPEN_VMS
201 static void alpha_write_linkage (FILE *, const char *, tree); 211 static void alpha_write_linkage (FILE *, const char *);
202 static bool vms_valid_pointer_mode (enum machine_mode); 212 static bool vms_valid_pointer_mode (scalar_int_mode);
213 #else
214 #define vms_patch_builtins() gcc_unreachable()
203 #endif 215 #endif
204
205 static void unicosmk_output_deferred_case_vectors (FILE *);
206 static void unicosmk_gen_dsib (unsigned long *);
207 static void unicosmk_output_ssib (FILE *, const char *);
208 static int unicosmk_need_dex (rtx);
209 216
210 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */ 217 static unsigned int
211 static const struct default_options alpha_option_optimization_table[] = 218 rest_of_handle_trap_shadows (void)
212 { 219 {
213 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 }, 220 alpha_handle_trap_shadows ();
214 { OPT_LEVELS_NONE, 0, NULL, 0 } 221 return 0;
215 }; 222 }
216 223
217 /* Implement TARGET_HANDLE_OPTION. */ 224 namespace {
218 225
219 static bool 226 const pass_data pass_data_handle_trap_shadows =
220 alpha_handle_option (size_t code, const char *arg, int value) 227 {
221 { 228 RTL_PASS,
222 switch (code) 229 "trap_shadows", /* name */
223 { 230 OPTGROUP_NONE, /* optinfo_flags */
224 case OPT_mfp_regs: 231 TV_NONE, /* tv_id */
225 if (value == 0) 232 0, /* properties_required */
226 target_flags |= MASK_SOFT_FP; 233 0, /* properties_provided */
227 break; 234 0, /* properties_destroyed */
228 235 0, /* todo_flags_start */
229 case OPT_mieee: 236 TODO_df_finish, /* todo_flags_finish */
230 case OPT_mieee_with_inexact: 237 };
231 target_flags |= MASK_IEEE_CONFORMANT; 238
232 break; 239 class pass_handle_trap_shadows : public rtl_opt_pass
233 240 {
234 case OPT_mtls_size_: 241 public:
235 if (value != 16 && value != 32 && value != 64) 242 pass_handle_trap_shadows(gcc::context *ctxt)
236 error ("bad value %qs for -mtls-size switch", arg); 243 : rtl_opt_pass(pass_data_handle_trap_shadows, ctxt)
237 break; 244 {}
238 } 245
239 246 /* opt_pass methods: */
240 return true; 247 virtual bool gate (function *)
248 {
249 return alpha_tp != ALPHA_TP_PROG || flag_exceptions;
250 }
251
252 virtual unsigned int execute (function *)
253 {
254 return rest_of_handle_trap_shadows ();
255 }
256
257 }; // class pass_handle_trap_shadows
258
259 } // anon namespace
260
261 rtl_opt_pass *
262 make_pass_handle_trap_shadows (gcc::context *ctxt)
263 {
264 return new pass_handle_trap_shadows (ctxt);
265 }
266
267 static unsigned int
268 rest_of_align_insns (void)
269 {
270 alpha_align_insns ();
271 return 0;
272 }
273
274 namespace {
275
276 const pass_data pass_data_align_insns =
277 {
278 RTL_PASS,
279 "align_insns", /* name */
280 OPTGROUP_NONE, /* optinfo_flags */
281 TV_NONE, /* tv_id */
282 0, /* properties_required */
283 0, /* properties_provided */
284 0, /* properties_destroyed */
285 0, /* todo_flags_start */
286 TODO_df_finish, /* todo_flags_finish */
287 };
288
289 class pass_align_insns : public rtl_opt_pass
290 {
291 public:
292 pass_align_insns(gcc::context *ctxt)
293 : rtl_opt_pass(pass_data_align_insns, ctxt)
294 {}
295
296 /* opt_pass methods: */
297 virtual bool gate (function *)
298 {
299 /* Due to the number of extra trapb insns, don't bother fixing up
300 alignment when trap precision is instruction. Moreover, we can
301 only do our job when sched2 is run. */
302 return ((alpha_tune == PROCESSOR_EV4
303 || alpha_tune == PROCESSOR_EV5)
304 && optimize && !optimize_size
305 && alpha_tp != ALPHA_TP_INSN
306 && flag_schedule_insns_after_reload);
307 }
308
309 virtual unsigned int execute (function *)
310 {
311 return rest_of_align_insns ();
312 }
313
314 }; // class pass_align_insns
315
316 } // anon namespace
317
318 rtl_opt_pass *
319 make_pass_align_insns (gcc::context *ctxt)
320 {
321 return new pass_align_insns (ctxt);
241 } 322 }
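
These two pass objects are not self-installing: a backend splices them into the pipeline through GCC's register_pass machinery (alpha does this during option override, outside this hunk). A minimal sketch, assuming the usual tree-pass.h API; the reference pass name "eh_ranges" and the insertion position are illustrative choices, not taken from this file:

    /* Sketch: splice a machine-specific RTL pass into the pass pipeline.
       "g" is GCC's global gcc::context; the anchor pass is illustrative. */
    static void
    alpha_register_passes_sketch (void)
    {
      opt_pass *p = make_pass_handle_trap_shadows (g);
      struct register_pass_info info
        = { p, "eh_ranges", 1, PASS_POS_INSERT_AFTER };
      register_pass (&info);
    }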
242 323
243 #ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING 324 #ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
244 /* Implement TARGET_MANGLE_TYPE. */ 325 /* Implement TARGET_MANGLE_TYPE. */
245 326
262 { 343 {
263 static const struct cpu_table { 344 static const struct cpu_table {
264 const char *const name; 345 const char *const name;
265 const enum processor_type processor; 346 const enum processor_type processor;
266 const int flags; 347 const int flags;
348 const unsigned short line_size; /* in bytes */
349 const unsigned short l1_size; /* in kb. */
350 const unsigned short l2_size; /* in kb. */
267 } cpu_table[] = { 351 } cpu_table[] = {
268 { "ev4", PROCESSOR_EV4, 0 }, 352 /* EV4/LCA45 had 8k L1 caches; EV45 had 16k L1 caches.
269 { "ev45", PROCESSOR_EV4, 0 }, 353 EV4/EV45 had 128k to 16M 32-byte direct Bcache. LCA45
270 { "21064", PROCESSOR_EV4, 0 }, 354 had 64k to 8M 8-byte direct Bcache. */
271 { "ev5", PROCESSOR_EV5, 0 }, 355 { "ev4", PROCESSOR_EV4, 0, 32, 8, 8*1024 },
272 { "21164", PROCESSOR_EV5, 0 }, 356 { "21064", PROCESSOR_EV4, 0, 32, 8, 8*1024 },
273 { "ev56", PROCESSOR_EV5, MASK_BWX }, 357 { "ev45", PROCESSOR_EV4, 0, 32, 16, 16*1024 },
274 { "21164a", PROCESSOR_EV5, MASK_BWX }, 358
275 { "pca56", PROCESSOR_EV5, MASK_BWX|MASK_MAX }, 359 /* EV5 or EV56 had 8k 32 byte L1, 96k 32 or 64 byte L2,
276 { "21164PC",PROCESSOR_EV5, MASK_BWX|MASK_MAX }, 360 and 1M to 16M 64 byte L3 (not modeled).
277 { "21164pc",PROCESSOR_EV5, MASK_BWX|MASK_MAX }, 361 PCA56 had 16k 64-byte cache; PCA57 had 32k Icache.
278 { "ev6", PROCESSOR_EV6, MASK_BWX|MASK_MAX|MASK_FIX }, 362 PCA56 had 8k 64-byte cache; PCA57 had 16k Dcache. */
279 { "21264", PROCESSOR_EV6, MASK_BWX|MASK_MAX|MASK_FIX }, 363 { "ev5", PROCESSOR_EV5, 0, 32, 8, 96 },
280 { "ev67", PROCESSOR_EV6, MASK_BWX|MASK_MAX|MASK_FIX|MASK_CIX }, 364 { "21164", PROCESSOR_EV5, 0, 32, 8, 96 },
281 { "21264a", PROCESSOR_EV6, MASK_BWX|MASK_MAX|MASK_FIX|MASK_CIX } 365 { "ev56", PROCESSOR_EV5, MASK_BWX, 32, 8, 96 },
366 { "21164a", PROCESSOR_EV5, MASK_BWX, 32, 8, 96 },
367 { "pca56", PROCESSOR_EV5, MASK_BWX|MASK_MAX, 64, 16, 4*1024 },
368 { "21164PC",PROCESSOR_EV5, MASK_BWX|MASK_MAX, 64, 16, 4*1024 },
369 { "21164pc",PROCESSOR_EV5, MASK_BWX|MASK_MAX, 64, 16, 4*1024 },
370
371 /* EV6 had 64k 64 byte L1, 1M to 16M Bcache. */
372 { "ev6", PROCESSOR_EV6, MASK_BWX|MASK_MAX|MASK_FIX, 64, 64, 16*1024 },
373 { "21264", PROCESSOR_EV6, MASK_BWX|MASK_MAX|MASK_FIX, 64, 64, 16*1024 },
374 { "ev67", PROCESSOR_EV6, MASK_BWX|MASK_MAX|MASK_FIX|MASK_CIX,
375 64, 64, 16*1024 },
376 { "21264a", PROCESSOR_EV6, MASK_BWX|MASK_MAX|MASK_FIX|MASK_CIX,
377 64, 64, 16*1024 }
282 }; 378 };
283 379
284 int const ct_size = ARRAY_SIZE (cpu_table); 380 int const ct_size = ARRAY_SIZE (cpu_table);
381 int line_size = 0, l1_size = 0, l2_size = 0;
285 int i; 382 int i;
286 383
287 #ifdef SUBTARGET_OVERRIDE_OPTIONS 384 #ifdef SUBTARGET_OVERRIDE_OPTIONS
288 SUBTARGET_OVERRIDE_OPTIONS; 385 SUBTARGET_OVERRIDE_OPTIONS;
289 #endif 386 #endif
290 387
291 /* Unicos/Mk doesn't have shared libraries. */ 388 /* Default to full IEEE compliance mode for Go language. */
292 if (TARGET_ABI_UNICOSMK && flag_pic) 389 if (strcmp (lang_hooks.name, "GNU Go") == 0
293 { 390 && !(target_flags_explicit & MASK_IEEE))
294 warning (0, "-f%s ignored for Unicos/Mk (not supported)", 391 target_flags |= MASK_IEEE;
295 (flag_pic > 1) ? "PIC" : "pic"); 392
296 flag_pic = 0; 393 alpha_fprm = ALPHA_FPRM_NORM;
297 }
298
299 /* On Unicos/Mk, the native compiler consistently generates /d suffices for
300 floating-point instructions. Make that the default for this target. */
301 if (TARGET_ABI_UNICOSMK)
302 alpha_fprm = ALPHA_FPRM_DYN;
303 else
304 alpha_fprm = ALPHA_FPRM_NORM;
305
306 alpha_tp = ALPHA_TP_PROG; 394 alpha_tp = ALPHA_TP_PROG;
307 alpha_fptm = ALPHA_FPTM_N; 395 alpha_fptm = ALPHA_FPTM_N;
308 396
309 /* We cannot use su and sui qualifiers for conversion instructions on
310 Unicos/Mk. I'm not sure if this is due to assembler or hardware
311 limitations. Right now, we issue a warning if -mieee is specified
312 and then ignore it; eventually, we should either get it right or
313 disable the option altogether. */
314
315 if (TARGET_IEEE) 397 if (TARGET_IEEE)
316 { 398 {
317 if (TARGET_ABI_UNICOSMK) 399 alpha_tp = ALPHA_TP_INSN;
318 warning (0, "-mieee not supported on Unicos/Mk"); 400 alpha_fptm = ALPHA_FPTM_SU;
319 else 401 }
320 {
321 alpha_tp = ALPHA_TP_INSN;
322 alpha_fptm = ALPHA_FPTM_SU;
323 }
324 }
325
326 if (TARGET_IEEE_WITH_INEXACT) 402 if (TARGET_IEEE_WITH_INEXACT)
327 { 403 {
328 if (TARGET_ABI_UNICOSMK) 404 alpha_tp = ALPHA_TP_INSN;
329 warning (0, "-mieee-with-inexact not supported on Unicos/Mk"); 405 alpha_fptm = ALPHA_FPTM_SUI;
330 else
331 {
332 alpha_tp = ALPHA_TP_INSN;
333 alpha_fptm = ALPHA_FPTM_SUI;
334 }
335 } 406 }
336 407
337 if (alpha_tp_string) 408 if (alpha_tp_string)
338 { 409 {
339 if (! strcmp (alpha_tp_string, "p")) 410 if (! strcmp (alpha_tp_string, "p"))
378 if (alpha_cpu_string) 449 if (alpha_cpu_string)
379 { 450 {
380 for (i = 0; i < ct_size; i++) 451 for (i = 0; i < ct_size; i++)
381 if (! strcmp (alpha_cpu_string, cpu_table [i].name)) 452 if (! strcmp (alpha_cpu_string, cpu_table [i].name))
382 { 453 {
383 alpha_tune = alpha_cpu = cpu_table [i].processor; 454 alpha_tune = alpha_cpu = cpu_table[i].processor;
455 line_size = cpu_table[i].line_size;
456 l1_size = cpu_table[i].l1_size;
457 l2_size = cpu_table[i].l2_size;
384 target_flags &= ~ (MASK_BWX | MASK_MAX | MASK_FIX | MASK_CIX); 458 target_flags &= ~ (MASK_BWX | MASK_MAX | MASK_FIX | MASK_CIX);
385 target_flags |= cpu_table [i].flags; 459 target_flags |= cpu_table[i].flags;
386 break; 460 break;
387 } 461 }
388 if (i == ct_size) 462 if (i == ct_size)
389 error ("bad value %qs for -mcpu switch", alpha_cpu_string); 463 error ("bad value %qs for -mcpu switch", alpha_cpu_string);
390 } 464 }
392 if (alpha_tune_string) 466 if (alpha_tune_string)
393 { 467 {
394 for (i = 0; i < ct_size; i++) 468 for (i = 0; i < ct_size; i++)
395 if (! strcmp (alpha_tune_string, cpu_table [i].name)) 469 if (! strcmp (alpha_tune_string, cpu_table [i].name))
396 { 470 {
397 alpha_tune = cpu_table [i].processor; 471 alpha_tune = cpu_table[i].processor;
472 line_size = cpu_table[i].line_size;
473 l1_size = cpu_table[i].l1_size;
474 l2_size = cpu_table[i].l2_size;
398 break; 475 break;
399 } 476 }
400 if (i == ct_size) 477 if (i == ct_size)
401 error ("bad value %qs for -mtune switch", alpha_tune_string); 478 error ("bad value %qs for -mtune switch", alpha_tune_string);
402 } 479 }
403 480
481 if (line_size)
482 maybe_set_param_value (PARAM_L1_CACHE_LINE_SIZE, line_size,
483 global_options.x_param_values,
484 global_options_set.x_param_values);
485 if (l1_size)
486 maybe_set_param_value (PARAM_L1_CACHE_SIZE, l1_size,
487 global_options.x_param_values,
488 global_options_set.x_param_values);
489 if (l2_size)
490 maybe_set_param_value (PARAM_L2_CACHE_SIZE, l2_size,
491 global_options.x_param_values,
492 global_options_set.x_param_values);
493
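
The three maybe_set_param_value calls above seed the --param cache knobs from the cpu_table geometry without clobbering anything the user set explicitly on the command line. A standalone sketch of that default-unless-overridden pattern (names here are illustrative, not GCC's internals):

    #include <stdbool.h>

    /* Mimics maybe_set_param_value's contract: apply VALUE only when
       the user has not already set the parameter explicitly. */
    static void
    set_param_default (int *param, bool explicitly_set, int value)
    {
      if (!explicitly_set)      /* an explicit --param still wins */
        *param = value;
    }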
404 /* Do some sanity checks on the above options. */ 494 /* Do some sanity checks on the above options. */
405
406 if (TARGET_ABI_UNICOSMK && alpha_fptm != ALPHA_FPTM_N)
407 {
408 warning (0, "trap mode not supported on Unicos/Mk");
409 alpha_fptm = ALPHA_FPTM_N;
410 }
411 495
412 if ((alpha_fptm == ALPHA_FPTM_SU || alpha_fptm == ALPHA_FPTM_SUI) 496 if ((alpha_fptm == ALPHA_FPTM_SU || alpha_fptm == ALPHA_FPTM_SUI)
413 && alpha_tp != ALPHA_TP_INSN && alpha_cpu != PROCESSOR_EV6) 497 && alpha_tp != ALPHA_TP_INSN && alpha_cpu != PROCESSOR_EV6)
414 { 498 {
415 warning (0, "fp software completion requires -mtrap-precision=i"); 499 warning (0, "fp software completion requires -mtrap-precision=i");
495 if (flag_pic == 1) 579 if (flag_pic == 1)
496 target_flags |= MASK_SMALL_DATA; 580 target_flags |= MASK_SMALL_DATA;
497 else if (flag_pic == 2) 581 else if (flag_pic == 2)
498 target_flags &= ~MASK_SMALL_DATA; 582 target_flags &= ~MASK_SMALL_DATA;
499 583
584 alpha_override_options_after_change ();
585
586 /* Register variables and functions with the garbage collector. */
587
588 /* Set up function hooks. */
589 init_machine_status = alpha_init_machine_status;
590
591 /* Tell the compiler when we're using VAX floating point. */
592 if (TARGET_FLOAT_VAX)
593 {
594 REAL_MODE_FORMAT (SFmode) = &vax_f_format;
595 REAL_MODE_FORMAT (DFmode) = &vax_g_format;
596 REAL_MODE_FORMAT (TFmode) = NULL;
597 }
598
599 #ifdef TARGET_DEFAULT_LONG_DOUBLE_128
600 if (!(target_flags_explicit & MASK_LONG_DOUBLE_128))
601 target_flags |= MASK_LONG_DOUBLE_128;
602 #endif
603
604 }
605
606 /* Implement targetm.override_options_after_change. */
607
608 static void
609 alpha_override_options_after_change (void)
610 {
500 /* Align labels and loops for optimal branching. */ 611 /* Align labels and loops for optimal branching. */
501 /* ??? Kludge these by not doing anything if we don't optimize and also if 612 /* ??? Kludge these by not doing anything if we don't optimize. */
502 we are writing ECOFF symbols to work around a bug in DEC's assembler. */ 613 if (optimize > 0)
503 if (optimize > 0 && write_symbols != SDB_DEBUG)
504 { 614 {
505 if (align_loops <= 0) 615 if (align_loops <= 0)
506 align_loops = 16; 616 align_loops = 16;
507 if (align_jumps <= 0) 617 if (align_jumps <= 0)
508 align_jumps = 16; 618 align_jumps = 16;
509 } 619 }
510 if (align_functions <= 0) 620 if (align_functions <= 0)
511 align_functions = 16; 621 align_functions = 16;
512
513 /* Acquire a unique set number for our register saves and restores. */
514 alpha_sr_alias_set = new_alias_set ();
515
516 /* Register variables and functions with the garbage collector. */
517
518 /* Set up function hooks. */
519 init_machine_status = alpha_init_machine_status;
520
521 /* Tell the compiler when we're using VAX floating point. */
522 if (TARGET_FLOAT_VAX)
523 {
524 REAL_MODE_FORMAT (SFmode) = &vax_f_format;
525 REAL_MODE_FORMAT (DFmode) = &vax_g_format;
526 REAL_MODE_FORMAT (TFmode) = NULL;
527 }
528
529 #ifdef TARGET_DEFAULT_LONG_DOUBLE_128
530 if (!(target_flags_explicit & MASK_LONG_DOUBLE_128))
531 target_flags |= MASK_LONG_DOUBLE_128;
532 #endif
533
534 /* If using typedef char *va_list, signal that __builtin_va_start (&ap, 0)
535 can be optimized to ap = __builtin_next_arg (0). */
536 if (TARGET_ABI_UNICOSMK)
537 targetm.expand_builtin_va_start = NULL;
538 } 622 }
539 623
540 /* Returns 1 if VALUE is a mask that contains full bytes of zero or ones. */ 624 /* Returns 1 if VALUE is a mask that contains full bytes of zero or ones. */
541 625
542 int 626 int
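
The body of this predicate is unchanged, so the comparison elides it. As a self-contained illustration of the property being tested — every byte of the mask is 0x00 or 0xff, the shapes the Alpha byte-zap (ZAP/ZAPNOT) instructions work with — and not the source's actual implementation:

    #include <stdbool.h>
    #include <stdint.h>

    /* True iff every byte of V is 0x00 or 0xff, e.g.
       0x00ff00ff00ff00ff -> true, 0x0000000000000ff0 -> false. */
    static bool
    bytes_all_zero_or_ones (uint64_t v)
    {
      for (int i = 0; i < 8; i++, v >>= 8)
        if ((v & 0xff) != 0 && (v & 0xff) != 0xff)
          return false;
      return true;
    }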
587 resolve_reload_operand (rtx op) 671 resolve_reload_operand (rtx op)
588 { 672 {
589 if (reload_in_progress) 673 if (reload_in_progress)
590 { 674 {
591 rtx tmp = op; 675 rtx tmp = op;
592 if (GET_CODE (tmp) == SUBREG) 676 if (SUBREG_P (tmp))
593 tmp = SUBREG_REG (tmp); 677 tmp = SUBREG_REG (tmp);
594 if (REG_P (tmp) 678 if (REG_P (tmp)
595 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER) 679 && REGNO (tmp) >= FIRST_PSEUDO_REGISTER)
596 { 680 {
597 op = reg_equiv_memory_loc[REGNO (tmp)]; 681 op = reg_equiv_memory_loc (REGNO (tmp));
598 if (op == 0) 682 if (op == 0)
599 return 0; 683 return 0;
600 } 684 }
601 } 685 }
602 return op; 686 return op;
603 } 687 }
604 688
605 /* The scalar modes supported differ from the default check-what-c-supports 689 /* The scalar modes supported differ from the default check-what-c-supports
606 version in that sometimes TFmode is available even when long double 690 version in that sometimes TFmode is available even when long double
607 indicates only DFmode. On unicosmk, we have the situation that HImode 691 indicates only DFmode. */
608 doesn't map to any C type, but of course we still support that. */
609 692
610 static bool 693 static bool
611 alpha_scalar_mode_supported_p (enum machine_mode mode) 694 alpha_scalar_mode_supported_p (scalar_mode mode)
612 { 695 {
613 switch (mode) 696 switch (mode)
614 { 697 {
615 case QImode: 698 case E_QImode:
616 case HImode: 699 case E_HImode:
617 case SImode: 700 case E_SImode:
618 case DImode: 701 case E_DImode:
619 case TImode: /* via optabs.c */ 702 case E_TImode: /* via optabs.c */
620 return true; 703 return true;
621 704
622 case SFmode: 705 case E_SFmode:
623 case DFmode: 706 case E_DFmode:
624 return true; 707 return true;
625 708
626 case TFmode: 709 case E_TFmode:
627 return TARGET_HAS_XFLOATING_LIBS; 710 return TARGET_HAS_XFLOATING_LIBS;
628 711
629 default: 712 default:
630 return false; 713 return false;
631 } 714 }
635 TARGET_MAX is enabled. We do not check TARGET_MAX here, however, 718 TARGET_MAX is enabled. We do not check TARGET_MAX here, however,
636 which allows the vectorizer to operate on e.g. move instructions, 719 which allows the vectorizer to operate on e.g. move instructions,
637 or when expand_vector_operations can do something useful. */ 720 or when expand_vector_operations can do something useful. */
638 721
639 static bool 722 static bool
640 alpha_vector_mode_supported_p (enum machine_mode mode) 723 alpha_vector_mode_supported_p (machine_mode mode)
641 { 724 {
642 return mode == V8QImode || mode == V4HImode || mode == V2SImode; 725 return mode == V8QImode || mode == V4HImode || mode == V2SImode;
643 } 726 }
644 727
645 /* Return 1 if this function can directly return via $26. */ 728 /* Return 1 if this function can directly return via $26. */
646 729
647 int 730 int
648 direct_return (void) 731 direct_return (void)
649 { 732 {
650 return (! TARGET_ABI_OPEN_VMS && ! TARGET_ABI_UNICOSMK 733 return (TARGET_ABI_OSF
651 && reload_completed 734 && reload_completed
652 && alpha_sa_size () == 0 735 && alpha_sa_size () == 0
653 && get_frame_size () == 0 736 && get_frame_size () == 0
654 && crtl->outgoing_args_size == 0 737 && crtl->outgoing_args_size == 0
655 && crtl->args.pretend_args_size == 0); 738 && crtl->args.pretend_args_size == 0);
656 }
657
658 /* Return the ADDR_VEC associated with a tablejump insn. */
659
660 rtx
661 alpha_tablejump_addr_vec (rtx insn)
662 {
663 rtx tmp;
664
665 tmp = JUMP_LABEL (insn);
666 if (!tmp)
667 return NULL_RTX;
668 tmp = NEXT_INSN (tmp);
669 if (!tmp)
670 return NULL_RTX;
671 if (JUMP_P (tmp)
672 && GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC)
673 return PATTERN (tmp);
674 return NULL_RTX;
675 }
676
677 /* Return the label of the predicted edge, or CONST0_RTX if we don't know. */
678
679 rtx
680 alpha_tablejump_best_label (rtx insn)
681 {
682 rtx jump_table = alpha_tablejump_addr_vec (insn);
683 rtx best_label = NULL_RTX;
684
685 /* ??? Once the CFG doesn't keep getting completely rebuilt, look
686 there for edge frequency counts from profile data. */
687
688 if (jump_table)
689 {
690 int n_labels = XVECLEN (jump_table, 1);
691 int best_count = -1;
692 int i, j;
693
694 for (i = 0; i < n_labels; i++)
695 {
696 int count = 1;
697
698 for (j = i + 1; j < n_labels; j++)
699 if (XEXP (XVECEXP (jump_table, 1, i), 0)
700 == XEXP (XVECEXP (jump_table, 1, j), 0))
701 count++;
702
703 if (count > best_count)
704 best_count = count, best_label = XVECEXP (jump_table, 1, i);
705 }
706 }
707
708 return best_label ? best_label : const0_rtx;
709 } 739 }
710 740
711 /* Return the TLS model to use for SYMBOL. */ 741 /* Return the TLS model to use for SYMBOL. */
712 742
713 static enum tls_model 743 static enum tls_model
761 791
762 /* Functions are never in the small data area. Duh. */ 792 /* Functions are never in the small data area. Duh. */
763 if (TREE_CODE (exp) == FUNCTION_DECL) 793 if (TREE_CODE (exp) == FUNCTION_DECL)
764 return false; 794 return false;
765 795
796 /* COMMON symbols are never small data. */
797 if (TREE_CODE (exp) == VAR_DECL && DECL_COMMON (exp))
798 return false;
799
766 if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp)) 800 if (TREE_CODE (exp) == VAR_DECL && DECL_SECTION_NAME (exp))
767 { 801 {
768 const char *section = TREE_STRING_POINTER (DECL_SECTION_NAME (exp)); 802 const char *section = DECL_SECTION_NAME (exp);
769 if (strcmp (section, ".sdata") == 0 803 if (strcmp (section, ".sdata") == 0
770 || strcmp (section, ".sbss") == 0) 804 || strcmp (section, ".sbss") == 0)
771 return true; 805 return true;
772 } 806 }
773 else 807 else
783 return false; 817 return false;
784 } 818 }
785 819
786 #if TARGET_ABI_OPEN_VMS 820 #if TARGET_ABI_OPEN_VMS
787 static bool 821 static bool
788 vms_valid_pointer_mode (enum machine_mode mode) 822 vms_valid_pointer_mode (scalar_int_mode mode)
789 { 823 {
790 return (mode == SImode || mode == DImode); 824 return (mode == SImode || mode == DImode);
791 } 825 }
792 826
793 static bool 827 static bool
818 register and a constant address, or just a register. For DImode, 852 register and a constant address, or just a register. For DImode,
819 any of those forms can be surrounded with an AND that clears the 853 any of those forms can be surrounded with an AND that clears the
820 low-order three bits; this is an "unaligned" access. */ 854 low-order three bits; this is an "unaligned" access. */
821 855
822 static bool 856 static bool
823 alpha_legitimate_address_p (enum machine_mode mode, rtx x, bool strict) 857 alpha_legitimate_address_p (machine_mode mode, rtx x, bool strict)
824 { 858 {
825 /* If this is an ldq_u type address, discard the outer AND. */ 859 /* If this is an ldq_u type address, discard the outer AND. */
826 if (mode == DImode 860 if (mode == DImode
827 && GET_CODE (x) == AND 861 && GET_CODE (x) == AND
828 && CONST_INT_P (XEXP (x, 1)) 862 && CONST_INT_P (XEXP (x, 1))
829 && INTVAL (XEXP (x, 1)) == -8) 863 && INTVAL (XEXP (x, 1)) == -8)
830 x = XEXP (x, 0); 864 x = XEXP (x, 0);
831 865
832 /* Discard non-paradoxical subregs. */ 866 /* Discard non-paradoxical subregs. */
833 if (GET_CODE (x) == SUBREG 867 if (SUBREG_P (x)
834 && (GET_MODE_SIZE (GET_MODE (x)) 868 && (GET_MODE_SIZE (GET_MODE (x))
835 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))) 869 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
836 x = SUBREG_REG (x); 870 x = SUBREG_REG (x);
837 871
838 /* Unadorned general registers are valid. */ 872 /* Unadorned general registers are valid. */
856 { 890 {
857 rtx ofs = XEXP (x, 1); 891 rtx ofs = XEXP (x, 1);
858 x = XEXP (x, 0); 892 x = XEXP (x, 0);
859 893
860 /* Discard non-paradoxical subregs. */ 894 /* Discard non-paradoxical subregs. */
861 if (GET_CODE (x) == SUBREG 895 if (SUBREG_P (x)
862 && (GET_MODE_SIZE (GET_MODE (x)) 896 && (GET_MODE_SIZE (GET_MODE (x))
863 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))) 897 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
864 x = SUBREG_REG (x); 898 x = SUBREG_REG (x);
865 899
866 if (REG_P (x)) 900 if (REG_P (x))
890 { 924 {
891 rtx ofs = XEXP (x, 1); 925 rtx ofs = XEXP (x, 1);
892 x = XEXP (x, 0); 926 x = XEXP (x, 0);
893 927
894 /* Discard non-paradoxical subregs. */ 928 /* Discard non-paradoxical subregs. */
895 if (GET_CODE (x) == SUBREG 929 if (SUBREG_P (x)
896 && (GET_MODE_SIZE (GET_MODE (x)) 930 && (GET_MODE_SIZE (GET_MODE (x))
897 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))) 931 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
898 x = SUBREG_REG (x); 932 x = SUBREG_REG (x);
899 933
900 /* Must have a valid base register. */ 934 /* Must have a valid base register. */
929 963
930 /* Try machine-dependent ways of modifying an illegitimate address 964 /* Try machine-dependent ways of modifying an illegitimate address
931 to be legitimate. If we find one, return the new, valid address. */ 965 to be legitimate. If we find one, return the new, valid address. */
932 966
933 static rtx 967 static rtx
934 alpha_legitimize_address_1 (rtx x, rtx scratch, enum machine_mode mode) 968 alpha_legitimize_address_1 (rtx x, rtx scratch, machine_mode mode)
935 { 969 {
936 HOST_WIDE_INT addend; 970 HOST_WIDE_INT addend;
937 971
938 /* If the address is (plus reg const_int) and the CONST_INT is not a 972 /* If the address is (plus reg const_int) and the CONST_INT is not a
939 valid offset, compute the high part of the constant and add it to 973 valid offset, compute the high part of the constant and add it to
984 around +/- 32k offset. */ 1018 around +/- 32k offset. */
985 if (TARGET_EXPLICIT_RELOCS 1019 if (TARGET_EXPLICIT_RELOCS
986 && GET_MODE_SIZE (mode) <= UNITS_PER_WORD 1020 && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
987 && symbolic_operand (x, Pmode)) 1021 && symbolic_operand (x, Pmode))
988 { 1022 {
989 rtx r0, r16, eqv, tga, tp, insn, dest, seq; 1023 rtx r0, r16, eqv, tga, tp, dest, seq;
1024 rtx_insn *insn;
990 1025
991 switch (tls_symbolic_operand_type (x)) 1026 switch (tls_symbolic_operand_type (x))
992 { 1027 {
993 case TLS_MODEL_NONE: 1028 case TLS_MODEL_NONE:
994 break; 1029 break;
995 1030
996 case TLS_MODEL_GLOBAL_DYNAMIC: 1031 case TLS_MODEL_GLOBAL_DYNAMIC:
997 start_sequence (); 1032 {
998 1033 start_sequence ();
999 r0 = gen_rtx_REG (Pmode, 0); 1034
1000 r16 = gen_rtx_REG (Pmode, 16); 1035 r0 = gen_rtx_REG (Pmode, 0);
1001 tga = get_tls_get_addr (); 1036 r16 = gen_rtx_REG (Pmode, 16);
1002 dest = gen_reg_rtx (Pmode); 1037 tga = get_tls_get_addr ();
1003 seq = GEN_INT (alpha_next_sequence_number++); 1038 dest = gen_reg_rtx (Pmode);
1004 1039 seq = GEN_INT (alpha_next_sequence_number++);
1005 emit_insn (gen_movdi_er_tlsgd (r16, pic_offset_table_rtx, x, seq)); 1040
1006 insn = gen_call_value_osf_tlsgd (r0, tga, seq); 1041 emit_insn (gen_movdi_er_tlsgd (r16, pic_offset_table_rtx, x, seq));
1007 insn = emit_call_insn (insn); 1042 rtx val = gen_call_value_osf_tlsgd (r0, tga, seq);
1008 RTL_CONST_CALL_P (insn) = 1; 1043 insn = emit_call_insn (val);
1009 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r16); 1044 RTL_CONST_CALL_P (insn) = 1;
1010 1045 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r16);
1011 insn = get_insns (); 1046
1012 end_sequence (); 1047 insn = get_insns ();
1013 1048 end_sequence ();
1014 emit_libcall_block (insn, dest, r0, x); 1049
1015 return dest; 1050 emit_libcall_block (insn, dest, r0, x);
1051 return dest;
1052 }
1016 1053
1017 case TLS_MODEL_LOCAL_DYNAMIC: 1054 case TLS_MODEL_LOCAL_DYNAMIC:
1018 start_sequence (); 1055 {
1019 1056 start_sequence ();
1020 r0 = gen_rtx_REG (Pmode, 0); 1057
1021 r16 = gen_rtx_REG (Pmode, 16); 1058 r0 = gen_rtx_REG (Pmode, 0);
1022 tga = get_tls_get_addr (); 1059 r16 = gen_rtx_REG (Pmode, 16);
1023 scratch = gen_reg_rtx (Pmode); 1060 tga = get_tls_get_addr ();
1024 seq = GEN_INT (alpha_next_sequence_number++); 1061 scratch = gen_reg_rtx (Pmode);
1025 1062 seq = GEN_INT (alpha_next_sequence_number++);
1026 emit_insn (gen_movdi_er_tlsldm (r16, pic_offset_table_rtx, seq)); 1063
1027 insn = gen_call_value_osf_tlsldm (r0, tga, seq); 1064 emit_insn (gen_movdi_er_tlsldm (r16, pic_offset_table_rtx, seq));
1028 insn = emit_call_insn (insn); 1065 rtx val = gen_call_value_osf_tlsldm (r0, tga, seq);
1029 RTL_CONST_CALL_P (insn) = 1; 1066 insn = emit_call_insn (val);
1030 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r16); 1067 RTL_CONST_CALL_P (insn) = 1;
1031 1068 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r16);
1032 insn = get_insns (); 1069
1033 end_sequence (); 1070 insn = get_insns ();
1034 1071 end_sequence ();
1035 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx), 1072
1036 UNSPEC_TLSLDM_CALL); 1073 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
1037 emit_libcall_block (insn, scratch, r0, eqv); 1074 UNSPEC_TLSLDM_CALL);
1038 1075 emit_libcall_block (insn, scratch, r0, eqv);
1039 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_DTPREL); 1076
1040 eqv = gen_rtx_CONST (Pmode, eqv); 1077 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_DTPREL);
1041 1078 eqv = gen_rtx_CONST (Pmode, eqv);
1042 if (alpha_tls_size == 64) 1079
1043 { 1080 if (alpha_tls_size == 64)
1044 dest = gen_reg_rtx (Pmode); 1081 {
1045 emit_insn (gen_rtx_SET (VOIDmode, dest, eqv)); 1082 dest = gen_reg_rtx (Pmode);
1046 emit_insn (gen_adddi3 (dest, dest, scratch)); 1083 emit_insn (gen_rtx_SET (dest, eqv));
1047 return dest; 1084 emit_insn (gen_adddi3 (dest, dest, scratch));
1048 } 1085 return dest;
1049 if (alpha_tls_size == 32) 1086 }
1050 { 1087 if (alpha_tls_size == 32)
1051 insn = gen_rtx_HIGH (Pmode, eqv); 1088 {
1052 insn = gen_rtx_PLUS (Pmode, scratch, insn); 1089 rtx temp = gen_rtx_HIGH (Pmode, eqv);
1053 scratch = gen_reg_rtx (Pmode); 1090 temp = gen_rtx_PLUS (Pmode, scratch, temp);
1054 emit_insn (gen_rtx_SET (VOIDmode, scratch, insn)); 1091 scratch = gen_reg_rtx (Pmode);
1055 } 1092 emit_insn (gen_rtx_SET (scratch, temp));
1056 return gen_rtx_LO_SUM (Pmode, scratch, eqv); 1093 }
1094 return gen_rtx_LO_SUM (Pmode, scratch, eqv);
1095 }
1057 1096
1058 case TLS_MODEL_INITIAL_EXEC: 1097 case TLS_MODEL_INITIAL_EXEC:
1059 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_TPREL); 1098 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_TPREL);
1060 eqv = gen_rtx_CONST (Pmode, eqv); 1099 eqv = gen_rtx_CONST (Pmode, eqv);
1061 tp = gen_reg_rtx (Pmode); 1100 tp = gen_reg_rtx (Pmode);
1062 scratch = gen_reg_rtx (Pmode); 1101 scratch = gen_reg_rtx (Pmode);
1063 dest = gen_reg_rtx (Pmode); 1102 dest = gen_reg_rtx (Pmode);
1064 1103
1065 emit_insn (gen_load_tp (tp)); 1104 emit_insn (gen_get_thread_pointerdi (tp));
1066 emit_insn (gen_rtx_SET (VOIDmode, scratch, eqv)); 1105 emit_insn (gen_rtx_SET (scratch, eqv));
1067 emit_insn (gen_adddi3 (dest, tp, scratch)); 1106 emit_insn (gen_adddi3 (dest, tp, scratch));
1068 return dest; 1107 return dest;
1069 1108
1070 case TLS_MODEL_LOCAL_EXEC: 1109 case TLS_MODEL_LOCAL_EXEC:
1071 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_TPREL); 1110 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_TPREL);
1072 eqv = gen_rtx_CONST (Pmode, eqv); 1111 eqv = gen_rtx_CONST (Pmode, eqv);
1073 tp = gen_reg_rtx (Pmode); 1112 tp = gen_reg_rtx (Pmode);
1074 1113
1075 emit_insn (gen_load_tp (tp)); 1114 emit_insn (gen_get_thread_pointerdi (tp));
1076 if (alpha_tls_size == 32) 1115 if (alpha_tls_size == 32)
1077 { 1116 {
1078 insn = gen_rtx_HIGH (Pmode, eqv); 1117 rtx temp = gen_rtx_HIGH (Pmode, eqv);
1079 insn = gen_rtx_PLUS (Pmode, tp, insn); 1118 temp = gen_rtx_PLUS (Pmode, tp, temp);
1080 tp = gen_reg_rtx (Pmode); 1119 tp = gen_reg_rtx (Pmode);
1081 emit_insn (gen_rtx_SET (VOIDmode, tp, insn)); 1120 emit_insn (gen_rtx_SET (tp, temp));
1082 } 1121 }
1083 return gen_rtx_LO_SUM (Pmode, tp, eqv); 1122 return gen_rtx_LO_SUM (Pmode, tp, eqv);
1084 1123
1085 default: 1124 default:
1086 gcc_unreachable (); 1125 gcc_unreachable ();
1092 return x; 1131 return x;
1093 else 1132 else
1094 { 1133 {
1095 if (can_create_pseudo_p ()) 1134 if (can_create_pseudo_p ())
1096 scratch = gen_reg_rtx (Pmode); 1135 scratch = gen_reg_rtx (Pmode);
1097 emit_insn (gen_rtx_SET (VOIDmode, scratch, 1136 emit_insn (gen_rtx_SET (scratch, gen_rtx_HIGH (Pmode, x)));
1098 gen_rtx_HIGH (Pmode, x)));
1099 return gen_rtx_LO_SUM (Pmode, scratch, x); 1137 return gen_rtx_LO_SUM (Pmode, scratch, x);
1100 } 1138 }
1101 } 1139 }
1102 } 1140 }
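
For orientation, the TLS_MODEL_GLOBAL_DYNAMIC arm above is what fires for ordinary __thread variables in position-independent code: the movdi_er_tlsgd pattern builds the module/offset descriptor in $16 (r16) and the call sequence leaves the address in $0 (r0). A minimal source-level example (the model actually chosen still depends on -fpic and the symbol's visibility):

    /* Compiled with -fPIC, the address computation below is expanded
       through the global-dynamic path above, i.e. a __tls_get_addr call. */
    __thread int counter;

    int *
    counter_addr (void)
    {
      return &counter;
    }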
1103 1141
1119 if (high) 1157 if (high)
1120 x = expand_simple_binop (Pmode, PLUS, x, GEN_INT (high), 1158 x = expand_simple_binop (Pmode, PLUS, x, GEN_INT (high),
1121 (!can_create_pseudo_p () ? scratch : NULL_RTX), 1159 (!can_create_pseudo_p () ? scratch : NULL_RTX),
1122 1, OPTAB_LIB_WIDEN); 1160 1, OPTAB_LIB_WIDEN);
1123 1161
1124 return plus_constant (x, low); 1162 return plus_constant (Pmode, x, low);
1125 } 1163 }
1126 } 1164 }
1127 1165
1128 1166
1129 /* Try machine-dependent ways of modifying an illegitimate address 1167 /* Try machine-dependent ways of modifying an illegitimate address
1130 to be legitimate. Return X or the new, valid address. */ 1168 to be legitimate. Return X or the new, valid address. */
1131 1169
1132 static rtx 1170 static rtx
1133 alpha_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED, 1171 alpha_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
1134 enum machine_mode mode) 1172 machine_mode mode)
1135 { 1173 {
1136 rtx new_x = alpha_legitimize_address_1 (x, NULL_RTX, mode); 1174 rtx new_x = alpha_legitimize_address_1 (x, NULL_RTX, mode);
1137 return new_x ? new_x : x; 1175 return new_x ? new_x : x;
1176 }
1177
1178 /* Return true if ADDR has an effect that depends on the machine mode it
1179 is used for. On the Alpha this is true only for the unaligned modes.
1180 We can simplify the test since we know that the address must be valid. */
1181
1182 static bool
1183 alpha_mode_dependent_address_p (const_rtx addr,
1184 addr_space_t as ATTRIBUTE_UNUSED)
1185 {
1186 return GET_CODE (addr) == AND;
1138 } 1187 }
1139 1188
1140 /* Primarily this is required for TLS symbols, but given that our move 1189 /* Primarily this is required for TLS symbols, but given that our move
1141 patterns *ought* to be able to handle any symbol at any time, we 1190 patterns *ought* to be able to handle any symbol at any time, we
1142 should never be spilling symbolic operands to the constant pool, ever. */ 1191 should never be spilling symbolic operands to the constant pool, ever. */
1143 1192
1144 static bool 1193 static bool
1145 alpha_cannot_force_const_mem (rtx x) 1194 alpha_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
1146 { 1195 {
1147 enum rtx_code code = GET_CODE (x); 1196 enum rtx_code code = GET_CODE (x);
1148 return code == SYMBOL_REF || code == LABEL_REF || code == CONST; 1197 return code == SYMBOL_REF || code == LABEL_REF || code == CONST;
1149 } 1198 }
1150 1199
1163 /* Otherwise, we can make a tail call if the target function shares 1212 /* Otherwise, we can make a tail call if the target function shares
1164 the same GP. */ 1213 the same GP. */
1165 return decl_has_samegp (decl); 1214 return decl_has_samegp (decl);
1166 } 1215 }
1167 1216
1168 int 1217 bool
1169 some_small_symbolic_operand_int (rtx *px, void *data ATTRIBUTE_UNUSED) 1218 some_small_symbolic_operand_int (rtx x)
1170 { 1219 {
1171 rtx x = *px; 1220 subrtx_var_iterator::array_type array;
1172 1221 FOR_EACH_SUBRTX_VAR (iter, array, x, ALL)
1173 /* Don't re-split. */ 1222 {
1174 if (GET_CODE (x) == LO_SUM) 1223 rtx x = *iter;
1175 return -1; 1224 /* Don't re-split. */
1176 1225 if (GET_CODE (x) == LO_SUM)
1177 return small_symbolic_operand (x, Pmode) != 0; 1226 iter.skip_subrtxes ();
1178 } 1227 else if (small_symbolic_operand (x, Pmode))
1179 1228 return true;
1180 static int 1229 }
1181 split_small_symbolic_operand_1 (rtx *px, void *data ATTRIBUTE_UNUSED) 1230 return false;
1182 {
1183 rtx x = *px;
1184
1185 /* Don't re-split. */
1186 if (GET_CODE (x) == LO_SUM)
1187 return -1;
1188
1189 if (small_symbolic_operand (x, Pmode))
1190 {
1191 x = gen_rtx_LO_SUM (Pmode, pic_offset_table_rtx, x);
1192 *px = x;
1193 return -1;
1194 }
1195
1196 return 0;
1197 } 1231 }
1198 1232
1199 rtx 1233 rtx
1200 split_small_symbolic_operand (rtx x) 1234 split_small_symbolic_operand (rtx x)
1201 { 1235 {
1202 x = copy_insn (x); 1236 x = copy_insn (x);
1203 for_each_rtx (&x, split_small_symbolic_operand_1, NULL); 1237 subrtx_ptr_iterator::array_type array;
1238 FOR_EACH_SUBRTX_PTR (iter, array, &x, ALL)
1239 {
1240 rtx *ptr = *iter;
1241 rtx x = *ptr;
1242 /* Don't re-split. */
1243 if (GET_CODE (x) == LO_SUM)
1244 iter.skip_subrtxes ();
1245 else if (small_symbolic_operand (x, Pmode))
1246 {
1247 *ptr = gen_rtx_LO_SUM (Pmode, pic_offset_table_rtx, x);
1248 iter.skip_subrtxes ();
1249 }
1250 }
1204 return x; 1251 return x;
1205 } 1252 }
1206 1253
1207 /* Indicate that INSN cannot be duplicated. This is true for any insn 1254 /* Indicate that INSN cannot be duplicated. This is true for any insn
1208 that we've marked with gpdisp relocs, since those have to stay in 1255 that we've marked with gpdisp relocs, since those have to stay in
1217 then they'll be in a different block from their ldgp. Which could lead 1264 then they'll be in a different block from their ldgp. Which could lead
1218 the bb reorder code to think that it would be ok to copy just the block 1265 the bb reorder code to think that it would be ok to copy just the block
1219 containing the call and branch to the block containing the ldgp. */ 1266 containing the call and branch to the block containing the ldgp. */
1220 1267
1221 static bool 1268 static bool
1222 alpha_cannot_copy_insn_p (rtx insn) 1269 alpha_cannot_copy_insn_p (rtx_insn *insn)
1223 { 1270 {
1224 if (!reload_completed || !TARGET_EXPLICIT_RELOCS) 1271 if (!reload_completed || !TARGET_EXPLICIT_RELOCS)
1225 return false; 1272 return false;
1226 if (recog_memoized (insn) >= 0) 1273 if (recog_memoized (insn) >= 0)
1227 return get_attr_cannot_copy (insn); 1274 return get_attr_cannot_copy (insn);
1233 /* Try a machine-dependent way of reloading an illegitimate address 1280 /* Try a machine-dependent way of reloading an illegitimate address
1234 operand. If we find one, push the reload and return the new rtx. */ 1281 operand. If we find one, push the reload and return the new rtx. */
1235 1282
1236 rtx 1283 rtx
1237 alpha_legitimize_reload_address (rtx x, 1284 alpha_legitimize_reload_address (rtx x,
1238 enum machine_mode mode ATTRIBUTE_UNUSED, 1285 machine_mode mode ATTRIBUTE_UNUSED,
1239 int opnum, int type, 1286 int opnum, int type,
1240 int ind_levels ATTRIBUTE_UNUSED) 1287 int ind_levels ATTRIBUTE_UNUSED)
1241 { 1288 {
1242 /* We must recognize output that we have already generated ourselves. */ 1289 /* We must recognize output that we have already generated ourselves. */
1243 if (GET_CODE (x) == PLUS 1290 if (GET_CODE (x) == PLUS
1257 cuts number of extra insns needed from 3 to 1. */ 1304 cuts number of extra insns needed from 3 to 1. */
1258 if (GET_CODE (x) == PLUS 1305 if (GET_CODE (x) == PLUS
1259 && REG_P (XEXP (x, 0)) 1306 && REG_P (XEXP (x, 0))
1260 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER 1307 && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
1261 && REGNO_OK_FOR_BASE_P (REGNO (XEXP (x, 0))) 1308 && REGNO_OK_FOR_BASE_P (REGNO (XEXP (x, 0)))
1262 && GET_CODE (XEXP (x, 1)) == CONST_INT) 1309 && CONST_INT_P (XEXP (x, 1)))
1263 { 1310 {
1264 HOST_WIDE_INT val = INTVAL (XEXP (x, 1)); 1311 HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
1265 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000; 1312 HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
1266 HOST_WIDE_INT high 1313 HOST_WIDE_INT high
1267 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000; 1314 = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
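
The masked xor-and-subtract expressions above are this file's standard portable sign-extension idiom: ((x & 0xffff) ^ 0x8000) - 0x8000 reads the low 16 bits as a signed value, which is exactly what a 16-bit lda-style displacement can hold. A standalone check, using an arbitrary example constant:

    #include <assert.h>
    #include <stdint.h>

    int main (void)
    {
      int64_t val = 0x123489ab;
      int64_t low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      assert (low == -0x7655);    /* 0x89ab sign-extended to 64 bits */
      int64_t high = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;
      assert (high + low == val); /* the two-part reconstruction */
      return 0;
    }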
1284 } 1331 }
1285 1332
1286 return NULL_RTX; 1333 return NULL_RTX;
1287 } 1334 }
1288 1335
1336 /* Return the cost of moving between registers of various classes. Moving
1337 between FLOAT_REGS and anything else except float regs is expensive.
1338 In fact, we make it quite expensive because we really don't want to
1339 do these moves unless it is clearly worth it. Optimizations may
1340 reduce the impact of not being able to allocate a pseudo to a
1341 hard register. */
1342
1343 static int
1344 alpha_register_move_cost (machine_mode /*mode*/,
1345 reg_class_t from, reg_class_t to)
1346 {
1347 if ((from == FLOAT_REGS) == (to == FLOAT_REGS))
1348 return 2;
1349
1350 if (TARGET_FIX)
1351 return (from == FLOAT_REGS) ? 6 : 8;
1352
1353 return 4 + 2 * alpha_memory_latency;
1354 }
1355
1356 /* Return the cost of moving data of MODE from a register to
1357 or from memory. On the Alpha, bump this up a bit. */
1358
1359 static int
1360 alpha_memory_move_cost (machine_mode /*mode*/, reg_class_t /*regclass*/,
1361 bool /*in*/)
1362 {
1363 return 2 * alpha_memory_latency;
1364 }
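
With the default alpha_memory_latency of 3 from the top of the file, the numbers above work out to: 2 for a move within a register file, 4 + 2*3 = 10 for a GP/FP cross without the FIX extension, and 2*3 = 6 per memory move, so cross-file copies are priced above a single spill or reload. A trivial runnable restatement:

    #include <assert.h>

    int main (void)
    {
      const int alpha_memory_latency = 3;             /* default from above */
      int cross_file = 4 + 2 * alpha_memory_latency;  /* GP<->FP, no FIX */
      int mem_move = 2 * alpha_memory_latency;        /* one memory move */
      assert (cross_file == 10 && mem_move == 6);
      return 0;
    }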
1365
1289 /* Compute a (partial) cost for rtx X. Return true if the complete 1366 /* Compute a (partial) cost for rtx X. Return true if the complete
1290 cost has been computed, and false if subexpressions should be 1367 cost has been computed, and false if subexpressions should be
1291 scanned. In either case, *TOTAL contains the cost result. */ 1368 scanned. In either case, *TOTAL contains the cost result. */
1292 1369
1293 static bool 1370 static bool
1294 alpha_rtx_costs (rtx x, int code, int outer_code, int *total, 1371 alpha_rtx_costs (rtx x, machine_mode mode, int outer_code, int opno, int *total,
1295 bool speed) 1372 bool speed)
1296 { 1373 {
1297 enum machine_mode mode = GET_MODE (x); 1374 int code = GET_CODE (x);
1298 bool float_mode_p = FLOAT_MODE_P (mode); 1375 bool float_mode_p = FLOAT_MODE_P (mode);
1299 const struct alpha_rtx_cost_data *cost_data; 1376 const struct alpha_rtx_cost_data *cost_data;
1300 1377
1301 if (!speed) 1378 if (!speed)
1302 cost_data = &alpha_rtx_cost_size; 1379 cost_data = &alpha_rtx_cost_size;
1317 return true; 1394 return true;
1318 } 1395 }
1319 /* FALLTHRU */ 1396 /* FALLTHRU */
1320 1397
1321 case CONST_DOUBLE: 1398 case CONST_DOUBLE:
1399 case CONST_WIDE_INT:
1322 if (x == CONST0_RTX (mode)) 1400 if (x == CONST0_RTX (mode))
1323 *total = 0; 1401 *total = 0;
1324 else if ((outer_code == PLUS && add_operand (x, VOIDmode)) 1402 else if ((outer_code == PLUS && add_operand (x, VOIDmode))
1325 || (outer_code == AND && and_operand (x, VOIDmode))) 1403 || (outer_code == AND && and_operand (x, VOIDmode)))
1326 *total = 0; 1404 *total = 0;
1356 if (float_mode_p) 1434 if (float_mode_p)
1357 *total = cost_data->fp_add; 1435 *total = cost_data->fp_add;
1358 else if (GET_CODE (XEXP (x, 0)) == MULT 1436 else if (GET_CODE (XEXP (x, 0)) == MULT
1359 && const48_operand (XEXP (XEXP (x, 0), 1), VOIDmode)) 1437 && const48_operand (XEXP (XEXP (x, 0), 1), VOIDmode))
1360 { 1438 {
1361 *total = (rtx_cost (XEXP (XEXP (x, 0), 0), 1439 *total = (rtx_cost (XEXP (XEXP (x, 0), 0), mode,
1362 (enum rtx_code) outer_code, speed) 1440 (enum rtx_code) outer_code, opno, speed)
1363 + rtx_cost (XEXP (x, 1), 1441 + rtx_cost (XEXP (x, 1), mode,
1364 (enum rtx_code) outer_code, speed) 1442 (enum rtx_code) outer_code, opno, speed)
1365 + COSTS_N_INSNS (1)); 1443 + COSTS_N_INSNS (1));
1366 return true; 1444 return true;
1367 } 1445 }
1368 return false; 1446 return false;
1369 1447
1460 rtx base; 1538 rtx base;
1461 HOST_WIDE_INT disp, offset; 1539 HOST_WIDE_INT disp, offset;
1462 1540
1463 gcc_assert (MEM_P (ref)); 1541 gcc_assert (MEM_P (ref));
1464 1542
1465 if (reload_in_progress 1543 if (reload_in_progress)
1466 && ! memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
1467 { 1544 {
1468 base = find_replacement (&XEXP (ref, 0)); 1545 base = find_replacement (&XEXP (ref, 0));
1469 gcc_assert (memory_address_p (GET_MODE (ref), base)); 1546 gcc_assert (memory_address_p (GET_MODE (ref), base));
1470 } 1547 }
1471 else 1548 else
1491 1568
1492 /* Access the entire aligned word. */ 1569 /* Access the entire aligned word. */
1493 *paligned_mem = widen_memory_access (ref, SImode, -offset); 1570 *paligned_mem = widen_memory_access (ref, SImode, -offset);
1494 1571
1495 /* Convert the byte offset within the word to a bit offset. */ 1572 /* Convert the byte offset within the word to a bit offset. */
1496 if (WORDS_BIG_ENDIAN) 1573 offset *= BITS_PER_UNIT;
1497 offset = 32 - (GET_MODE_BITSIZE (GET_MODE (ref)) + offset * 8);
1498 else
1499 offset *= 8;
1500 *pbitnum = GEN_INT (offset); 1574 *pbitnum = GEN_INT (offset);
1501 } 1575 }
1502 1576
1503 /* Similar, but just get the address. Handle the two reload cases. 1577 /* Similar, but just get the address. Handle the two reload cases.
1504 Add EXTRA_OFFSET to the address we return. */ 1578 Add EXTRA_OFFSET to the address we return. */
1509 rtx base; 1583 rtx base;
1510 HOST_WIDE_INT offset = 0; 1584 HOST_WIDE_INT offset = 0;
1511 1585
1512 gcc_assert (MEM_P (ref)); 1586 gcc_assert (MEM_P (ref));
1513 1587
1514 if (reload_in_progress 1588 if (reload_in_progress)
1515 && ! memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
1516 { 1589 {
1517 base = find_replacement (&XEXP (ref, 0)); 1590 base = find_replacement (&XEXP (ref, 0));
1518
1519 gcc_assert (memory_address_p (GET_MODE (ref), base)); 1591 gcc_assert (memory_address_p (GET_MODE (ref), base));
1520 } 1592 }
1521 else 1593 else
1522 base = XEXP (ref, 0); 1594 base = XEXP (ref, 0);
1523 1595
1524 if (GET_CODE (base) == PLUS) 1596 if (GET_CODE (base) == PLUS)
1525 offset += INTVAL (XEXP (base, 1)), base = XEXP (base, 0); 1597 offset += INTVAL (XEXP (base, 1)), base = XEXP (base, 0);
1526 1598
1527 return plus_constant (base, offset); 1599 return plus_constant (Pmode, base, offset);
1528 } 1600 }
1529 1601
1530 /* Compute a value X, such that X & 7 == (ADDR + OFS) & 7. 1602 /* Compute a value X, such that X & 7 == (ADDR + OFS) & 7.
1531 X is always returned in a register. */ 1603 X is always returned in a register. */
1532 1604
1554 /* Zero is present in any register class. */ 1626 /* Zero is present in any register class. */
1555 if (x == CONST0_RTX (GET_MODE (x))) 1627 if (x == CONST0_RTX (GET_MODE (x)))
1556 return rclass; 1628 return rclass;
1557 1629
1558 /* These sorts of constants we can easily drop to memory. */ 1630 /* These sorts of constants we can easily drop to memory. */
1559 if (CONST_INT_P (x) 1631 if (CONST_SCALAR_INT_P (x)
1560 || GET_CODE (x) == CONST_DOUBLE 1632 || CONST_DOUBLE_P (x)
1561 || GET_CODE (x) == CONST_VECTOR) 1633 || GET_CODE (x) == CONST_VECTOR)
1562 { 1634 {
1563 if (rclass == FLOAT_REGS) 1635 if (rclass == FLOAT_REGS)
1564 return NO_REGS; 1636 return NO_REGS;
1565 if (rclass == ALL_REGS) 1637 if (rclass == ALL_REGS)
1580 RCLASS requires an extra scratch or immediate register. Return the class 1652 RCLASS requires an extra scratch or immediate register. Return the class
1581 needed for the immediate register. */ 1653 needed for the immediate register. */
1582 1654
1583 static reg_class_t 1655 static reg_class_t
1584 alpha_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i, 1656 alpha_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
1585 enum machine_mode mode, secondary_reload_info *sri) 1657 machine_mode mode, secondary_reload_info *sri)
1586 { 1658 {
1587 enum reg_class rclass = (enum reg_class) rclass_i; 1659 enum reg_class rclass = (enum reg_class) rclass_i;
1588 1660
1589 /* Loading and storing HImode or QImode values to and from memory 1661 /* Loading and storing HImode or QImode values to and from memory
1590 usually requires a scratch register. */ 1662 usually requires a scratch register. */
1614 return GENERAL_REGS; 1686 return GENERAL_REGS;
1615 } 1687 }
1616 1688
1617 return NO_REGS; 1689 return NO_REGS;
1618 } 1690 }
1691
1692 /* Implement TARGET_SECONDARY_MEMORY_NEEDED.
1693
1694 If we are copying between general and FP registers, we need a memory
1695 location unless the FIX extension is available. */
1696
1697 static bool
1698 alpha_secondary_memory_needed (machine_mode, reg_class_t class1,
1699 reg_class_t class2)
1700 {
1701 return (!TARGET_FIX
1702 && ((class1 == FLOAT_REGS && class2 != FLOAT_REGS)
1703 || (class2 == FLOAT_REGS && class1 != FLOAT_REGS)));
1704 }
1705
1706 /* Implement TARGET_SECONDARY_MEMORY_NEEDED_MODE. If MODE is
1707 floating-point, use it. Otherwise, widen to a word like the default.
1708 This is needed because we always store integers in FP registers in
1709 quadword format. This whole area is very tricky! */
1710
1711 static machine_mode
1712 alpha_secondary_memory_needed_mode (machine_mode mode)
1713 {
1714 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
1715 return mode;
1716 if (GET_MODE_SIZE (mode) >= 4)
1717 return mode;
1718 return mode_for_size (BITS_PER_WORD, GET_MODE_CLASS (mode), 0).require ();
1719 }
1619 1720
1620 /* Subfunction of the following function. Update the flags of any MEM
1621 found in part of X. */
1622
1623 static int
1624 alpha_set_memflags_1 (rtx *xp, void *data)
1625 {
1626 rtx x = *xp, orig = (rtx) data;
1627
1628 if (!MEM_P (x))
1629 return 0;
1630
1631 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (orig);
1632 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (orig);
1633 MEM_SCALAR_P (x) = MEM_SCALAR_P (orig);
1634 MEM_NOTRAP_P (x) = MEM_NOTRAP_P (orig);
1635 MEM_READONLY_P (x) = MEM_READONLY_P (orig);
1636
1637 /* Sadly, we cannot use alias sets because the extra aliasing
1638 produced by the AND interferes. Given that two-byte quantities
1639 are the only thing we would be able to differentiate anyway,
1640 there does not seem to be any point in convoluting the early
1641 out of the alias check. */
1642
1643 return -1;
1644 }
1645
1646 /* Given SEQ, which is an INSN list, look for any MEMs in either 1721 /* Given SEQ, which is an INSN list, look for any MEMs in either
1647 a SET_DEST or a SET_SRC and copy the volatile, no-trap, and 1722 a SET_DEST or a SET_SRC and copy the volatile, no-trap, and
1648 read-only flags from REF into each of the MEMs found. If REF is not 1723 read-only flags from REF into each of the MEMs found. If REF is not
1649 a MEM, don't do anything. */ 1724 a MEM, don't do anything. */
1650 1725
1651 void 1726 void
1652 alpha_set_memflags (rtx seq, rtx ref) 1727 alpha_set_memflags (rtx seq, rtx ref)
1653 { 1728 {
1654 rtx insn; 1729 rtx_insn *insn;
1655 1730
1656 if (!MEM_P (ref)) 1731 if (!MEM_P (ref))
1657 return; 1732 return;
1658 1733
1659 /* This is only called from alpha.md, after having had something 1734 /* This is only called from alpha.md, after having had something
1660 generated from one of the insn patterns. So if everything is 1735 generated from one of the insn patterns. So if everything is
1661 zero, the pattern is already up-to-date. */ 1736 zero, the pattern is already up-to-date. */
1662 if (!MEM_VOLATILE_P (ref) 1737 if (!MEM_VOLATILE_P (ref)
1663 && !MEM_IN_STRUCT_P (ref)
1664 && !MEM_SCALAR_P (ref)
1665 && !MEM_NOTRAP_P (ref) 1738 && !MEM_NOTRAP_P (ref)
1666 && !MEM_READONLY_P (ref)) 1739 && !MEM_READONLY_P (ref))
1667 return; 1740 return;
1668 1741
1669 for (insn = seq; insn; insn = NEXT_INSN (insn)) 1742 subrtx_var_iterator::array_type array;
1743 for (insn = as_a <rtx_insn *> (seq); insn; insn = NEXT_INSN (insn))
1670 if (INSN_P (insn)) 1744 if (INSN_P (insn))
1671 for_each_rtx (&PATTERN (insn), alpha_set_memflags_1, (void *) ref); 1745 FOR_EACH_SUBRTX_VAR (iter, array, PATTERN (insn), NONCONST)
1746 {
1747 rtx x = *iter;
1748 if (MEM_P (x))
1749 {
1750 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (ref);
1751 MEM_NOTRAP_P (x) = MEM_NOTRAP_P (ref);
1752 MEM_READONLY_P (x) = MEM_READONLY_P (ref);
1753 /* Sadly, we cannot use alias sets because the extra
1754 aliasing produced by the AND interferes. Given that
1755 two-byte quantities are the only thing we would be
1756 able to differentiate anyway, there does not seem to
1757 be any point in convoluting the early out of the
1758 alias check. */
1759 iter.skip_subrtxes ();
1760 }
1761 }
1672 else 1762 else
1673 gcc_unreachable (); 1763 gcc_unreachable ();
1674 } 1764 }
1675 1765
1676 static rtx alpha_emit_set_const (rtx, enum machine_mode, HOST_WIDE_INT, 1766 static rtx alpha_emit_set_const (rtx, machine_mode, HOST_WIDE_INT,
1677 int, bool); 1767 int, bool);
1678 1768
1679 /* Internal routine for alpha_emit_set_const to check for N or below insns. 1769 /* Internal routine for alpha_emit_set_const to check for N or below insns.
1680 If NO_OUTPUT is true, then we only check to see if N insns are possible, 1770 If NO_OUTPUT is true, then we only check to see if N insns are possible,
1681 and return pc_rtx if successful. */ 1771 and return pc_rtx if successful. */
1682 1772
1683 static rtx 1773 static rtx
1684 alpha_emit_set_const_1 (rtx target, enum machine_mode mode, 1774 alpha_emit_set_const_1 (rtx target, machine_mode mode,
1685 HOST_WIDE_INT c, int n, bool no_output) 1775 HOST_WIDE_INT c, int n, bool no_output)
1686 { 1776 {
1687 HOST_WIDE_INT new_const; 1777 HOST_WIDE_INT new_const;
1688 int i, bits; 1778 int i, bits;
1689 /* Use a pseudo if highly optimizing and still generating RTL. */ 1779 /* Use a pseudo if highly optimizing and still generating RTL. */
1690 rtx subtarget 1780 rtx subtarget
1691 = (flag_expensive_optimizations && can_create_pseudo_p () ? 0 : target); 1781 = (flag_expensive_optimizations && can_create_pseudo_p () ? 0 : target);
1692 rtx temp, insn; 1782 rtx temp, insn;
1693 1783
1694 /* If this is a sign-extended 32-bit constant, we can do this in at most 1784 /* If this is a sign-extended 32-bit constant, we can do this in at most
1695 three insns, so do it if we have enough insns left. We always have 1785 three insns, so do it if we have enough insns left. */
1696 a sign-extended 32-bit constant when compiling on a narrow machine. */ 1786
1697 1787 if (c >> 31 == -1 || c >> 31 == 0)
1698 if (HOST_BITS_PER_WIDE_INT != 64
1699 || c >> 31 == -1 || c >> 31 == 0)
1700 { 1788 {
1701 HOST_WIDE_INT low = ((c & 0xffff) ^ 0x8000) - 0x8000; 1789 HOST_WIDE_INT low = ((c & 0xffff) ^ 0x8000) - 0x8000;
1702 HOST_WIDE_INT tmp1 = c - low; 1790 HOST_WIDE_INT tmp1 = c - low;
1703 HOST_WIDE_INT high = (((tmp1 >> 16) & 0xffff) ^ 0x8000) - 0x8000; 1791 HOST_WIDE_INT high = (((tmp1 >> 16) & 0xffff) ^ 0x8000) - 0x8000;
1704 HOST_WIDE_INT extra = 0; 1792 HOST_WIDE_INT extra = 0;
1723 1811
1724 if (no_output) 1812 if (no_output)
1725 return pc_rtx; 1813 return pc_rtx;
1726 if (target == NULL) 1814 if (target == NULL)
1727 target = gen_reg_rtx (mode); 1815 target = gen_reg_rtx (mode);
1728 emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (c))); 1816 emit_insn (gen_rtx_SET (target, GEN_INT (c)));
1729 return target; 1817 return target;
1730 } 1818 }
1731 else if (n >= 2 + (extra != 0)) 1819 else if (n >= 2 + (extra != 0))
1732 { 1820 {
1733 if (no_output) 1821 if (no_output)
1734 return pc_rtx; 1822 return pc_rtx;
1735 if (!can_create_pseudo_p ()) 1823 if (!can_create_pseudo_p ())
1736 { 1824 {
1737 emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (high << 16))); 1825 emit_insn (gen_rtx_SET (target, GEN_INT (high << 16)));
1738 temp = target; 1826 temp = target;
1739 } 1827 }
1740 else 1828 else
1741 temp = copy_to_suggested_reg (GEN_INT (high << 16), 1829 temp = copy_to_suggested_reg (GEN_INT (high << 16),
1742 subtarget, mode); 1830 subtarget, mode);
1750 if (extra != 0) 1838 if (extra != 0)
1751 { 1839 {
1752 if (! subtarget) 1840 if (! subtarget)
1753 subtarget = gen_reg_rtx (mode); 1841 subtarget = gen_reg_rtx (mode);
1754 insn = gen_rtx_PLUS (mode, temp, GEN_INT (extra << 16)); 1842 insn = gen_rtx_PLUS (mode, temp, GEN_INT (extra << 16));
1755 insn = gen_rtx_SET (VOIDmode, subtarget, insn); 1843 insn = gen_rtx_SET (subtarget, insn);
1756 emit_insn (insn); 1844 emit_insn (insn);
1757 temp = subtarget; 1845 temp = subtarget;
1758 } 1846 }
1759 1847
1760 if (target == NULL) 1848 if (target == NULL)
1761 target = gen_reg_rtx (mode); 1849 target = gen_reg_rtx (mode);
1762 insn = gen_rtx_PLUS (mode, temp, GEN_INT (low)); 1850 insn = gen_rtx_PLUS (mode, temp, GEN_INT (low));
1763 insn = gen_rtx_SET (VOIDmode, target, insn); 1851 insn = gen_rtx_SET (target, insn);
1764 emit_insn (insn); 1852 emit_insn (insn);
1765 return target; 1853 return target;
1766 } 1854 }
1767 } 1855 }
1768 1856
1836 } 1924 }
1837 1925
1838 /* Now try high-order zero bits. Here we try the shifted-in bits as 1926 /* Now try high-order zero bits. Here we try the shifted-in bits as
1839 all zero and all ones. Be careful to avoid shifting outside the 1927 all zero and all ones. Be careful to avoid shifting outside the
1840 mode and to avoid shifting outside the host wide int size. */ 1928 mode and to avoid shifting outside the host wide int size. */
1841 /* On narrow hosts, don't shift a 1 into the high bit, since we'll
1842 confuse the recursive call and set all of the high 32 bits. */
1843 1929
1844 bits = (MIN (HOST_BITS_PER_WIDE_INT, GET_MODE_SIZE (mode) * 8) 1930 bits = (MIN (HOST_BITS_PER_WIDE_INT, GET_MODE_SIZE (mode) * 8)
1845 - floor_log2 (c) - 1 - (HOST_BITS_PER_WIDE_INT < 64)); 1931 - floor_log2 (c) - 1);
1846 if (bits > 0) 1932 if (bits > 0)
1847 for (; bits > 0; bits--) 1933 for (; bits > 0; bits--)
1848 { 1934 {
1849 new_const = c << bits; 1935 new_const = c << bits;
1850 temp = alpha_emit_set_const (subtarget, mode, new_const, i, no_output); 1936 temp = alpha_emit_set_const (subtarget, mode, new_const, i, no_output);
1851 if (!temp) 1937 if (!temp)
1852 { 1938 {
1853 new_const = (c << bits) | (((HOST_WIDE_INT) 1 << bits) - 1); 1939 new_const = (c << bits) | ((HOST_WIDE_INT_1U << bits) - 1);
1854 temp = alpha_emit_set_const (subtarget, mode, new_const, 1940 temp = alpha_emit_set_const (subtarget, mode, new_const,
1855 i, no_output); 1941 i, no_output);
1856 } 1942 }
1857 if (temp) 1943 if (temp)
1858 { 1944 {
1874 { 1960 {
1875 new_const = c << bits; 1961 new_const = c << bits;
1876 temp = alpha_emit_set_const (subtarget, mode, new_const, i, no_output); 1962 temp = alpha_emit_set_const (subtarget, mode, new_const, i, no_output);
1877 if (!temp) 1963 if (!temp)
1878 { 1964 {
1879 new_const = (c << bits) | (((HOST_WIDE_INT) 1 << bits) - 1); 1965 new_const = (c << bits) | ((HOST_WIDE_INT_1U << bits) - 1);
1880 temp = alpha_emit_set_const (subtarget, mode, new_const, 1966 temp = alpha_emit_set_const (subtarget, mode, new_const,
1881 i, no_output); 1967 i, no_output);
1882 } 1968 }
1883 if (temp) 1969 if (temp)
1884 { 1970 {
1888 target, 0, OPTAB_WIDEN); 1974 target, 0, OPTAB_WIDEN);
1889 } 1975 }
1890 } 1976 }
1891 } 1977 }
1892 1978
1893 #if HOST_BITS_PER_WIDE_INT == 64
1894 /* Finally, see if we can load a value into the target that is the same as the 1979 /* Finally, see if we can load a value into the target that is the same as the
1895 constant except that all bytes that are 0 are changed to be 0xff. If we 1980 constant except that all bytes that are 0 are changed to be 0xff. If we
1896 can, then we can do a ZAPNOT to obtain the desired constant. */ 1981 can, then we can do a ZAPNOT to obtain the desired constant. */
1897 1982
1898 new_const = c; 1983 new_const = c;
1915 return temp; 2000 return temp;
1916 return expand_binop (mode, and_optab, temp, GEN_INT (c | ~ new_const), 2001 return expand_binop (mode, and_optab, temp, GEN_INT (c | ~ new_const),
1917 target, 0, OPTAB_WIDEN); 2002 target, 0, OPTAB_WIDEN);
1918 } 2003 }
1919 } 2004 }
1920 #endif
1921 2005
1922 return 0; 2006 return 0;
1923 } 2007 }
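As a concrete illustration of the sign-extended 32-bit case above, here is a minimal standalone sketch (ours, not GCC code; the "extra" adjustment for the overflowing-high case is omitted) of the LDAH/LDA split:

#include <assert.h>
#include <stdint.h>

/* Split C so that C == (HIGH << 16) + LOW, with both halves
   sign-extended 16-bit values that fit LDAH/LDA immediates.  */
static void
split_lda_ldah (int64_t c, int64_t *high, int64_t *low)
{
  *low  = ((c & 0xffff) ^ 0x8000) - 0x8000;
  *high = ((((c - *low) >> 16) & 0xffff) ^ 0x8000) - 0x8000;
}

int
main (void)
{
  int64_t high, low, c = 0x12348765;       /* arbitrary example */
  split_lda_ldah (c, &high, &low);
  /* Two insns: ldah $r,high($31); lda $r,low($r).  */
  assert ((high << 16) + low == c);
  return 0;
}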
1924 2008
1925 /* Try to output insns to set TARGET equal to the constant C if it can be 2009 /* Try to output insns to set TARGET equal to the constant C if it can be
1927 where the output has been placed if it can be done and the insns have been 2011 where the output has been placed if it can be done and the insns have been
1928 emitted. If it would take more than N insns, zero is returned and no 2012 emitted. If it would take more than N insns, zero is returned and no
1929 insns are emitted. */ 2013 insns are emitted. */
1930 2014
1931 static rtx 2015 static rtx
1932 alpha_emit_set_const (rtx target, enum machine_mode mode, 2016 alpha_emit_set_const (rtx target, machine_mode mode,
1933 HOST_WIDE_INT c, int n, bool no_output) 2017 HOST_WIDE_INT c, int n, bool no_output)
1934 { 2018 {
1935 enum machine_mode orig_mode = mode; 2019 machine_mode orig_mode = mode;
1936 rtx orig_target = target; 2020 rtx orig_target = target;
1937 rtx result = 0; 2021 rtx result = 0;
1938 int i; 2022 int i;
1939 2023
1940 /* If we can't make any pseudos, TARGET is an SImode hard register, we 2024 /* If we can't make any pseudos, TARGET is an SImode hard register, we
1959 for (i = 1; i <= n; i++) 2043 for (i = 1; i <= n; i++)
1960 { 2044 {
1961 result = alpha_emit_set_const_1 (target, mode, c, i, no_output); 2045 result = alpha_emit_set_const_1 (target, mode, c, i, no_output);
1962 if (result) 2046 if (result)
1963 { 2047 {
1964 rtx insn, set; 2048 rtx_insn *insn;
2049 rtx set;
1965 2050
1966 if (no_output) 2051 if (no_output)
1967 return result; 2052 return result;
1968 2053
1969 insn = get_last_insn (); 2054 insn = get_last_insn ();
1990 fall back to a straightforward decomposition. We do this to avoid 2075 fall back to a straightforward decomposition. We do this to avoid
1991 exponential run times encountered when looking for longer sequences 2076 exponential run times encountered when looking for longer sequences
1992 with alpha_emit_set_const. */ 2077 with alpha_emit_set_const. */
1993 2078
1994 static rtx 2079 static rtx
1995 alpha_emit_set_long_const (rtx target, HOST_WIDE_INT c1, HOST_WIDE_INT c2) 2080 alpha_emit_set_long_const (rtx target, HOST_WIDE_INT c1)
1996 { 2081 {
1997 HOST_WIDE_INT d1, d2, d3, d4; 2082 HOST_WIDE_INT d1, d2, d3, d4;
1998 2083
1999 /* Decompose the entire word */ 2084 /* Decompose the entire word */
2000 #if HOST_BITS_PER_WIDE_INT >= 64 2085
2001 gcc_assert (c2 == -(c1 < 0));
2002 d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000; 2086 d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
2003 c1 -= d1; 2087 c1 -= d1;
2004 d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000; 2088 d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
2005 c1 = (c1 - d2) >> 32; 2089 c1 = (c1 - d2) >> 32;
2006 d3 = ((c1 & 0xffff) ^ 0x8000) - 0x8000; 2090 d3 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
2007 c1 -= d3; 2091 c1 -= d3;
2008 d4 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000; 2092 d4 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
2009 gcc_assert (c1 == d4); 2093 gcc_assert (c1 == d4);
2010 #else
2011 d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
2012 c1 -= d1;
2013 d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
2014 gcc_assert (c1 == d2);
2015 c2 += (d2 < 0);
2016 d3 = ((c2 & 0xffff) ^ 0x8000) - 0x8000;
2017 c2 -= d3;
2018 d4 = ((c2 & 0xffffffff) ^ 0x80000000) - 0x80000000;
2019 gcc_assert (c2 == d4);
2020 #endif
2021 2094
2022 /* Construct the high word */ 2095 /* Construct the high word */
2023 if (d4) 2096 if (d4)
2024 { 2097 {
2025 emit_move_insn (target, GEN_INT (d4)); 2098 emit_move_insn (target, GEN_INT (d4));
2039 emit_move_insn (target, gen_rtx_PLUS (DImode, target, GEN_INT (d1))); 2112 emit_move_insn (target, gen_rtx_PLUS (DImode, target, GEN_INT (d1)));
2040 2113
2041 return target; 2114 return target;
2042 } 2115 }
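A worked instance (ours) of the four-part decomposition above, checked in plain C: for c = 0x123456789abcdef0 it yields d1 = -0x2110, d2 = -0x65430000, d3 = 0x5679, d4 = 0x12340000, i.e. ldah/lda, sll by 32, then ldah/lda again.

#include <assert.h>
#include <stdint.h>

int
main (void)
{
  int64_t c = 0x123456789abcdef0, d1, d2, d3, d4, t;
  d1 = ((c & 0xffff) ^ 0x8000) - 0x8000;              c -= d1;
  d2 = ((c & 0xffffffff) ^ 0x80000000) - 0x80000000;  c = (c - d2) >> 32;
  d3 = ((c & 0xffff) ^ 0x8000) - 0x8000;              c -= d3;
  d4 = ((c & 0xffffffff) ^ 0x80000000) - 0x80000000;
  assert (c == d4);
  t = d4;  t += d3;  t <<= 32;  t += d2;  t += d1;    /* five insns */
  assert (t == 0x123456789abcdef0);
  return 0;
}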
2043 2116
2044 /* Given an integral CONST_INT, CONST_DOUBLE, or CONST_VECTOR, return 2117 /* Given an integral CONST_INT or CONST_VECTOR, return the low 64 bits. */
2045 the low 64 bits. */ 2118
2046 2119 static HOST_WIDE_INT
2047 static void 2120 alpha_extract_integer (rtx x)
2048 alpha_extract_integer (rtx x, HOST_WIDE_INT *p0, HOST_WIDE_INT *p1) 2121 {
2049 {
2050 HOST_WIDE_INT i0, i1;
2051
2052 if (GET_CODE (x) == CONST_VECTOR) 2122 if (GET_CODE (x) == CONST_VECTOR)
2053 x = simplify_subreg (DImode, x, GET_MODE (x), 0); 2123 x = simplify_subreg (DImode, x, GET_MODE (x), 0);
2054 2124
2055 2125 gcc_assert (CONST_INT_P (x));
2056 if (CONST_INT_P (x)) 2126
2057 { 2127 return INTVAL (x);
2058 i0 = INTVAL (x); 2128 }
2059 i1 = -(i0 < 0); 2129
2060 } 2130 /* Implement TARGET_LEGITIMATE_CONSTANT_P. This is all constants for which
2061 else if (HOST_BITS_PER_WIDE_INT >= 64) 2131 we are willing to load the value into a register via a move pattern.
2062 {
2063 i0 = CONST_DOUBLE_LOW (x);
2064 i1 = -(i0 < 0);
2065 }
2066 else
2067 {
2068 i0 = CONST_DOUBLE_LOW (x);
2069 i1 = CONST_DOUBLE_HIGH (x);
2070 }
2071
2072 *p0 = i0;
2073 *p1 = i1;
2074 }
2075
2076 /* Implement LEGITIMATE_CONSTANT_P. This is all constants for which we
2077 are willing to load the value into a register via a move pattern.
2078 Normally this is all symbolic constants, integral constants that 2132 Normally this is all symbolic constants, integral constants that
2079 take three or fewer instructions, and floating-point zero. */ 2133 take three or fewer instructions, and floating-point zero. */
2080 2134
2081 bool 2135 bool
2082 alpha_legitimate_constant_p (rtx x) 2136 alpha_legitimate_constant_p (machine_mode mode, rtx x)
2083 { 2137 {
2084 enum machine_mode mode = GET_MODE (x); 2138 HOST_WIDE_INT i0;
2085 HOST_WIDE_INT i0, i1;
2086 2139
2087 switch (GET_CODE (x)) 2140 switch (GET_CODE (x))
2088 { 2141 {
2089 case LABEL_REF: 2142 case LABEL_REF:
2090 case HIGH: 2143 case HIGH:
2091 return true; 2144 return true;
2092 2145
2093 case CONST: 2146 case CONST:
2094 if (GET_CODE (XEXP (x, 0)) == PLUS 2147 if (GET_CODE (XEXP (x, 0)) == PLUS
2095 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT) 2148 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
2096 x = XEXP (XEXP (x, 0), 0); 2149 x = XEXP (XEXP (x, 0), 0);
2097 else 2150 else
2098 return true; 2151 return true;
2099 2152
2100 if (GET_CODE (x) != SYMBOL_REF) 2153 if (GET_CODE (x) != SYMBOL_REF)
2101 return true; 2154 return true;
2102
2103 /* FALLTHRU */ 2155 /* FALLTHRU */
2104 2156
2105 case SYMBOL_REF: 2157 case SYMBOL_REF:
2106 /* TLS symbols are never valid. */ 2158 /* TLS symbols are never valid. */
2107 return SYMBOL_REF_TLS_MODEL (x) == 0; 2159 return SYMBOL_REF_TLS_MODEL (x) == 0;
2108 2160
2161 case CONST_WIDE_INT:
2162 if (TARGET_BUILD_CONSTANTS)
2163 return true;
2164 if (x == CONST0_RTX (mode))
2165 return true;
2166 mode = DImode;
2167 gcc_assert (CONST_WIDE_INT_NUNITS (x) == 2);
2168 i0 = CONST_WIDE_INT_ELT (x, 1);
2169 if (alpha_emit_set_const_1 (NULL_RTX, mode, i0, 3, true) == NULL)
2170 return false;
2171 i0 = CONST_WIDE_INT_ELT (x, 0);
2172 goto do_integer;
2173
2109 case CONST_DOUBLE: 2174 case CONST_DOUBLE:
2110 if (x == CONST0_RTX (mode)) 2175 if (x == CONST0_RTX (mode))
2111 return true; 2176 return true;
2112 if (FLOAT_MODE_P (mode)) 2177 return false;
2113 return false;
2114 goto do_integer;
2115 2178
2116 case CONST_VECTOR: 2179 case CONST_VECTOR:
2117 if (x == CONST0_RTX (mode)) 2180 if (x == CONST0_RTX (mode))
2118 return true; 2181 return true;
2119 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT) 2182 if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
2120 return false; 2183 return false;
2121 if (GET_MODE_SIZE (mode) != 8) 2184 if (GET_MODE_SIZE (mode) != 8)
2122 return false; 2185 return false;
2123 goto do_integer; 2186 /* FALLTHRU */
2124 2187
2125 case CONST_INT: 2188 case CONST_INT:
2126 do_integer:
2127 if (TARGET_BUILD_CONSTANTS) 2189 if (TARGET_BUILD_CONSTANTS)
2128 return true; 2190 return true;
2129 alpha_extract_integer (x, &i0, &i1); 2191 i0 = alpha_extract_integer (x);
2130 if (HOST_BITS_PER_WIDE_INT >= 64 || i1 == (-i0 < 0)) 2192 do_integer:
2131 return alpha_emit_set_const_1 (x, mode, i0, 3, true) != NULL; 2193 return alpha_emit_set_const_1 (NULL_RTX, mode, i0, 3, true) != NULL;
2132 return false;
2133 2194
2134 default: 2195 default:
2135 return false; 2196 return false;
2136 } 2197 }
2137 } 2198 }
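Concretely (our examples): 0x7fff is a single lda and 0x12348765 an ldah/lda pair, so both satisfy the three-insn test; a constant like 0x123456789abcdef0 generally needs the full five-insn sequence sketched above and is accepted here only when TARGET_BUILD_CONSTANTS is set.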
2138 2199
2139 /* Operand 1 is known to be a constant, and should require more than one 2200 /* Operand 1 is known to be a constant, and should require more than one
2140 instruction to load. Emit that multi-part load. */ 2201 instruction to load. Emit that multi-part load. */
2141 2202
2142 bool 2203 bool
2143 alpha_split_const_mov (enum machine_mode mode, rtx *operands) 2204 alpha_split_const_mov (machine_mode mode, rtx *operands)
2144 { 2205 {
2145 HOST_WIDE_INT i0, i1; 2206 HOST_WIDE_INT i0;
2146 rtx temp = NULL_RTX; 2207 rtx temp = NULL_RTX;
2147 2208
2148 alpha_extract_integer (operands[1], &i0, &i1); 2209 i0 = alpha_extract_integer (operands[1]);
2149 2210
2150 if (HOST_BITS_PER_WIDE_INT >= 64 || i1 == -(i0 < 0)) 2211 temp = alpha_emit_set_const (operands[0], mode, i0, 3, false);
2151 temp = alpha_emit_set_const (operands[0], mode, i0, 3, false);
2152 2212
2153 if (!temp && TARGET_BUILD_CONSTANTS) 2213 if (!temp && TARGET_BUILD_CONSTANTS)
2154 temp = alpha_emit_set_long_const (operands[0], i0, i1); 2214 temp = alpha_emit_set_long_const (operands[0], i0);
2155 2215
2156 if (temp) 2216 if (temp)
2157 { 2217 {
2158 if (!rtx_equal_p (operands[0], temp)) 2218 if (!rtx_equal_p (operands[0], temp))
2159 emit_move_insn (operands[0], temp); 2219 emit_move_insn (operands[0], temp);
2165 2225
2166 /* Expand a move instruction; return true if all work is done. 2226 /* Expand a move instruction; return true if all work is done.
2167 We don't handle non-bwx subword loads here. */ 2227 We don't handle non-bwx subword loads here. */
2168 2228
2169 bool 2229 bool
2170 alpha_expand_mov (enum machine_mode mode, rtx *operands) 2230 alpha_expand_mov (machine_mode mode, rtx *operands)
2171 { 2231 {
2172 rtx tmp; 2232 rtx tmp;
2173 2233
2174 /* If the output is not a register, the input must be. */ 2234 /* If the output is not a register, the input must be. */
2175 if (MEM_P (operands[0]) 2235 if (MEM_P (operands[0])
2193 if (! CONSTANT_P (operands[1]) || input_operand (operands[1], mode)) 2253 if (! CONSTANT_P (operands[1]) || input_operand (operands[1], mode))
2194 return false; 2254 return false;
2195 2255
2196 /* Split large integers. */ 2256 /* Split large integers. */
2197 if (CONST_INT_P (operands[1]) 2257 if (CONST_INT_P (operands[1])
2198 || GET_CODE (operands[1]) == CONST_DOUBLE
2199 || GET_CODE (operands[1]) == CONST_VECTOR) 2258 || GET_CODE (operands[1]) == CONST_VECTOR)
2200 { 2259 {
2201 if (alpha_split_const_mov (mode, operands)) 2260 if (alpha_split_const_mov (mode, operands))
2202 return true; 2261 return true;
2203 } 2262 }
2220 2279
2221 /* Expand a non-bwx QImode or HImode move instruction; 2280 /* Expand a non-bwx QImode or HImode move instruction;
2222 return true if all work is done. */ 2281 return true if all work is done. */
2223 2282
2224 bool 2283 bool
2225 alpha_expand_mov_nobwx (enum machine_mode mode, rtx *operands) 2284 alpha_expand_mov_nobwx (machine_mode mode, rtx *operands)
2226 { 2285 {
2227 rtx seq; 2286 rtx seq;
2228 2287
2229 /* If the output is not a register, the input must be. */ 2288 /* If the output is not a register, the input must be. */
2230 if (MEM_P (operands[0])) 2289 if (MEM_P (operands[0]))
2342 2401
2343 /* Implement the movmisalign patterns. One of the operands is a memory 2402 /* Implement the movmisalign patterns. One of the operands is a memory
2344 that is not naturally aligned. Emit instructions to load it. */ 2403 that is not naturally aligned. Emit instructions to load it. */
2345 2404
2346 void 2405 void
2347 alpha_expand_movmisalign (enum machine_mode mode, rtx *operands) 2406 alpha_expand_movmisalign (machine_mode mode, rtx *operands)
2348 { 2407 {
2349 /* Honor misaligned loads, for those we promised to do so. */ 2408 /* Honor misaligned loads, for those we promised to do so. */
2350 if (MEM_P (operands[1])) 2409 if (MEM_P (operands[1]))
2351 { 2410 {
2352 rtx tmp; 2411 rtx tmp;
2422 2481
2423 void 2482 void
2424 alpha_emit_floatuns (rtx operands[2]) 2483 alpha_emit_floatuns (rtx operands[2])
2425 { 2484 {
2426 rtx neglab, donelab, i0, i1, f0, in, out; 2485 rtx neglab, donelab, i0, i1, f0, in, out;
2427 enum machine_mode mode; 2486 machine_mode mode;
2428 2487
2429 out = operands[0]; 2488 out = operands[0];
2430 in = force_reg (DImode, operands[1]); 2489 in = force_reg (DImode, operands[1]);
2431 mode = GET_MODE (out); 2490 mode = GET_MODE (out);
2432 neglab = gen_label_rtx (); 2491 neglab = gen_label_rtx ();
2435 i1 = gen_reg_rtx (DImode); 2494 i1 = gen_reg_rtx (DImode);
2436 f0 = gen_reg_rtx (mode); 2495 f0 = gen_reg_rtx (mode);
2437 2496
2438 emit_cmp_and_jump_insns (in, const0_rtx, LT, const0_rtx, DImode, 0, neglab); 2497 emit_cmp_and_jump_insns (in, const0_rtx, LT, const0_rtx, DImode, 0, neglab);
2439 2498
2440 emit_insn (gen_rtx_SET (VOIDmode, out, gen_rtx_FLOAT (mode, in))); 2499 emit_insn (gen_rtx_SET (out, gen_rtx_FLOAT (mode, in)));
2441 emit_jump_insn (gen_jump (donelab)); 2500 emit_jump_insn (gen_jump (donelab));
2442 emit_barrier (); 2501 emit_barrier ();
2443 2502
2444 emit_label (neglab); 2503 emit_label (neglab);
2445 2504
2446 emit_insn (gen_lshrdi3 (i0, in, const1_rtx)); 2505 emit_insn (gen_lshrdi3 (i0, in, const1_rtx));
2447 emit_insn (gen_anddi3 (i1, in, const1_rtx)); 2506 emit_insn (gen_anddi3 (i1, in, const1_rtx));
2448 emit_insn (gen_iordi3 (i0, i0, i1)); 2507 emit_insn (gen_iordi3 (i0, i0, i1));
2449 emit_insn (gen_rtx_SET (VOIDmode, f0, gen_rtx_FLOAT (mode, i0))); 2508 emit_insn (gen_rtx_SET (f0, gen_rtx_FLOAT (mode, i0)));
2450 emit_insn (gen_rtx_SET (VOIDmode, out, gen_rtx_PLUS (mode, f0, f0))); 2509 emit_insn (gen_rtx_SET (out, gen_rtx_PLUS (mode, f0, f0)));
2451 2510
2452 emit_label (donelab); 2511 emit_label (donelab);
2453 } 2512 }
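The negative path above is the classic round-to-odd halving trick; a plain-C sketch (ours, not GCC's) of what the emitted RTL computes:

#include <stdint.h>

double
u64_to_double (uint64_t x)
{
  if ((int64_t) x >= 0)
    return (double) (int64_t) x;        /* a signed convert suffices */
  uint64_t half = (x >> 1) | (x & 1);   /* srl, and, bis: keep sticky bit */
  double d = (double) (int64_t) half;   /* cvtqt: the one rounding step */
  return d + d;                         /* addt: doubling is exact */
}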
2454 2513
2455 /* Generate the comparison for a conditional branch. */ 2514 /* Generate the comparison for a conditional branch. */
2456 2515
2457 void 2516 void
2458 alpha_emit_conditional_branch (rtx operands[], enum machine_mode cmp_mode) 2517 alpha_emit_conditional_branch (rtx operands[], machine_mode cmp_mode)
2459 { 2518 {
2460 enum rtx_code cmp_code, branch_code; 2519 enum rtx_code cmp_code, branch_code;
2461 enum machine_mode branch_mode = VOIDmode; 2520 machine_mode branch_mode = VOIDmode;
2462 enum rtx_code code = GET_CODE (operands[0]); 2521 enum rtx_code code = GET_CODE (operands[0]);
2463 rtx op0 = operands[1], op1 = operands[2]; 2522 rtx op0 = operands[1], op1 = operands[2];
2464 rtx tem; 2523 rtx tem;
2465 2524
2466 if (cmp_mode == TFmode) 2525 if (cmp_mode == TFmode)
2474 that we have, choosing the branch as necessary. */ 2533 that we have, choosing the branch as necessary. */
2475 switch (code) 2534 switch (code)
2476 { 2535 {
2477 case EQ: case LE: case LT: case LEU: case LTU: 2536 case EQ: case LE: case LT: case LEU: case LTU:
2478 case UNORDERED: 2537 case UNORDERED:
2479 /* We have these compares: */ 2538 /* We have these compares. */
2480 cmp_code = code, branch_code = NE; 2539 cmp_code = code, branch_code = NE;
2481 break; 2540 break;
2482 2541
2483 case NE: 2542 case NE:
2484 case ORDERED: 2543 case ORDERED:
2490 /* For FP, we swap them, for INT, we reverse them. */ 2549 /* For FP, we swap them, for INT, we reverse them. */
2491 if (cmp_mode == DFmode) 2550 if (cmp_mode == DFmode)
2492 { 2551 {
2493 cmp_code = swap_condition (code); 2552 cmp_code = swap_condition (code);
2494 branch_code = NE; 2553 branch_code = NE;
2495 tem = op0, op0 = op1, op1 = tem; 2554 std::swap (op0, op1);
2496 } 2555 }
2497 else 2556 else
2498 { 2557 {
2499 cmp_code = reverse_condition (code); 2558 cmp_code = reverse_condition (code);
2500 branch_code = EQ; 2559 branch_code = EQ;
2514 if (op1 == CONST0_RTX (DFmode)) 2573 if (op1 == CONST0_RTX (DFmode))
2515 cmp_code = UNKNOWN, branch_code = code; 2574 cmp_code = UNKNOWN, branch_code = code;
2516 else if (op0 == CONST0_RTX (DFmode)) 2575 else if (op0 == CONST0_RTX (DFmode))
2517 { 2576 {
2518 /* Undo the swap we probably did just above. */ 2577 /* Undo the swap we probably did just above. */
2519 tem = op0, op0 = op1, op1 = tem; 2578 std::swap (op0, op1);
2520 branch_code = swap_condition (cmp_code); 2579 branch_code = swap_condition (cmp_code);
2521 cmp_code = UNKNOWN; 2580 cmp_code = UNKNOWN;
2522 } 2581 }
2523 } 2582 }
2524 else 2583 else
2571 tem = gen_reg_rtx (cmp_mode); 2630 tem = gen_reg_rtx (cmp_mode);
2572 emit_move_insn (tem, gen_rtx_fmt_ee (cmp_code, cmp_mode, op0, op1)); 2631 emit_move_insn (tem, gen_rtx_fmt_ee (cmp_code, cmp_mode, op0, op1));
2573 } 2632 }
2574 2633
2575 /* Emit the branch instruction. */ 2634 /* Emit the branch instruction. */
2576 tem = gen_rtx_SET (VOIDmode, pc_rtx, 2635 tem = gen_rtx_SET (pc_rtx,
2577 gen_rtx_IF_THEN_ELSE (VOIDmode, 2636 gen_rtx_IF_THEN_ELSE (VOIDmode,
2578 gen_rtx_fmt_ee (branch_code, 2637 gen_rtx_fmt_ee (branch_code,
2579 branch_mode, tem, 2638 branch_mode, tem,
2580 CONST0_RTX (cmp_mode)), 2639 CONST0_RTX (cmp_mode)),
2581 gen_rtx_LABEL_REF (VOIDmode, 2640 gen_rtx_LABEL_REF (VOIDmode,
2586 2645
2587 /* Certain simplifications can be done to make invalid setcc operations 2646 /* Certain simplifications can be done to make invalid setcc operations
2588 valid. Return the final comparison, or NULL if we can't work. */ 2647 valid. Return the final comparison, or NULL if we can't work. */
2589 2648
2590 bool 2649 bool
2591 alpha_emit_setcc (rtx operands[], enum machine_mode cmp_mode) 2650 alpha_emit_setcc (rtx operands[], machine_mode cmp_mode)
2592 { 2651 {
2593 enum rtx_code cmp_code; 2652 enum rtx_code cmp_code;
2594 enum rtx_code code = GET_CODE (operands[1]); 2653 enum rtx_code code = GET_CODE (operands[1]);
2595 rtx op0 = operands[2], op1 = operands[3]; 2654 rtx op0 = operands[2], op1 = operands[3];
2596 rtx tmp; 2655 rtx tmp;
2634 if (cmp_mode == DImode && op1 == const0_rtx) 2693 if (cmp_mode == DImode && op1 == const0_rtx)
2635 break; 2694 break;
2636 code = swap_condition (code); 2695 code = swap_condition (code);
2637 if (cmp_mode == DFmode) 2696 if (cmp_mode == DFmode)
2638 cmp_code = code, code = NE; 2697 cmp_code = code, code = NE;
2639 tmp = op0, op0 = op1, op1 = tmp; 2698 std::swap (op0, op1);
2640 break; 2699 break;
2641 2700
2642 default: 2701 default:
2643 gcc_unreachable (); 2702 gcc_unreachable ();
2644 } 2703 }
2653 2712
2654 /* Emit an initial compare instruction, if necessary. */ 2713 /* Emit an initial compare instruction, if necessary. */
2655 if (cmp_code != UNKNOWN) 2714 if (cmp_code != UNKNOWN)
2656 { 2715 {
2657 tmp = gen_reg_rtx (cmp_mode); 2716 tmp = gen_reg_rtx (cmp_mode);
2658 emit_insn (gen_rtx_SET (VOIDmode, tmp, 2717 emit_insn (gen_rtx_SET (tmp, gen_rtx_fmt_ee (cmp_code, cmp_mode,
2659 gen_rtx_fmt_ee (cmp_code, cmp_mode, op0, op1))); 2718 op0, op1)));
2660 2719
2661 op0 = cmp_mode != DImode ? gen_lowpart (DImode, tmp) : tmp; 2720 op0 = cmp_mode != DImode ? gen_lowpart (DImode, tmp) : tmp;
2662 op1 = const0_rtx; 2721 op1 = const0_rtx;
2663 } 2722 }
2664 2723
2665 /* Emit the setcc instruction. */ 2724 /* Emit the setcc instruction. */
2666 emit_insn (gen_rtx_SET (VOIDmode, operands[0], 2725 emit_insn (gen_rtx_SET (operands[0], gen_rtx_fmt_ee (code, DImode,
2667 gen_rtx_fmt_ee (code, DImode, op0, op1))); 2726 op0, op1)));
2668 return true; 2727 return true;
2669 } 2728 }
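Rough C picture (ours) of the rewrites above: Alpha provides only cmpeq/cmplt/cmple (and cmpult/cmpule), each producing 0 or 1, so the remaining codes are obtained by swapping or reversing:

long gt (long a, long b) { return b < a;  }        /* cmplt b,a  (swapped) */
long ge (long a, long b) { return b <= a; }        /* cmple b,a  (swapped) */
long ne (long a, long b) { return (a == b) == 0; } /* cmpeq, then test == 0 */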
2670 2729
2671 2730
2672 /* Rewrite a comparison against zero CMP of the form 2731 /* Rewrite a comparison against zero CMP of the form
2675 If both of the operands that set cc0 are nonzero we must emit 2734 If both of the operands that set cc0 are nonzero we must emit
2676 an insn to perform the compare (it can't be done within 2735 an insn to perform the compare (it can't be done within
2677 the conditional move). */ 2736 the conditional move). */
2678 2737
2679 rtx 2738 rtx
2680 alpha_emit_conditional_move (rtx cmp, enum machine_mode mode) 2739 alpha_emit_conditional_move (rtx cmp, machine_mode mode)
2681 { 2740 {
2682 enum rtx_code code = GET_CODE (cmp); 2741 enum rtx_code code = GET_CODE (cmp);
2683 enum rtx_code cmov_code = NE; 2742 enum rtx_code cmov_code = NE;
2684 rtx op0 = XEXP (cmp, 0); 2743 rtx op0 = XEXP (cmp, 0);
2685 rtx op1 = XEXP (cmp, 1); 2744 rtx op1 = XEXP (cmp, 1);
2686 enum machine_mode cmp_mode 2745 machine_mode cmp_mode
2687 = (GET_MODE (op0) == VOIDmode ? DImode : GET_MODE (op0)); 2746 = (GET_MODE (op0) == VOIDmode ? DImode : GET_MODE (op0));
2688 enum machine_mode cmov_mode = VOIDmode; 2747 machine_mode cmov_mode = VOIDmode;
2689 int local_fast_math = flag_unsafe_math_optimizations; 2748 int local_fast_math = flag_unsafe_math_optimizations;
2690 rtx tem; 2749 rtx tem;
2691 2750
2692 if (cmp_mode == TFmode) 2751 if (cmp_mode == TFmode)
2693 { 2752 {
2711 use a normal cmov, or vice-versa. */ 2770 use a normal cmov, or vice-versa. */
2712 2771
2713 switch (code) 2772 switch (code)
2714 { 2773 {
2715 case EQ: case LE: case LT: case LEU: case LTU: 2774 case EQ: case LE: case LT: case LEU: case LTU:
2775 case UNORDERED:
2716 /* We have these compares. */ 2776 /* We have these compares. */
2717 cmp_code = code, code = NE; 2777 cmp_code = code, code = NE;
2718 break; 2778 break;
2719 2779
2720 case NE: 2780 case NE:
2721 /* This must be reversed. */ 2781 case ORDERED:
2722 cmp_code = EQ, code = EQ; 2782 /* These must be reversed. */
2783 cmp_code = reverse_condition (code), code = EQ;
2723 break; 2784 break;
2724 2785
2725 case GE: case GT: case GEU: case GTU: 2786 case GE: case GT: case GEU: case GTU:
2726 /* These normally need swapping, but for integer zero we have 2787 /* These normally need swapping, but for integer zero we have
2727 special patterns that recognize swapped operands. */ 2788 special patterns that recognize swapped operands. */
2729 cmp_code = code, code = NE; 2790 cmp_code = code, code = NE;
2730 else 2791 else
2731 { 2792 {
2732 cmp_code = swap_condition (code); 2793 cmp_code = swap_condition (code);
2733 code = NE; 2794 code = NE;
2734 tem = op0, op0 = op1, op1 = tem; 2795 std::swap (op0, op1);
2735 } 2796 }
2736 break; 2797 break;
2737 2798
2738 default: 2799 default:
2739 gcc_unreachable (); 2800 gcc_unreachable ();
2740 } 2801 }
2741 2802
2803 if (cmp_mode == DImode)
2804 {
2805 if (!reg_or_0_operand (op0, DImode))
2806 op0 = force_reg (DImode, op0);
2807 if (!reg_or_8bit_operand (op1, DImode))
2808 op1 = force_reg (DImode, op1);
2809 }
2810
2742 tem = gen_reg_rtx (cmp_mode); 2811 tem = gen_reg_rtx (cmp_mode);
2743 emit_insn (gen_rtx_SET (VOIDmode, tem, 2812 emit_insn (gen_rtx_SET (tem, gen_rtx_fmt_ee (cmp_code, cmp_mode,
2744 gen_rtx_fmt_ee (cmp_code, cmp_mode, 2813 op0, op1)));
2745 op0, op1))); 2814
2746 2815 cmp_mode = cmp_mode == DImode ? E_DFmode : E_DImode;
2747 cmp_mode = cmp_mode == DImode ? DFmode : DImode;
2748 op0 = gen_lowpart (cmp_mode, tem); 2816 op0 = gen_lowpart (cmp_mode, tem);
2749 op1 = CONST0_RTX (cmp_mode); 2817 op1 = CONST0_RTX (cmp_mode);
2818 cmp = gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
2750 local_fast_math = 1; 2819 local_fast_math = 1;
2820 }
2821
2822 if (cmp_mode == DImode)
2823 {
2824 if (!reg_or_0_operand (op0, DImode))
2825 op0 = force_reg (DImode, op0);
2826 if (!reg_or_8bit_operand (op1, DImode))
2827 op1 = force_reg (DImode, op1);
2751 } 2828 }
2752 2829
2753 /* We may be able to use a conditional move directly. 2830 /* We may be able to use a conditional move directly.
2754 This avoids emitting spurious compares. */ 2831 This avoids emitting spurious compares. */
2755 if (signed_comparison_operator (cmp, VOIDmode) 2832 if (signed_comparison_operator (cmp, VOIDmode)
2766 return NULL_RTX; 2843 return NULL_RTX;
2767 2844
2768 switch (code) 2845 switch (code)
2769 { 2846 {
2770 case EQ: case LE: case LT: case LEU: case LTU: 2847 case EQ: case LE: case LT: case LEU: case LTU:
2848 case UNORDERED:
2771 /* We have these compares: */ 2849 /* We have these compares: */
2772 break; 2850 break;
2773 2851
2774 case NE: 2852 case NE:
2775 /* This must be reversed. */ 2853 case ORDERED:
2854 /* These must be reversed. */
2776 code = reverse_condition (code); 2855 code = reverse_condition (code);
2777 cmov_code = EQ; 2856 cmov_code = EQ;
2778 break; 2857 break;
2779 2858
2780 case GE: case GT: case GEU: case GTU: 2859 case GE: case GT: case GEU: case GTU:
2781 /* These must be swapped. */ 2860 /* These normally need swapping, but for integer zero we have
2782 if (op1 != CONST0_RTX (cmp_mode)) 2861 special patterns that recognize swapped operands. */
2783 { 2862 if (cmp_mode == DImode && op1 == const0_rtx)
2784 code = swap_condition (code); 2863 break;
2785 tem = op0, op0 = op1, op1 = tem; 2864 code = swap_condition (code);
2786 } 2865 std::swap (op0, op1);
2787 break; 2866 break;
2788 2867
2789 default: 2868 default:
2790 gcc_unreachable (); 2869 gcc_unreachable ();
2791 } 2870 }
2817 int 2896 int
2818 alpha_split_conditional_move (enum rtx_code code, rtx dest, rtx cond, 2897 alpha_split_conditional_move (enum rtx_code code, rtx dest, rtx cond,
2819 rtx t_rtx, rtx f_rtx) 2898 rtx t_rtx, rtx f_rtx)
2820 { 2899 {
2821 HOST_WIDE_INT t, f, diff; 2900 HOST_WIDE_INT t, f, diff;
2822 enum machine_mode mode; 2901 machine_mode mode;
2823 rtx target, subtarget, tmp; 2902 rtx target, subtarget, tmp;
2824 2903
2825 mode = GET_MODE (dest); 2904 mode = GET_MODE (dest);
2826 t = INTVAL (t_rtx); 2905 t = INTVAL (t_rtx);
2827 f = INTVAL (f_rtx); 2906 f = INTVAL (f_rtx);
2829 2908
2830 if (((code == NE || code == EQ) && diff < 0) 2909 if (((code == NE || code == EQ) && diff < 0)
2831 || (code == GE || code == GT)) 2910 || (code == GE || code == GT))
2832 { 2911 {
2833 code = reverse_condition (code); 2912 code = reverse_condition (code);
2834 diff = t, t = f, f = diff; 2913 std::swap (t, f);
2835 diff = t - f; 2914 diff = -diff;
2836 } 2915 }
2837 2916
2838 subtarget = target = dest; 2917 subtarget = target = dest;
2839 if (mode != DImode) 2918 if (mode != DImode)
2840 { 2919 {
2853 viable over a longer latency cmove. On EV5, the E0 slot is a 2932 viable over a longer latency cmove. On EV5, the E0 slot is a
2854 scarce resource, and on EV4 shift has the same latency as a cmove. */ 2933 scarce resource, and on EV4 shift has the same latency as a cmove. */
2855 && (diff <= 8 || alpha_tune == PROCESSOR_EV6)) 2934 && (diff <= 8 || alpha_tune == PROCESSOR_EV6))
2856 { 2935 {
2857 tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx); 2936 tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx);
2858 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (subtarget), tmp)); 2937 emit_insn (gen_rtx_SET (copy_rtx (subtarget), tmp));
2859 2938
2860 tmp = gen_rtx_ASHIFT (DImode, copy_rtx (subtarget), 2939 tmp = gen_rtx_ASHIFT (DImode, copy_rtx (subtarget),
2861 GEN_INT (exact_log2 (t))); 2940 GEN_INT (exact_log2 (t)));
2862 emit_insn (gen_rtx_SET (VOIDmode, target, tmp)); 2941 emit_insn (gen_rtx_SET (target, tmp));
2863 } 2942 }
2864 else if (f == 0 && t == -1) 2943 else if (f == 0 && t == -1)
2865 { 2944 {
2866 tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx); 2945 tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx);
2867 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (subtarget), tmp)); 2946 emit_insn (gen_rtx_SET (copy_rtx (subtarget), tmp));
2868 2947
2869 emit_insn (gen_negdi2 (target, copy_rtx (subtarget))); 2948 emit_insn (gen_negdi2 (target, copy_rtx (subtarget)));
2870 } 2949 }
2871 else if (diff == 1 || diff == 4 || diff == 8) 2950 else if (diff == 1 || diff == 4 || diff == 8)
2872 { 2951 {
2873 rtx add_op; 2952 rtx add_op;
2874 2953
2875 tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx); 2954 tmp = gen_rtx_fmt_ee (code, DImode, cond, const0_rtx);
2876 emit_insn (gen_rtx_SET (VOIDmode, copy_rtx (subtarget), tmp)); 2955 emit_insn (gen_rtx_SET (copy_rtx (subtarget), tmp));
2877 2956
2878 if (diff == 1) 2957 if (diff == 1)
2879 emit_insn (gen_adddi3 (target, copy_rtx (subtarget), GEN_INT (f))); 2958 emit_insn (gen_adddi3 (target, copy_rtx (subtarget), GEN_INT (f)));
2880 else 2959 else
2881 { 2960 {
2883 if (sext_add_operand (add_op, mode)) 2962 if (sext_add_operand (add_op, mode))
2884 { 2963 {
2885 tmp = gen_rtx_MULT (DImode, copy_rtx (subtarget), 2964 tmp = gen_rtx_MULT (DImode, copy_rtx (subtarget),
2886 GEN_INT (diff)); 2965 GEN_INT (diff));
2887 tmp = gen_rtx_PLUS (DImode, tmp, add_op); 2966 tmp = gen_rtx_PLUS (DImode, tmp, add_op);
2888 emit_insn (gen_rtx_SET (VOIDmode, target, tmp)); 2967 emit_insn (gen_rtx_SET (target, tmp));
2889 } 2968 }
2890 else 2969 else
2891 return 0; 2970 return 0;
2892 } 2971 }
2893 } 2972 }
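Plain-C picture (ours) of the constant-pair cases handled above, each of which avoids a cmov entirely:

long sel8   (long c) { return (long)(c != 0) << 3;    }  /* t=8,  f=0: cmp + sll    */
long selm1  (long c) { return -(long)(c != 0);        }  /* t=-1, f=0: cmp + negq   */
long sel9_1 (long c) { return (long)(c != 0) * 8 + 1; }  /* t=9,  f=1: cmp + s8addq */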
3015 3094
3016 static void 3095 static void
3017 alpha_emit_xfloating_libcall (rtx func, rtx target, rtx operands[], 3096 alpha_emit_xfloating_libcall (rtx func, rtx target, rtx operands[],
3018 int noperands, rtx equiv) 3097 int noperands, rtx equiv)
3019 { 3098 {
3020 rtx usage = NULL_RTX, tmp, reg; 3099 rtx usage = NULL_RTX, reg;
3021 int regno = 16, i; 3100 int regno = 16, i;
3022 3101
3023 start_sequence (); 3102 start_sequence ();
3024 3103
3025 for (i = 0; i < noperands; ++i) 3104 for (i = 0; i < noperands; ++i)
3026 { 3105 {
3027 switch (GET_MODE (operands[i])) 3106 switch (GET_MODE (operands[i]))
3028 { 3107 {
3029 case TFmode: 3108 case E_TFmode:
3030 reg = gen_rtx_REG (TFmode, regno); 3109 reg = gen_rtx_REG (TFmode, regno);
3031 regno += 2; 3110 regno += 2;
3032 break; 3111 break;
3033 3112
3034 case DFmode: 3113 case E_DFmode:
3035 reg = gen_rtx_REG (DFmode, regno + 32); 3114 reg = gen_rtx_REG (DFmode, regno + 32);
3036 regno += 1; 3115 regno += 1;
3037 break; 3116 break;
3038 3117
3039 case VOIDmode: 3118 case E_VOIDmode:
3040 gcc_assert (CONST_INT_P (operands[i])); 3119 gcc_assert (CONST_INT_P (operands[i]));
3041 /* FALLTHRU */ 3120 /* FALLTHRU */
3042 case DImode: 3121 case E_DImode:
3043 reg = gen_rtx_REG (DImode, regno); 3122 reg = gen_rtx_REG (DImode, regno);
3044 regno += 1; 3123 regno += 1;
3045 break; 3124 break;
3046 3125
3047 default: 3126 default:
3048 gcc_unreachable (); 3127 gcc_unreachable ();
3049 } 3128 }
3050 3129
3051 emit_move_insn (reg, operands[i]); 3130 emit_move_insn (reg, operands[i]);
3052 usage = alloc_EXPR_LIST (0, gen_rtx_USE (VOIDmode, reg), usage); 3131 use_reg (&usage, reg);
3053 } 3132 }
3054 3133
3055 switch (GET_MODE (target)) 3134 switch (GET_MODE (target))
3056 { 3135 {
3057 case TFmode: 3136 case E_TFmode:
3058 reg = gen_rtx_REG (TFmode, 16); 3137 reg = gen_rtx_REG (TFmode, 16);
3059 break; 3138 break;
3060 case DFmode: 3139 case E_DFmode:
3061 reg = gen_rtx_REG (DFmode, 32); 3140 reg = gen_rtx_REG (DFmode, 32);
3062 break; 3141 break;
3063 case DImode: 3142 case E_DImode:
3064 reg = gen_rtx_REG (DImode, 0); 3143 reg = gen_rtx_REG (DImode, 0);
3065 break; 3144 break;
3066 default: 3145 default:
3067 gcc_unreachable (); 3146 gcc_unreachable ();
3068 } 3147 }
3069 3148
3070 tmp = gen_rtx_MEM (QImode, func); 3149 rtx mem = gen_rtx_MEM (QImode, func);
3071 tmp = emit_call_insn (GEN_CALL_VALUE (reg, tmp, const0_rtx, 3150 rtx_insn *tmp = emit_call_insn (gen_call_value (reg, mem, const0_rtx,
3072 const0_rtx, const0_rtx)); 3151 const0_rtx, const0_rtx));
3073 CALL_INSN_FUNCTION_USAGE (tmp) = usage; 3152 CALL_INSN_FUNCTION_USAGE (tmp) = usage;
3074 RTL_CONST_CALL_P (tmp) = 1; 3153 RTL_CONST_CALL_P (tmp) = 1;
3075 3154
3076 tmp = get_insns (); 3155 tmp = get_insns ();
3077 end_sequence (); 3156 end_sequence ();
3143 3222
3144 operands[0] = op0; 3223 operands[0] = op0;
3145 operands[1] = op1; 3224 operands[1] = op1;
3146 out = gen_reg_rtx (DImode); 3225 out = gen_reg_rtx (DImode);
3147 3226
3148 /* What's actually returned is -1,0,1, not a proper boolean value, 3227 /* What's actually returned is -1,0,1, not a proper boolean value. */
3149 so use an EXPR_LIST as with a generic libcall instead of a 3228 note = gen_rtx_fmt_ee (cmp_code, VOIDmode, op0, op1);
3150 comparison type expression. */ 3229 note = gen_rtx_UNSPEC (DImode, gen_rtvec (1, note), UNSPEC_XFLT_COMPARE);
3151 note = gen_rtx_EXPR_LIST (VOIDmode, op1, NULL_RTX);
3152 note = gen_rtx_EXPR_LIST (VOIDmode, op0, note);
3153 note = gen_rtx_EXPR_LIST (VOIDmode, func, note);
3154 alpha_emit_xfloating_libcall (func, out, operands, 2, note); 3230 alpha_emit_xfloating_libcall (func, out, operands, 2, note);
3155 3231
3156 return out; 3232 return out;
3157 } 3233 }
3158 3234
3202 set (OP[1] OP[3]) 3278 set (OP[1] OP[3])
3203 is valid. Naturally, output operand ordering is little-endian. 3279 is valid. Naturally, output operand ordering is little-endian.
3204 This is used by *movtf_internal and *movti_internal. */ 3280 This is used by *movtf_internal and *movti_internal. */
3205 3281
3206 void 3282 void
3207 alpha_split_tmode_pair (rtx operands[4], enum machine_mode mode, 3283 alpha_split_tmode_pair (rtx operands[4], machine_mode mode,
3208 bool fixup_overlap) 3284 bool fixup_overlap)
3209 { 3285 {
3210 switch (GET_CODE (operands[1])) 3286 switch (GET_CODE (operands[1]))
3211 { 3287 {
3212 case REG: 3288 case REG:
3217 case MEM: 3293 case MEM:
3218 operands[3] = adjust_address (operands[1], DImode, 8); 3294 operands[3] = adjust_address (operands[1], DImode, 8);
3219 operands[2] = adjust_address (operands[1], DImode, 0); 3295 operands[2] = adjust_address (operands[1], DImode, 0);
3220 break; 3296 break;
3221 3297
3222 case CONST_INT: 3298 CASE_CONST_SCALAR_INT:
3223 case CONST_DOUBLE: 3299 case CONST_DOUBLE:
3224 gcc_assert (operands[1] == CONST0_RTX (mode)); 3300 gcc_assert (operands[1] == CONST0_RTX (mode));
3225 operands[2] = operands[3] = const0_rtx; 3301 operands[2] = operands[3] = const0_rtx;
3226 break; 3302 break;
3227 3303
3245 gcc_unreachable (); 3321 gcc_unreachable ();
3246 } 3322 }
3247 3323
3248 if (fixup_overlap && reg_overlap_mentioned_p (operands[0], operands[3])) 3324 if (fixup_overlap && reg_overlap_mentioned_p (operands[0], operands[3]))
3249 { 3325 {
3250 rtx tmp; 3326 std::swap (operands[0], operands[1]);
3251 tmp = operands[0], operands[0] = operands[1], operands[1] = tmp; 3327 std::swap (operands[2], operands[3]);
3252 tmp = operands[2], operands[2] = operands[3], operands[3] = tmp;
3253 } 3328 }
3254 } 3329 }
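A plain-C picture (ours) of the overlap fixup above: the two 8-byte moves must be ordered so the first store does not clobber a source half that is still to be read.

void
move_pair (long *d0, long *d1, const long *s0, const long *s1)
{
  if ((const long *) d0 == s1)     /* storing d0 first would clobber *s1 */
    { *d1 = *s1; *d0 = *s0; }      /* so emit the halves swapped */
  else
    { *d0 = *s0; *d1 = *s1; }
}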
3255 3330
3256 /* Implement negtf2 or abstf2. Op0 is destination, op1 is source, 3331 /* Implement negtf2 or abstf2. Op0 is destination, op1 is source,
3257 op2 is a register containing the sign bit, operation is the 3332 op2 is a register containing the sign bit, operation is the
3327 void 3402 void
3328 alpha_expand_unaligned_load (rtx tgt, rtx mem, HOST_WIDE_INT size, 3403 alpha_expand_unaligned_load (rtx tgt, rtx mem, HOST_WIDE_INT size,
3329 HOST_WIDE_INT ofs, int sign) 3404 HOST_WIDE_INT ofs, int sign)
3330 { 3405 {
3331 rtx meml, memh, addr, extl, exth, tmp, mema; 3406 rtx meml, memh, addr, extl, exth, tmp, mema;
3332 enum machine_mode mode; 3407 machine_mode mode;
3333 3408
3334 if (TARGET_BWX && size == 2) 3409 if (TARGET_BWX && size == 2)
3335 { 3410 {
3336 meml = adjust_address (mem, QImode, ofs); 3411 meml = adjust_address (mem, QImode, ofs);
3337 memh = adjust_address (mem, QImode, ofs+1); 3412 memh = adjust_address (mem, QImode, ofs+1);
3338 if (BYTES_BIG_ENDIAN)
3339 tmp = meml, meml = memh, memh = tmp;
3340 extl = gen_reg_rtx (DImode); 3413 extl = gen_reg_rtx (DImode);
3341 exth = gen_reg_rtx (DImode); 3414 exth = gen_reg_rtx (DImode);
3342 emit_insn (gen_zero_extendqidi2 (extl, meml)); 3415 emit_insn (gen_zero_extendqidi2 (extl, meml));
3343 emit_insn (gen_zero_extendqidi2 (exth, memh)); 3416 emit_insn (gen_zero_extendqidi2 (exth, memh));
3344 exth = expand_simple_binop (DImode, ASHIFT, exth, GEN_INT (8), 3417 exth = expand_simple_binop (DImode, ASHIFT, exth, GEN_INT (8),
3374 alias surrounding code. Ideally we'd have some alias set that 3447 alias surrounding code. Ideally we'd have some alias set that
3375 covered all types except those with alignment 8 or higher. */ 3448 covered all types except those with alignment 8 or higher. */
3376 3449
3377 tmp = change_address (mem, DImode, 3450 tmp = change_address (mem, DImode,
3378 gen_rtx_AND (DImode, 3451 gen_rtx_AND (DImode,
3379 plus_constant (mema, ofs), 3452 plus_constant (DImode, mema, ofs),
3380 GEN_INT (-8))); 3453 GEN_INT (-8)));
3381 set_mem_alias_set (tmp, 0); 3454 set_mem_alias_set (tmp, 0);
3382 emit_move_insn (meml, tmp); 3455 emit_move_insn (meml, tmp);
3383 3456
3384 tmp = change_address (mem, DImode, 3457 tmp = change_address (mem, DImode,
3385 gen_rtx_AND (DImode, 3458 gen_rtx_AND (DImode,
3386 plus_constant (mema, ofs + size - 1), 3459 plus_constant (DImode, mema,
3460 ofs + size - 1),
3387 GEN_INT (-8))); 3461 GEN_INT (-8)));
3388 set_mem_alias_set (tmp, 0); 3462 set_mem_alias_set (tmp, 0);
3389 emit_move_insn (memh, tmp); 3463 emit_move_insn (memh, tmp);
3390 3464
3391 if (WORDS_BIG_ENDIAN && sign && (size == 2 || size == 4)) 3465 if (sign && size == 2)
3392 { 3466 {
3393 emit_move_insn (addr, plus_constant (mema, -1)); 3467 emit_move_insn (addr, plus_constant (Pmode, mema, ofs+2));
3394 3468
3395 emit_insn (gen_extqh_be (extl, meml, addr)); 3469 emit_insn (gen_extql (extl, meml, addr));
3396 emit_insn (gen_extxl_be (exth, memh, GEN_INT (64), addr)); 3470 emit_insn (gen_extqh (exth, memh, addr));
3397
3398 addr = expand_binop (DImode, ior_optab, extl, exth, tgt, 1, OPTAB_WIDEN);
3399 addr = expand_binop (DImode, ashr_optab, addr, GEN_INT (64 - size*8),
3400 addr, 1, OPTAB_WIDEN);
3401 }
3402 else if (sign && size == 2)
3403 {
3404 emit_move_insn (addr, plus_constant (mema, ofs+2));
3405
3406 emit_insn (gen_extxl_le (extl, meml, GEN_INT (64), addr));
3407 emit_insn (gen_extqh_le (exth, memh, addr));
3408 3471
3409 /* We must use tgt here for the target. Alpha-vms port fails if we use 3472 /* We must use tgt here for the target. Alpha-vms port fails if we use
3410 addr for the target, because addr is marked as a pointer and combine 3473 addr for the target, because addr is marked as a pointer and combine
3411 knows that pointers are always sign-extended 32-bit values. */ 3474 knows that pointers are always sign-extended 32-bit values. */
3412 addr = expand_binop (DImode, ior_optab, extl, exth, tgt, 1, OPTAB_WIDEN); 3475 addr = expand_binop (DImode, ior_optab, extl, exth, tgt, 1, OPTAB_WIDEN);
3413 addr = expand_binop (DImode, ashr_optab, addr, GEN_INT (48), 3476 addr = expand_binop (DImode, ashr_optab, addr, GEN_INT (48),
3414 addr, 1, OPTAB_WIDEN); 3477 addr, 1, OPTAB_WIDEN);
3415 } 3478 }
3416 else 3479 else
3417 { 3480 {
3418 if (WORDS_BIG_ENDIAN) 3481 emit_move_insn (addr, plus_constant (Pmode, mema, ofs));
3419 { 3482 emit_insn (gen_extxl (extl, meml, GEN_INT (size*8), addr));
3420 emit_move_insn (addr, plus_constant (mema, ofs+size-1)); 3483 switch ((int) size)
3421 switch ((int) size) 3484 {
3422 { 3485 case 2:
3423 case 2: 3486 emit_insn (gen_extwh (exth, memh, addr));
3424 emit_insn (gen_extwh_be (extl, meml, addr)); 3487 mode = HImode;
3425 mode = HImode; 3488 break;
3426 break; 3489 case 4:
3427 3490 emit_insn (gen_extlh (exth, memh, addr));
3428 case 4: 3491 mode = SImode;
3429 emit_insn (gen_extlh_be (extl, meml, addr)); 3492 break;
3430 mode = SImode; 3493 case 8:
3431 break; 3494 emit_insn (gen_extqh (exth, memh, addr));
3432 3495 mode = DImode;
3433 case 8: 3496 break;
3434 emit_insn (gen_extqh_be (extl, meml, addr)); 3497 default:
3435 mode = DImode; 3498 gcc_unreachable ();
3436 break;
3437
3438 default:
3439 gcc_unreachable ();
3440 }
3441 emit_insn (gen_extxl_be (exth, memh, GEN_INT (size*8), addr));
3442 }
3443 else
3444 {
3445 emit_move_insn (addr, plus_constant (mema, ofs));
3446 emit_insn (gen_extxl_le (extl, meml, GEN_INT (size*8), addr));
3447 switch ((int) size)
3448 {
3449 case 2:
3450 emit_insn (gen_extwh_le (exth, memh, addr));
3451 mode = HImode;
3452 break;
3453
3454 case 4:
3455 emit_insn (gen_extlh_le (exth, memh, addr));
3456 mode = SImode;
3457 break;
3458
3459 case 8:
3460 emit_insn (gen_extqh_le (exth, memh, addr));
3461 mode = DImode;
3462 break;
3463
3464 default:
3465 gcc_unreachable ();
3466 }
3467 } 3499 }
3468 3500
3469 addr = expand_binop (mode, ior_optab, gen_lowpart (mode, extl), 3501 addr = expand_binop (mode, ior_optab, gen_lowpart (mode, extl),
3470 gen_lowpart (mode, exth), gen_lowpart (mode, tgt), 3502 gen_lowpart (mode, exth), gen_lowpart (mode, tgt),
3471 sign, OPTAB_WIDEN); 3503 sign, OPTAB_WIDEN);
3495 else 3527 else
3496 dstl = dsth = const0_rtx; 3528 dstl = dsth = const0_rtx;
3497 3529
3498 meml = adjust_address (dst, QImode, ofs); 3530 meml = adjust_address (dst, QImode, ofs);
3499 memh = adjust_address (dst, QImode, ofs+1); 3531 memh = adjust_address (dst, QImode, ofs+1);
3500 if (BYTES_BIG_ENDIAN)
3501 addr = meml, meml = memh, memh = addr;
3502 3532
3503 emit_move_insn (meml, dstl); 3533 emit_move_insn (meml, dstl);
3504 emit_move_insn (memh, dsth); 3534 emit_move_insn (memh, dsth);
3505 return; 3535 return;
3506 } 3536 }
3518 alias surrounding code. Ideally we'd have some alias set that 3548 alias surrounding code. Ideally we'd have some alias set that
3519 covered all types except those with alignment 8 or higher. */ 3549 covered all types except those with alignment 8 or higher. */
3520 3550
3521 meml = change_address (dst, DImode, 3551 meml = change_address (dst, DImode,
3522 gen_rtx_AND (DImode, 3552 gen_rtx_AND (DImode,
3523 plus_constant (dsta, ofs), 3553 plus_constant (DImode, dsta, ofs),
3524 GEN_INT (-8))); 3554 GEN_INT (-8)));
3525 set_mem_alias_set (meml, 0); 3555 set_mem_alias_set (meml, 0);
3526 3556
3527 memh = change_address (dst, DImode, 3557 memh = change_address (dst, DImode,
3528 gen_rtx_AND (DImode, 3558 gen_rtx_AND (DImode,
3529 plus_constant (dsta, ofs + size - 1), 3559 plus_constant (DImode, dsta,
3560 ofs + size - 1),
3530 GEN_INT (-8))); 3561 GEN_INT (-8)));
3531 set_mem_alias_set (memh, 0); 3562 set_mem_alias_set (memh, 0);
3532 3563
3533 emit_move_insn (dsth, memh); 3564 emit_move_insn (dsth, memh);
3534 emit_move_insn (dstl, meml); 3565 emit_move_insn (dstl, meml);
3535 if (WORDS_BIG_ENDIAN) 3566
3536 { 3567 addr = copy_addr_to_reg (plus_constant (Pmode, dsta, ofs));
3537 addr = copy_addr_to_reg (plus_constant (dsta, ofs+size-1)); 3568
3538 3569 if (src != CONST0_RTX (GET_MODE (src)))
3539 if (src != const0_rtx) 3570 {
3540 { 3571 emit_insn (gen_insxh (insh, gen_lowpart (DImode, src),
3541 switch ((int) size) 3572 GEN_INT (size*8), addr));
3542 {
3543 case 2:
3544 emit_insn (gen_inswl_be (insh, gen_lowpart (HImode,src), addr));
3545 break;
3546 case 4:
3547 emit_insn (gen_insll_be (insh, gen_lowpart (SImode,src), addr));
3548 break;
3549 case 8:
3550 emit_insn (gen_insql_be (insh, gen_lowpart (DImode,src), addr));
3551 break;
3552 }
3553 emit_insn (gen_insxh (insl, gen_lowpart (DImode, src),
3554 GEN_INT (size*8), addr));
3555 }
3556 3573
3557 switch ((int) size) 3574 switch ((int) size)
3558 { 3575 {
3559 case 2: 3576 case 2:
3560 emit_insn (gen_mskxl_be (dsth, dsth, GEN_INT (0xffff), addr)); 3577 emit_insn (gen_inswl (insl, gen_lowpart (HImode, src), addr));
3561 break; 3578 break;
3562 case 4: 3579 case 4:
3563 { 3580 emit_insn (gen_insll (insl, gen_lowpart (SImode, src), addr));
3564 rtx msk = immed_double_const (0xffffffff, 0, DImode); 3581 break;
3565 emit_insn (gen_mskxl_be (dsth, dsth, msk, addr));
3566 break;
3567 }
3568 case 8: 3582 case 8:
3569 emit_insn (gen_mskxl_be (dsth, dsth, constm1_rtx, addr)); 3583 emit_insn (gen_insql (insl, gen_lowpart (DImode, src), addr));
3570 break; 3584 break;
3571 } 3585 default:
3572 3586 gcc_unreachable ();
3573 emit_insn (gen_mskxh (dstl, dstl, GEN_INT (size*8), addr)); 3587 }
3574 } 3588 }
3575 else 3589
3576 { 3590 emit_insn (gen_mskxh (dsth, dsth, GEN_INT (size*8), addr));
3577 addr = copy_addr_to_reg (plus_constant (dsta, ofs)); 3591
3578 3592 switch ((int) size)
3579 if (src != CONST0_RTX (GET_MODE (src))) 3593 {
3580 { 3594 case 2:
3581 emit_insn (gen_insxh (insh, gen_lowpart (DImode, src), 3595 emit_insn (gen_mskwl (dstl, dstl, addr));
3582 GEN_INT (size*8), addr)); 3596 break;
3583 3597 case 4:
3584 switch ((int) size) 3598 emit_insn (gen_mskll (dstl, dstl, addr));
3585 { 3599 break;
3586 case 2: 3600 case 8:
3587 emit_insn (gen_inswl_le (insl, gen_lowpart (HImode, src), addr)); 3601 emit_insn (gen_mskql (dstl, dstl, addr));
3588 break; 3602 break;
3589 case 4: 3603 default:
3590 emit_insn (gen_insll_le (insl, gen_lowpart (SImode, src), addr)); 3604 gcc_unreachable ();
3591 break;
3592 case 8:
3593 emit_insn (gen_insql_le (insl, gen_lowpart (DImode, src), addr));
3594 break;
3595 }
3596 }
3597
3598 emit_insn (gen_mskxh (dsth, dsth, GEN_INT (size*8), addr));
3599
3600 switch ((int) size)
3601 {
3602 case 2:
3603 emit_insn (gen_mskxl_le (dstl, dstl, GEN_INT (0xffff), addr));
3604 break;
3605 case 4:
3606 {
3607 rtx msk = immed_double_const (0xffffffff, 0, DImode);
3608 emit_insn (gen_mskxl_le (dstl, dstl, msk, addr));
3609 break;
3610 }
3611 case 8:
3612 emit_insn (gen_mskxl_le (dstl, dstl, constm1_rtx, addr));
3613 break;
3614 }
3615 } 3605 }
3616 3606
3617 if (src != CONST0_RTX (GET_MODE (src))) 3607 if (src != CONST0_RTX (GET_MODE (src)))
3618 { 3608 {
3619 dsth = expand_binop (DImode, ior_optab, insh, dsth, dsth, 0, OPTAB_WIDEN); 3609 dsth = expand_binop (DImode, ior_optab, insh, dsth, dsth, 0, OPTAB_WIDEN);
3620 dstl = expand_binop (DImode, ior_optab, insl, dstl, dstl, 0, OPTAB_WIDEN); 3610 dstl = expand_binop (DImode, ior_optab, insl, dstl, dstl, 0, OPTAB_WIDEN);
3621 } 3611 }
3622 3612
3623 if (WORDS_BIG_ENDIAN) 3613 /* Must store high before low for degenerate case of aligned. */
3624 { 3614 emit_move_insn (memh, dsth);
3625 emit_move_insn (meml, dstl); 3615 emit_move_insn (meml, dstl);
3626 emit_move_insn (memh, dsth);
3627 }
3628 else
3629 {
3630 /* Must store high before low for degenerate case of aligned. */
3631 emit_move_insn (memh, dsth);
3632 emit_move_insn (meml, dstl);
3633 }
3634 } 3616 }
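Plain-C sketch (ours) of the BWX-less unaligned quadword access implemented by the two routines above: two aligned loads combined with extql/extqh, and for stores a read-modify-write of both words using insql/insqh plus mskql/mskqh. Little-endian, as on Alpha; the real expander stores the high word first so the aligned case degenerates safely, which the explicit early-out below sidesteps.

#include <stdint.h>

uint64_t
load_u64 (const uint8_t *p)
{
  const uint64_t *lo = (const uint64_t *) ((uintptr_t) p & -8);
  const uint64_t *hi = (const uint64_t *) (((uintptr_t) p + 7) & -8);
  unsigned sh = ((uintptr_t) p & 7) * 8;
  uint64_t l = *lo >> sh;                    /* extql */
  uint64_t h = sh ? *hi << (64 - sh) : 0;    /* extqh; zero when aligned */
  return l | h;
}

void
store_u64 (uint8_t *p, uint64_t v)
{
  uint64_t *lo = (uint64_t *) ((uintptr_t) p & -8);
  uint64_t *hi = (uint64_t *) (((uintptr_t) p + 7) & -8);
  unsigned sh = ((uintptr_t) p & 7) * 8;
  if (sh == 0)
    { *lo = v; return; }                             /* aligned case */
  *hi = (*hi & (~0ULL << sh)) | (v >> (64 - sh));    /* mskqh + insqh */
  *lo = (*lo & ((1ULL << sh) - 1)) | (v << sh);      /* mskql + insql */
}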
3635 3617
3636 /* The block move code tries to maximize speed by separating loads and 3618 /* The block move code tries to maximize speed by separating loads and
3637 stores at the expense of register pressure: we load all of the data 3619 stores at the expense of register pressure: we load all of the data
3638 before we store it back out. There are two secondary effects worth 3620 before we store it back out. There are two secondary effects worth
3646 static void 3628 static void
3647 alpha_expand_unaligned_load_words (rtx *out_regs, rtx smem, 3629 alpha_expand_unaligned_load_words (rtx *out_regs, rtx smem,
3648 HOST_WIDE_INT words, HOST_WIDE_INT ofs) 3630 HOST_WIDE_INT words, HOST_WIDE_INT ofs)
3649 { 3631 {
3650 rtx const im8 = GEN_INT (-8); 3632 rtx const im8 = GEN_INT (-8);
3651 rtx const i64 = GEN_INT (64);
3652 rtx ext_tmps[MAX_MOVE_WORDS], data_regs[MAX_MOVE_WORDS+1]; 3633 rtx ext_tmps[MAX_MOVE_WORDS], data_regs[MAX_MOVE_WORDS+1];
3653 rtx sreg, areg, tmp, smema; 3634 rtx sreg, areg, tmp, smema;
3654 HOST_WIDE_INT i; 3635 HOST_WIDE_INT i;
3655 3636
3656 smema = XEXP (smem, 0); 3637 smema = XEXP (smem, 0);
3671 /* Load up all of the source data. */ 3652 /* Load up all of the source data. */
3672 for (i = 0; i < words; ++i) 3653 for (i = 0; i < words; ++i)
3673 { 3654 {
3674 tmp = change_address (smem, DImode, 3655 tmp = change_address (smem, DImode,
3675 gen_rtx_AND (DImode, 3656 gen_rtx_AND (DImode,
3676 plus_constant (smema, 8*i), 3657 plus_constant (DImode, smema, 8*i),
3677 im8)); 3658 im8));
3678 set_mem_alias_set (tmp, 0); 3659 set_mem_alias_set (tmp, 0);
3679 emit_move_insn (data_regs[i], tmp); 3660 emit_move_insn (data_regs[i], tmp);
3680 } 3661 }
3681 3662
3682 tmp = change_address (smem, DImode, 3663 tmp = change_address (smem, DImode,
3683 gen_rtx_AND (DImode, 3664 gen_rtx_AND (DImode,
3684 plus_constant (smema, 8*words - 1), 3665 plus_constant (DImode, smema,
3666 8*words - 1),
3685 im8)); 3667 im8));
3686 set_mem_alias_set (tmp, 0); 3668 set_mem_alias_set (tmp, 0);
3687 emit_move_insn (data_regs[words], tmp); 3669 emit_move_insn (data_regs[words], tmp);
3688 3670
3689 /* Extract the half-word fragments. Unfortunately DEC decided to make 3671 /* Extract the half-word fragments. Unfortunately DEC decided to make
3691 we must take care of that edge condition ourselves with cmov. */ 3673 we must take care of that edge condition ourselves with cmov. */
3692 3674
3693 sreg = copy_addr_to_reg (smema); 3675 sreg = copy_addr_to_reg (smema);
3694 areg = expand_binop (DImode, and_optab, sreg, GEN_INT (7), NULL, 3676 areg = expand_binop (DImode, and_optab, sreg, GEN_INT (7), NULL,
3695 1, OPTAB_WIDEN); 3677 1, OPTAB_WIDEN);
3696 if (WORDS_BIG_ENDIAN)
3697 emit_move_insn (sreg, plus_constant (sreg, 7));
3698 for (i = 0; i < words; ++i) 3678 for (i = 0; i < words; ++i)
3699 { 3679 {
3700 if (WORDS_BIG_ENDIAN) 3680 emit_insn (gen_extql (data_regs[i], data_regs[i], sreg));
3701 { 3681 emit_insn (gen_extqh (ext_tmps[i], data_regs[i+1], sreg));
3702 emit_insn (gen_extqh_be (data_regs[i], data_regs[i], sreg)); 3682 emit_insn (gen_rtx_SET (ext_tmps[i],
3703 emit_insn (gen_extxl_be (ext_tmps[i], data_regs[i+1], i64, sreg));
3704 }
3705 else
3706 {
3707 emit_insn (gen_extxl_le (data_regs[i], data_regs[i], i64, sreg));
3708 emit_insn (gen_extqh_le (ext_tmps[i], data_regs[i+1], sreg));
3709 }
3710 emit_insn (gen_rtx_SET (VOIDmode, ext_tmps[i],
3711 gen_rtx_IF_THEN_ELSE (DImode, 3683 gen_rtx_IF_THEN_ELSE (DImode,
3712 gen_rtx_EQ (DImode, areg, 3684 gen_rtx_EQ (DImode, areg,
3713 const0_rtx), 3685 const0_rtx),
3714 const0_rtx, ext_tmps[i]))); 3686 const0_rtx, ext_tmps[i])));
3715 } 3687 }
3728 static void 3700 static void
3729 alpha_expand_unaligned_store_words (rtx *data_regs, rtx dmem, 3701 alpha_expand_unaligned_store_words (rtx *data_regs, rtx dmem,
3730 HOST_WIDE_INT words, HOST_WIDE_INT ofs) 3702 HOST_WIDE_INT words, HOST_WIDE_INT ofs)
3731 { 3703 {
3732 rtx const im8 = GEN_INT (-8); 3704 rtx const im8 = GEN_INT (-8);
3733 rtx const i64 = GEN_INT (64);
3734 rtx ins_tmps[MAX_MOVE_WORDS]; 3705 rtx ins_tmps[MAX_MOVE_WORDS];
3735 rtx st_tmp_1, st_tmp_2, dreg; 3706 rtx st_tmp_1, st_tmp_2, dreg;
3736 rtx st_addr_1, st_addr_2, dmema; 3707 rtx st_addr_1, st_addr_2, dmema;
3737 HOST_WIDE_INT i; 3708 HOST_WIDE_INT i;
3738 3709
3750 if (ofs != 0) 3721 if (ofs != 0)
3751 dmem = adjust_address (dmem, GET_MODE (dmem), ofs); 3722 dmem = adjust_address (dmem, GET_MODE (dmem), ofs);
3752 3723
3753 st_addr_2 = change_address (dmem, DImode, 3724 st_addr_2 = change_address (dmem, DImode,
3754 gen_rtx_AND (DImode, 3725 gen_rtx_AND (DImode,
3755 plus_constant (dmema, words*8 - 1), 3726 plus_constant (DImode, dmema,
3756 im8)); 3727 words*8 - 1),
3728 im8));
3757 set_mem_alias_set (st_addr_2, 0); 3729 set_mem_alias_set (st_addr_2, 0);
3758 3730
3759 st_addr_1 = change_address (dmem, DImode, 3731 st_addr_1 = change_address (dmem, DImode,
3760 gen_rtx_AND (DImode, dmema, im8)); 3732 gen_rtx_AND (DImode, dmema, im8));
3761 set_mem_alias_set (st_addr_1, 0); 3733 set_mem_alias_set (st_addr_1, 0);
3764 emit_move_insn (st_tmp_2, st_addr_2); 3736 emit_move_insn (st_tmp_2, st_addr_2);
3765 emit_move_insn (st_tmp_1, st_addr_1); 3737 emit_move_insn (st_tmp_1, st_addr_1);
3766 3738
3767 /* Shift the input data into place. */ 3739 /* Shift the input data into place. */
3768 dreg = copy_addr_to_reg (dmema); 3740 dreg = copy_addr_to_reg (dmema);
3769 if (WORDS_BIG_ENDIAN)
3770 emit_move_insn (dreg, plus_constant (dreg, 7));
3771 if (data_regs != NULL) 3741 if (data_regs != NULL)
3772 { 3742 {
3773 for (i = words-1; i >= 0; --i) 3743 for (i = words-1; i >= 0; --i)
3774 { 3744 {
3775 if (WORDS_BIG_ENDIAN) 3745 emit_insn (gen_insqh (ins_tmps[i], data_regs[i], dreg));
3776 { 3746 emit_insn (gen_insql (data_regs[i], data_regs[i], dreg));
3777 emit_insn (gen_insql_be (ins_tmps[i], data_regs[i], dreg));
3778 emit_insn (gen_insxh (data_regs[i], data_regs[i], i64, dreg));
3779 }
3780 else
3781 {
3782 emit_insn (gen_insxh (ins_tmps[i], data_regs[i], i64, dreg));
3783 emit_insn (gen_insql_le (data_regs[i], data_regs[i], dreg));
3784 }
3785 } 3747 }
3786 for (i = words-1; i > 0; --i) 3748 for (i = words-1; i > 0; --i)
3787 { 3749 {
3788 ins_tmps[i-1] = expand_binop (DImode, ior_optab, data_regs[i], 3750 ins_tmps[i-1] = expand_binop (DImode, ior_optab, data_regs[i],
3789 ins_tmps[i-1], ins_tmps[i-1], 1, 3751 ins_tmps[i-1], ins_tmps[i-1], 1,
3790 OPTAB_WIDEN); 3752 OPTAB_WIDEN);
3791 } 3753 }
3792 } 3754 }
3793 3755
3794 /* Split and merge the ends with the destination data. */ 3756 /* Split and merge the ends with the destination data. */
3795 if (WORDS_BIG_ENDIAN) 3757 emit_insn (gen_mskqh (st_tmp_2, st_tmp_2, dreg));
3796 { 3758 emit_insn (gen_mskql (st_tmp_1, st_tmp_1, dreg));
3797 emit_insn (gen_mskxl_be (st_tmp_2, st_tmp_2, constm1_rtx, dreg));
3798 emit_insn (gen_mskxh (st_tmp_1, st_tmp_1, i64, dreg));
3799 }
3800 else
3801 {
3802 emit_insn (gen_mskxh (st_tmp_2, st_tmp_2, i64, dreg));
3803 emit_insn (gen_mskxl_le (st_tmp_1, st_tmp_1, constm1_rtx, dreg));
3804 }
3805 3759
3806 if (data_regs != NULL) 3760 if (data_regs != NULL)
3807 { 3761 {
3808 st_tmp_2 = expand_binop (DImode, ior_optab, st_tmp_2, ins_tmps[words-1], 3762 st_tmp_2 = expand_binop (DImode, ior_optab, st_tmp_2, ins_tmps[words-1],
3809 st_tmp_2, 1, OPTAB_WIDEN); 3763 st_tmp_2, 1, OPTAB_WIDEN);
3810 st_tmp_1 = expand_binop (DImode, ior_optab, st_tmp_1, data_regs[0], 3764 st_tmp_1 = expand_binop (DImode, ior_optab, st_tmp_1, data_regs[0],
3811 st_tmp_1, 1, OPTAB_WIDEN); 3765 st_tmp_1, 1, OPTAB_WIDEN);
3812 } 3766 }
3813 3767
3814 /* Store it all. */ 3768 /* Store it all. */
3815 if (WORDS_BIG_ENDIAN) 3769 emit_move_insn (st_addr_2, st_tmp_2);
3816 emit_move_insn (st_addr_1, st_tmp_1);
3817 else
3818 emit_move_insn (st_addr_2, st_tmp_2);
3819 for (i = words-1; i > 0; --i) 3770 for (i = words-1; i > 0; --i)
3820 { 3771 {
3821 rtx tmp = change_address (dmem, DImode, 3772 rtx tmp = change_address (dmem, DImode,
3822 gen_rtx_AND (DImode, 3773 gen_rtx_AND (DImode,
3823 plus_constant(dmema, 3774 plus_constant (DImode,
3824 WORDS_BIG_ENDIAN ? i*8-1 : i*8), 3775 dmema, i*8),
3825 im8)); 3776 im8));
3826 set_mem_alias_set (tmp, 0); 3777 set_mem_alias_set (tmp, 0);
3827 emit_move_insn (tmp, data_regs ? ins_tmps[i-1] : const0_rtx); 3778 emit_move_insn (tmp, data_regs ? ins_tmps[i-1] : const0_rtx);
3828 } 3779 }
3829 if (WORDS_BIG_ENDIAN) 3780 emit_move_insn (st_addr_1, st_tmp_1);
3830 emit_move_insn (st_addr_2, st_tmp_2);
3831 else
3832 emit_move_insn (st_addr_1, st_tmp_1);
3833 } 3781 }
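The store path above is the mirror image; a hedged C sketch of what mskql/mskqh and insql/insqh accomplish for a single destination quadword (illustrative name, little-endian semantics):

#include <stdint.h>

/* D1 and D2 are the two aligned destination quadwords, DATA the value
   being stored, OFS the low three bits of the address.  The msk
   instructions clear the bytes about to be overwritten, the ins
   instructions shift DATA into position, and an OR merges the halves.  */
static void
unaligned_store_word (uint64_t *d1, uint64_t *d2, uint64_t data, unsigned ofs)
{
  if (ofs == 0)
    {
      *d1 = data;                                      /* fully aligned */
      return;
    }
  uint64_t keep_lo = (UINT64_C (1) << (8 * ofs)) - 1;  /* mskql keeps these */
  *d1 = (*d1 & keep_lo) | (data << (8 * ofs));         /* insql + or */
  uint64_t keep_hi = ~keep_lo;                         /* mskqh keeps these */
  *d2 = (*d2 & keep_hi) | (data >> (64 - 8 * ofs));    /* insqh + or */
}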
3834 3782
3835 3783
3836 /* Expand string/block move operations. 3784 /* Expand string/block move operations.
3837 3785
4129 4077
4130 /* Handle an unaligned prefix first. */ 4078 /* Handle an unaligned prefix first. */
4131 4079
4132 if (alignofs > 0) 4080 if (alignofs > 0)
4133 { 4081 {
4134 #if HOST_BITS_PER_WIDE_INT >= 64
4135 /* Given that alignofs is bounded by align, the only time BWX could 4082 /* Given that alignofs is bounded by align, the only time BWX could
4136 generate three stores is for a 7 byte fill. Prefer two individual 4083 generate three stores is for a 7 byte fill. Prefer two individual
4137 stores over a load/mask/store sequence. */ 4084 stores over a load/mask/store sequence. */
4138 if ((!TARGET_BWX || alignofs == 7) 4085 if ((!TARGET_BWX || alignofs == 7)
4139 && align >= 32 4086 && align >= 32
4140 && !(alignofs == 4 && bytes >= 4)) 4087 && !(alignofs == 4 && bytes >= 4))
4141 { 4088 {
4142 enum machine_mode mode = (align >= 64 ? DImode : SImode); 4089 machine_mode mode = (align >= 64 ? DImode : SImode);
4143 int inv_alignofs = (align >= 64 ? 8 : 4) - alignofs; 4090 int inv_alignofs = (align >= 64 ? 8 : 4) - alignofs;
4144 rtx mem, tmp; 4091 rtx mem, tmp;
4145 HOST_WIDE_INT mask; 4092 HOST_WIDE_INT mask;
4146 4093
4147 mem = adjust_address (orig_dst, mode, ofs - inv_alignofs); 4094 mem = adjust_address (orig_dst, mode, ofs - inv_alignofs);
4148 set_mem_alias_set (mem, 0); 4095 set_mem_alias_set (mem, 0);
4149 4096
4150 mask = ~(~(HOST_WIDE_INT)0 << (inv_alignofs * 8)); 4097 mask = ~(HOST_WIDE_INT_M1U << (inv_alignofs * 8));
4151 if (bytes < alignofs) 4098 if (bytes < alignofs)
4152 { 4099 {
4153 mask |= ~(HOST_WIDE_INT)0 << ((inv_alignofs + bytes) * 8); 4100 mask |= HOST_WIDE_INT_M1U << ((inv_alignofs + bytes) * 8);
4154 ofs += bytes; 4101 ofs += bytes;
4155 bytes = 0; 4102 bytes = 0;
4156 } 4103 }
4157 else 4104 else
4158 { 4105 {
4164 tmp = expand_binop (mode, and_optab, mem, GEN_INT (mask), 4111 tmp = expand_binop (mode, and_optab, mem, GEN_INT (mask),
4165 NULL_RTX, 1, OPTAB_WIDEN); 4112 NULL_RTX, 1, OPTAB_WIDEN);
4166 4113
4167 emit_move_insn (mem, tmp); 4114 emit_move_insn (mem, tmp);
4168 } 4115 }
4169 #endif
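A worked example of the prefix-mask arithmetic above, as a standalone C sketch (the chosen alignofs value is hypothetical): with align >= 64 and alignofs == 3, inv_alignofs is 5 and the AND keeps the five bytes below the region being cleared.

#include <stdint.h>
#include <inttypes.h>
#include <stdio.h>

int
main (void)
{
  int inv_alignofs = 5;               /* (align >= 64 ? 8 : 4) - alignofs */
  uint64_t mask = ~(~UINT64_C (0) << (inv_alignofs * 8));
  /* Prints 0x000000ffffffffff: low 5 bytes kept, top 3 bytes zeroed.  */
  printf ("0x%016" PRIx64 "\n", mask);
  return 0;
}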
4170 4116
4171 if (TARGET_BWX && (alignofs & 1) && bytes >= 1) 4117 if (TARGET_BWX && (alignofs & 1) && bytes >= 1)
4172 { 4118 {
4173 emit_move_insn (adjust_address (orig_dst, QImode, ofs), const0_rtx); 4119 emit_move_insn (adjust_address (orig_dst, QImode, ofs), const0_rtx);
4174 bytes -= 1; 4120 bytes -= 1;
4232 for (i = 0; i < words; ++i) 4178 for (i = 0; i < words; ++i)
4233 { 4179 {
4234 rtx mem 4180 rtx mem
4235 = change_address (orig_dst, DImode, 4181 = change_address (orig_dst, DImode,
4236 gen_rtx_AND (DImode, 4182 gen_rtx_AND (DImode,
4237 plus_constant (orig_dsta, ofs + i*8), 4183 plus_constant (DImode, orig_dsta,
4184 ofs + i*8),
4238 GEN_INT (-8))); 4185 GEN_INT (-8)));
4239 set_mem_alias_set (mem, 0); 4186 set_mem_alias_set (mem, 0);
4240 emit_move_insn (mem, const0_rtx); 4187 emit_move_insn (mem, const0_rtx);
4241 } 4188 }
4242 4189
4275 ofs += words * 8; 4222 ofs += words * 8;
4276 } 4223 }
4277 4224
4278 /* Next clean up any trailing pieces. */ 4225 /* Next clean up any trailing pieces. */
4279 4226
4280 #if HOST_BITS_PER_WIDE_INT >= 64
4281 /* Count the number of bits in BYTES for which aligned stores could 4227 /* Count the number of bits in BYTES for which aligned stores could
4282 be emitted. */ 4228 be emitted. */
4283 words = 0; 4229 words = 0;
4284 for (i = (TARGET_BWX ? 1 : 4); i * BITS_PER_UNIT <= align ; i <<= 1) 4230 for (i = (TARGET_BWX ? 1 : 4); i * BITS_PER_UNIT <= align ; i <<= 1)
4285 if (bytes & i) 4231 if (bytes & i)
4295 HOST_WIDE_INT mask; 4241 HOST_WIDE_INT mask;
4296 4242
4297 mem = adjust_address (orig_dst, DImode, ofs); 4243 mem = adjust_address (orig_dst, DImode, ofs);
4298 set_mem_alias_set (mem, 0); 4244 set_mem_alias_set (mem, 0);
4299 4245
4300 mask = ~(HOST_WIDE_INT)0 << (bytes * 8); 4246 mask = HOST_WIDE_INT_M1U << (bytes * 8);
4301 4247
4302 tmp = expand_binop (DImode, and_optab, mem, GEN_INT (mask), 4248 tmp = expand_binop (DImode, and_optab, mem, GEN_INT (mask),
4303 NULL_RTX, 1, OPTAB_WIDEN); 4249 NULL_RTX, 1, OPTAB_WIDEN);
4304 4250
4305 emit_move_insn (mem, tmp); 4251 emit_move_insn (mem, tmp);
4311 HOST_WIDE_INT mask; 4257 HOST_WIDE_INT mask;
4312 4258
4313 mem = adjust_address (orig_dst, SImode, ofs); 4259 mem = adjust_address (orig_dst, SImode, ofs);
4314 set_mem_alias_set (mem, 0); 4260 set_mem_alias_set (mem, 0);
4315 4261
4316 mask = ~(HOST_WIDE_INT)0 << (bytes * 8); 4262 mask = HOST_WIDE_INT_M1U << (bytes * 8);
4317 4263
4318 tmp = expand_binop (SImode, and_optab, mem, GEN_INT (mask), 4264 tmp = expand_binop (SImode, and_optab, mem, GEN_INT (mask),
4319 NULL_RTX, 1, OPTAB_WIDEN); 4265 NULL_RTX, 1, OPTAB_WIDEN);
4320 4266
4321 emit_move_insn (mem, tmp); 4267 emit_move_insn (mem, tmp);
4322 return 1; 4268 return 1;
4323 } 4269 }
4324 } 4270 }
4325 #endif
4326 4271
4327 if (!TARGET_BWX && bytes >= 4) 4272 if (!TARGET_BWX && bytes >= 4)
4328 { 4273 {
4329 alpha_expand_unaligned_store (orig_dst, const0_rtx, 4, ofs); 4274 alpha_expand_unaligned_store (orig_dst, const0_rtx, 4, ofs);
4330 bytes -= 4; 4275 bytes -= 4;
4365 rtx 4310 rtx
4366 alpha_expand_zap_mask (HOST_WIDE_INT value) 4311 alpha_expand_zap_mask (HOST_WIDE_INT value)
4367 { 4312 {
4368 rtx result; 4313 rtx result;
4369 int i; 4314 int i;
4370 4315 HOST_WIDE_INT mask = 0;
4371 if (HOST_BITS_PER_WIDE_INT >= 64) 4316
4372 { 4317 for (i = 7; i >= 0; --i)
4373 HOST_WIDE_INT mask = 0; 4318 {
4374 4319 mask <<= 8;
4375 for (i = 7; i >= 0; --i) 4320 if (!((value >> i) & 1))
4376 { 4321 mask |= 0xff;
4377 mask <<= 8; 4322 }
4378 if (!((value >> i) & 1)) 4323
4379 mask |= 0xff; 4324 result = gen_int_mode (mask, DImode);
4380 }
4381
4382 result = gen_int_mode (mask, DImode);
4383 }
4384 else
4385 {
4386 HOST_WIDE_INT mask_lo = 0, mask_hi = 0;
4387
4388 gcc_assert (HOST_BITS_PER_WIDE_INT == 32);
4389
4390 for (i = 7; i >= 4; --i)
4391 {
4392 mask_hi <<= 8;
4393 if (!((value >> i) & 1))
4394 mask_hi |= 0xff;
4395 }
4396
4397 for (i = 3; i >= 0; --i)
4398 {
4399 mask_lo <<= 8;
4400 if (!((value >> i) & 1))
4401 mask_lo |= 0xff;
4402 }
4403
4404 result = immed_double_const (mask_lo, mask_hi, DImode);
4405 }
4406
4407 return result; 4325 return result;
4408 } 4326 }
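A standalone C mirror of the simplified loop above; the driver below is illustrative:

#include <stdint.h>
#include <inttypes.h>
#include <stdio.h>

/* Bit I of VALUE selects byte I for zapping: a set bit yields a 0x00
   byte in the AND mask, a clear bit yields 0xff.  */
static uint64_t
zap_mask (unsigned value)
{
  uint64_t mask = 0;
  for (int i = 7; i >= 0; --i)
    {
      mask <<= 8;
      if (!((value >> i) & 1))
        mask |= 0xff;
    }
  return mask;
}

int
main (void)
{
  /* value 0x0f zaps (zeroes) bytes 0-3 and keeps bytes 4-7.  */
  printf ("0x%016" PRIx64 "\n", zap_mask (0x0f));  /* 0xffffffff00000000 */
  return 0;
}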
4409 4327
4410 void 4328 void
4411 alpha_expand_builtin_vector_binop (rtx (*gen) (rtx, rtx, rtx), 4329 alpha_expand_builtin_vector_binop (rtx (*gen) (rtx, rtx, rtx),
4412 enum machine_mode mode, 4330 machine_mode mode,
4413 rtx op0, rtx op1, rtx op2) 4331 rtx op0, rtx op1, rtx op2)
4414 { 4332 {
4415 op0 = gen_lowpart (mode, op0); 4333 op0 = gen_lowpart (mode, op0);
4416 4334
4417 if (op1 == const0_rtx) 4335 if (op1 == const0_rtx)
4431 COND is true. Mark the jump as unlikely to be taken. */ 4349 COND is true. Mark the jump as unlikely to be taken. */
4432 4350
4433 static void 4351 static void
4434 emit_unlikely_jump (rtx cond, rtx label) 4352 emit_unlikely_jump (rtx cond, rtx label)
4435 { 4353 {
4436 rtx very_unlikely = GEN_INT (REG_BR_PROB_BASE / 100 - 1); 4354 rtx x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
4437 rtx x; 4355 rtx_insn *insn = emit_jump_insn (gen_rtx_SET (pc_rtx, x));
4438 4356 add_reg_br_prob_note (insn, profile_probability::very_unlikely ());
4439 x = gen_rtx_IF_THEN_ELSE (VOIDmode, cond, label, pc_rtx);
4440 x = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, x));
4441 add_reg_note (x, REG_BR_PROB, very_unlikely);
4442 } 4357 }
4443 4358
4444 /* A subroutine of the atomic operation splitters. Emit a load-locked 4359 /* A subroutine of the atomic operation splitters. Emit a load-locked
4445 instruction in MODE. */ 4360 instruction in MODE. */
4446 4361
4447 static void 4362 static void
4448 emit_load_locked (enum machine_mode mode, rtx reg, rtx mem) 4363 emit_load_locked (machine_mode mode, rtx reg, rtx mem)
4449 { 4364 {
4450 rtx (*fn) (rtx, rtx) = NULL; 4365 rtx (*fn) (rtx, rtx) = NULL;
4451 if (mode == SImode) 4366 if (mode == SImode)
4452 fn = gen_load_locked_si; 4367 fn = gen_load_locked_si;
4453 else if (mode == DImode) 4368 else if (mode == DImode)
4457 4372
4458 /* A subroutine of the atomic operation splitters. Emit a store-conditional 4373 /* A subroutine of the atomic operation splitters. Emit a store-conditional
4459 instruction in MODE. */ 4374 instruction in MODE. */
4460 4375
4461 static void 4376 static void
4462 emit_store_conditional (enum machine_mode mode, rtx res, rtx mem, rtx val) 4377 emit_store_conditional (machine_mode mode, rtx res, rtx mem, rtx val)
4463 { 4378 {
4464 rtx (*fn) (rtx, rtx, rtx) = NULL; 4379 rtx (*fn) (rtx, rtx, rtx) = NULL;
4465 if (mode == SImode) 4380 if (mode == SImode)
4466 fn = gen_store_conditional_si; 4381 fn = gen_store_conditional_si;
4467 else if (mode == DImode) 4382 else if (mode == DImode)
4468 fn = gen_store_conditional_di; 4383 fn = gen_store_conditional_di;
4469 emit_insn (fn (res, mem, val)); 4384 emit_insn (fn (res, mem, val));
4470 } 4385 }
4471 4386
4387 /* Subroutines of the atomic operation splitters. Emit barriers
4388 as needed for the memory MODEL. */
4389
4390 static void
4391 alpha_pre_atomic_barrier (enum memmodel model)
4392 {
4393 if (need_atomic_barrier_p (model, true))
4394 emit_insn (gen_memory_barrier ());
4395 }
4396
4397 static void
4398 alpha_post_atomic_barrier (enum memmodel model)
4399 {
4400 if (need_atomic_barrier_p (model, false))
4401 emit_insn (gen_memory_barrier ());
4402 }
4403
4472 /* A subroutine of the atomic operation splitters. Emit an insxl 4404 /* A subroutine of the atomic operation splitters. Emit an insxl
4473 instruction in MODE. */ 4405 instruction in MODE. */
4474 4406
4475 static rtx 4407 static rtx
4476 emit_insxl (enum machine_mode mode, rtx op1, rtx op2) 4408 emit_insxl (machine_mode mode, rtx op1, rtx op2)
4477 { 4409 {
4478 rtx ret = gen_reg_rtx (DImode); 4410 rtx ret = gen_reg_rtx (DImode);
4479 rtx (*fn) (rtx, rtx, rtx); 4411 rtx (*fn) (rtx, rtx, rtx);
4480 4412
4481 if (WORDS_BIG_ENDIAN) 4413 switch (mode)
4482 { 4414 {
4483 if (mode == QImode) 4415 case E_QImode:
4484 fn = gen_insbl_be; 4416 fn = gen_insbl;
4485 else 4417 break;
4486 fn = gen_inswl_be; 4418 case E_HImode:
4487 } 4419 fn = gen_inswl;
4488 else 4420 break;
4489 { 4421 case E_SImode:
4490 if (mode == QImode) 4422 fn = gen_insll;
4491 fn = gen_insbl_le; 4423 break;
4492 else 4424 case E_DImode:
4493 fn = gen_inswl_le; 4425 fn = gen_insql;
4494 } 4426 break;
4495 /* The insbl and inswl patterns require a register operand. */ 4427 default:
4428 gcc_unreachable ();
4429 }
4430
4496 op1 = force_reg (mode, op1); 4431 op1 = force_reg (mode, op1);
4497 emit_insn (fn (ret, op1, op2)); 4432 emit_insn (fn (ret, op1, op2));
4498 4433
4499 return ret; 4434 return ret;
4500 } 4435 }
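A rough C model of the insxl family selected above (a sketch assuming little-endian byte numbering; insbl/inswl/insll/insql correspond to width_bytes of 1, 2, 4 and 8):

#include <stdint.h>

/* OP1 is the value to insert, ADDR supplies the byte offset in its low
   three bits.  The instruction zero-extends OP1 to WIDTH_BYTES bytes
   and shifts it into the addressed byte lane of a quadword.  */
static uint64_t
insxl_model (uint64_t op1, uint64_t addr, unsigned width_bytes)
{
  unsigned ofs = addr & 7;
  uint64_t field = (width_bytes == 8
                    ? op1
                    : op1 & ((UINT64_C (1) << (8 * width_bytes)) - 1));
  return field << (8 * ofs);
}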
4504 operand of the binary operator. BEFORE and AFTER are optional locations to 4439 operand of the binary operator. BEFORE and AFTER are optional locations to
4505 return the value of MEM either before or after the operation. SCRATCH is 4440 return the value of MEM either before or after the operation. SCRATCH is
4506 a scratch register. */ 4441 a scratch register. */
4507 4442
4508 void 4443 void
4509 alpha_split_atomic_op (enum rtx_code code, rtx mem, rtx val, 4444 alpha_split_atomic_op (enum rtx_code code, rtx mem, rtx val, rtx before,
4510 rtx before, rtx after, rtx scratch) 4445 rtx after, rtx scratch, enum memmodel model)
4511 { 4446 {
4512 enum machine_mode mode = GET_MODE (mem); 4447 machine_mode mode = GET_MODE (mem);
4513 rtx label, x, cond = gen_rtx_REG (DImode, REGNO (scratch)); 4448 rtx label, x, cond = gen_rtx_REG (DImode, REGNO (scratch));
4514 4449
4515 emit_insn (gen_memory_barrier ()); 4450 alpha_pre_atomic_barrier (model);
4516 4451
4517 label = gen_label_rtx (); 4452 label = gen_label_rtx ();
4518 emit_label (label); 4453 emit_label (label);
4519 label = gen_rtx_LABEL_REF (DImode, label); 4454 label = gen_rtx_LABEL_REF (DImode, label);
4520 4455
4523 emit_load_locked (mode, before, mem); 4458 emit_load_locked (mode, before, mem);
4524 4459
4525 if (code == NOT) 4460 if (code == NOT)
4526 { 4461 {
4527 x = gen_rtx_AND (mode, before, val); 4462 x = gen_rtx_AND (mode, before, val);
4528 emit_insn (gen_rtx_SET (VOIDmode, val, x)); 4463 emit_insn (gen_rtx_SET (val, x));
4529 4464
4530 x = gen_rtx_NOT (mode, val); 4465 x = gen_rtx_NOT (mode, val);
4531 } 4466 }
4532 else 4467 else
4533 x = gen_rtx_fmt_ee (code, mode, before, val); 4468 x = gen_rtx_fmt_ee (code, mode, before, val);
4534 if (after) 4469 if (after)
4535 emit_insn (gen_rtx_SET (VOIDmode, after, copy_rtx (x))); 4470 emit_insn (gen_rtx_SET (after, copy_rtx (x)));
4536 emit_insn (gen_rtx_SET (VOIDmode, scratch, x)); 4471 emit_insn (gen_rtx_SET (scratch, x));
4537 4472
4538 emit_store_conditional (mode, cond, mem, scratch); 4473 emit_store_conditional (mode, cond, mem, scratch);
4539 4474
4540 x = gen_rtx_EQ (DImode, cond, const0_rtx); 4475 x = gen_rtx_EQ (DImode, cond, const0_rtx);
4541 emit_unlikely_jump (x, label); 4476 emit_unlikely_jump (x, label);
4542 4477
4543 emit_insn (gen_memory_barrier ()); 4478 alpha_post_atomic_barrier (model);
4544 } 4479 }
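The shape this splitter emits, restated with C11 atomics for the code == NOT (NAND) case; a sketch only, since the real expansion is a ldq_l/stq_c retry loop with barriers placed by the two helpers above:

#include <stdatomic.h>

static long
atomic_fetch_nand_model (volatile _Atomic long *mem, long val)
{
  long before = atomic_load_explicit (mem, memory_order_relaxed);
  long after;
  do
    after = ~(before & val);          /* the AND-then-NOT pair above */
  while (!atomic_compare_exchange_weak_explicit (mem, &before, after,
                                                 memory_order_seq_cst,
                                                 memory_order_relaxed));
  return before;                      /* BEFORE; AFTER = ~(BEFORE & VAL) */
}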
4545 4480
4546 /* Expand a compare and swap operation. */ 4481 /* Expand a compare and swap operation. */
4547 4482
4548 void 4483 void
4549 alpha_split_compare_and_swap (rtx retval, rtx mem, rtx oldval, rtx newval, 4484 alpha_split_compare_and_swap (rtx operands[])
4550 rtx scratch) 4485 {
4551 { 4486 rtx cond, retval, mem, oldval, newval;
4552 enum machine_mode mode = GET_MODE (mem); 4487 bool is_weak;
4553 rtx label1, label2, x, cond = gen_lowpart (DImode, scratch); 4488 enum memmodel mod_s, mod_f;
4554 4489 machine_mode mode;
4555 emit_insn (gen_memory_barrier ()); 4490 rtx label1, label2, x;
4556 4491
4557 label1 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ()); 4492 cond = operands[0];
4493 retval = operands[1];
4494 mem = operands[2];
4495 oldval = operands[3];
4496 newval = operands[4];
4497 is_weak = (operands[5] != const0_rtx);
4498 mod_s = memmodel_from_int (INTVAL (operands[6]));
4499 mod_f = memmodel_from_int (INTVAL (operands[7]));
4500 mode = GET_MODE (mem);
4501
4502 alpha_pre_atomic_barrier (mod_s);
4503
4504 label1 = NULL_RTX;
4505 if (!is_weak)
4506 {
4507 label1 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
4508 emit_label (XEXP (label1, 0));
4509 }
4558 label2 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ()); 4510 label2 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
4559 emit_label (XEXP (label1, 0));
4560 4511
4561 emit_load_locked (mode, retval, mem); 4512 emit_load_locked (mode, retval, mem);
4562 4513
4563 x = gen_lowpart (DImode, retval); 4514 x = gen_lowpart (DImode, retval);
4564 if (oldval == const0_rtx) 4515 if (oldval == const0_rtx)
4565 x = gen_rtx_NE (DImode, x, const0_rtx); 4516 {
4517 emit_move_insn (cond, const0_rtx);
4518 x = gen_rtx_NE (DImode, x, const0_rtx);
4519 }
4566 else 4520 else
4567 { 4521 {
4568 x = gen_rtx_EQ (DImode, x, oldval); 4522 x = gen_rtx_EQ (DImode, x, oldval);
4569 emit_insn (gen_rtx_SET (VOIDmode, cond, x)); 4523 emit_insn (gen_rtx_SET (cond, x));
4570 x = gen_rtx_EQ (DImode, cond, const0_rtx); 4524 x = gen_rtx_EQ (DImode, cond, const0_rtx);
4571 } 4525 }
4572 emit_unlikely_jump (x, label2); 4526 emit_unlikely_jump (x, label2);
4573 4527
4574 emit_move_insn (scratch, newval); 4528 emit_move_insn (cond, newval);
4575 emit_store_conditional (mode, cond, mem, scratch); 4529 emit_store_conditional (mode, cond, mem, gen_lowpart (mode, cond));
4576 4530
4577 x = gen_rtx_EQ (DImode, cond, const0_rtx); 4531 if (!is_weak)
4578 emit_unlikely_jump (x, label1); 4532 {
4579 4533 x = gen_rtx_EQ (DImode, cond, const0_rtx);
4580 emit_insn (gen_memory_barrier ()); 4534 emit_unlikely_jump (x, label1);
4581 emit_label (XEXP (label2, 0)); 4535 }
4536
4537 if (!is_mm_relaxed (mod_f))
4538 emit_label (XEXP (label2, 0));
4539
4540 alpha_post_atomic_barrier (mod_s);
4541
4542 if (is_mm_relaxed (mod_f))
4543 emit_label (XEXP (label2, 0));
4582 } 4544 }
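The is_weak distinction maps onto the C11 interface: a strong compare-exchange retries internally on a spurious stq_c failure (the label1 back-edge above), while a weak one reports failure and lets the caller loop. A minimal caller-side sketch:

#include <stdatomic.h>
#include <stdbool.h>

static bool
increment_if_positive (volatile _Atomic long *mem)
{
  long expected = atomic_load (mem);
  while (expected > 0)
    {
      /* A weak CAS may fail spuriously, hence the surrounding loop;
         on Alpha the failure path is the stq_c writing 0 to COND.  */
      if (atomic_compare_exchange_weak (mem, &expected, expected + 1))
        return true;
    }
  return false;
}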
4583 4545
4584 void 4546 void
4585 alpha_expand_compare_and_swap_12 (rtx dst, rtx mem, rtx oldval, rtx newval) 4547 alpha_expand_compare_and_swap_12 (rtx operands[])
4586 { 4548 {
4587 enum machine_mode mode = GET_MODE (mem); 4549 rtx cond, dst, mem, oldval, newval, is_weak, mod_s, mod_f;
4550 machine_mode mode;
4588 rtx addr, align, wdst; 4551 rtx addr, align, wdst;
4589 rtx (*fn5) (rtx, rtx, rtx, rtx, rtx); 4552 rtx (*gen) (rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx, rtx);
4590 4553
4591 addr = force_reg (DImode, XEXP (mem, 0)); 4554 cond = operands[0];
4555 dst = operands[1];
4556 mem = operands[2];
4557 oldval = operands[3];
4558 newval = operands[4];
4559 is_weak = operands[5];
4560 mod_s = operands[6];
4561 mod_f = operands[7];
4562 mode = GET_MODE (mem);
4563
4564 /* We forced the address into a register via mem_noofs_operand. */
4565 addr = XEXP (mem, 0);
4566 gcc_assert (register_operand (addr, DImode));
4567
4592 align = expand_simple_binop (Pmode, AND, addr, GEN_INT (-8), 4568 align = expand_simple_binop (Pmode, AND, addr, GEN_INT (-8),
4593 NULL_RTX, 1, OPTAB_DIRECT); 4569 NULL_RTX, 1, OPTAB_DIRECT);
4594 4570
4595 oldval = convert_modes (DImode, mode, oldval, 1); 4571 oldval = convert_modes (DImode, mode, oldval, 1);
4596 newval = emit_insxl (mode, newval, addr); 4572
4573 if (newval != const0_rtx)
4574 newval = emit_insxl (mode, newval, addr);
4597 4575
4598 wdst = gen_reg_rtx (DImode); 4576 wdst = gen_reg_rtx (DImode);
4599 if (mode == QImode) 4577 if (mode == QImode)
4600 fn5 = gen_sync_compare_and_swapqi_1; 4578 gen = gen_atomic_compare_and_swapqi_1;
4601 else 4579 else
4602 fn5 = gen_sync_compare_and_swaphi_1; 4580 gen = gen_atomic_compare_and_swaphi_1;
4603 emit_insn (fn5 (wdst, addr, oldval, newval, align)); 4581 emit_insn (gen (cond, wdst, mem, oldval, newval, align,
4582 is_weak, mod_s, mod_f));
4604 4583
4605 emit_move_insn (dst, gen_lowpart (mode, wdst)); 4584 emit_move_insn (dst, gen_lowpart (mode, wdst));
4606 } 4585 }
4607 4586
4608 void 4587 void
4609 alpha_split_compare_and_swap_12 (enum machine_mode mode, rtx dest, rtx addr, 4588 alpha_split_compare_and_swap_12 (rtx operands[])
4610 rtx oldval, rtx newval, rtx align, 4589 {
4611 rtx scratch, rtx cond) 4590 rtx cond, dest, orig_mem, oldval, newval, align, scratch;
4612 { 4591 machine_mode mode;
4613 rtx label1, label2, mem, width, mask, x; 4592 bool is_weak;
4593 enum memmodel mod_s, mod_f;
4594 rtx label1, label2, mem, addr, width, mask, x;
4595
4596 cond = operands[0];
4597 dest = operands[1];
4598 orig_mem = operands[2];
4599 oldval = operands[3];
4600 newval = operands[4];
4601 align = operands[5];
4602 is_weak = (operands[6] != const0_rtx);
4603 mod_s = memmodel_from_int (INTVAL (operands[7]));
4604 mod_f = memmodel_from_int (INTVAL (operands[8]));
4605 scratch = operands[9];
4606 mode = GET_MODE (orig_mem);
4607 addr = XEXP (orig_mem, 0);
4614 4608
4615 mem = gen_rtx_MEM (DImode, align); 4609 mem = gen_rtx_MEM (DImode, align);
4616 MEM_VOLATILE_P (mem) = 1; 4610 MEM_VOLATILE_P (mem) = MEM_VOLATILE_P (orig_mem);
4617 4611 if (MEM_ALIAS_SET (orig_mem) == ALIAS_SET_MEMORY_BARRIER)
4618 emit_insn (gen_memory_barrier ()); 4612 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
4619 label1 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ()); 4613
4614 alpha_pre_atomic_barrier (mod_s);
4615
4616 label1 = NULL_RTX;
4617 if (!is_weak)
4618 {
4619 label1 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
4620 emit_label (XEXP (label1, 0));
4621 }
4620 label2 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ()); 4622 label2 = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
4621 emit_label (XEXP (label1, 0));
4622 4623
4623 emit_load_locked (DImode, scratch, mem); 4624 emit_load_locked (DImode, scratch, mem);
4624 4625
4625 width = GEN_INT (GET_MODE_BITSIZE (mode)); 4626 width = GEN_INT (GET_MODE_BITSIZE (mode));
4626 mask = GEN_INT (mode == QImode ? 0xff : 0xffff); 4627 mask = GEN_INT (mode == QImode ? 0xff : 0xffff);
4627 if (WORDS_BIG_ENDIAN) 4628 emit_insn (gen_extxl (dest, scratch, width, addr));
4628 emit_insn (gen_extxl_be (dest, scratch, width, addr)); 4629
4630 if (oldval == const0_rtx)
4631 {
4632 emit_move_insn (cond, const0_rtx);
4633 x = gen_rtx_NE (DImode, dest, const0_rtx);
4634 }
4629 else 4635 else
4630 emit_insn (gen_extxl_le (dest, scratch, width, addr));
4631
4632 if (oldval == const0_rtx)
4633 x = gen_rtx_NE (DImode, dest, const0_rtx);
4634 else
4635 { 4636 {
4636 x = gen_rtx_EQ (DImode, dest, oldval); 4637 x = gen_rtx_EQ (DImode, dest, oldval);
4637 emit_insn (gen_rtx_SET (VOIDmode, cond, x)); 4638 emit_insn (gen_rtx_SET (cond, x));
4638 x = gen_rtx_EQ (DImode, cond, const0_rtx); 4639 x = gen_rtx_EQ (DImode, cond, const0_rtx);
4639 } 4640 }
4640 emit_unlikely_jump (x, label2); 4641 emit_unlikely_jump (x, label2);
4641 4642
4642 if (WORDS_BIG_ENDIAN) 4643 emit_insn (gen_mskxl (cond, scratch, mask, addr));
4643 emit_insn (gen_mskxl_be (scratch, scratch, mask, addr)); 4644
4644 else 4645 if (newval != const0_rtx)
4645 emit_insn (gen_mskxl_le (scratch, scratch, mask, addr)); 4646 emit_insn (gen_iordi3 (cond, cond, newval));
4646 emit_insn (gen_iordi3 (scratch, scratch, newval)); 4647
4647 4648 emit_store_conditional (DImode, cond, mem, cond);
4648 emit_store_conditional (DImode, scratch, mem, scratch); 4649
4649 4650 if (!is_weak)
4650 x = gen_rtx_EQ (DImode, scratch, const0_rtx); 4651 {
4651 emit_unlikely_jump (x, label1); 4652 x = gen_rtx_EQ (DImode, cond, const0_rtx);
4652 4653 emit_unlikely_jump (x, label1);
4653 emit_insn (gen_memory_barrier ()); 4654 }
4654 emit_label (XEXP (label2, 0)); 4655
4656 if (!is_mm_relaxed (mod_f))
4657 emit_label (XEXP (label2, 0));
4658
4659 alpha_post_atomic_barrier (mod_s);
4660
4661 if (is_mm_relaxed (mod_f))
4662 emit_label (XEXP (label2, 0));
4655 } 4663 }
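A host-side C sketch of the splice the _12 splitter performs inside the containing quadword (byte case shown; mskxl clears the addressed lane, then the pre-inserted newval is OR'd in before the conditional store):

#include <stdint.h>

static uint64_t
splice_byte (uint64_t quad, uint8_t newval, unsigned ofs)
{
  uint64_t cleared = quad & ~(UINT64_C (0xff) << (8 * ofs));  /* mskbl */
  return cleared | ((uint64_t) newval << (8 * ofs));          /* insbl + or */
}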
4656 4664
4657 /* Expand an atomic exchange operation. */ 4665 /* Expand an atomic exchange operation. */
4658 4666
4659 void 4667 void
4660 alpha_split_lock_test_and_set (rtx retval, rtx mem, rtx val, rtx scratch) 4668 alpha_split_atomic_exchange (rtx operands[])
4661 { 4669 {
4662 enum machine_mode mode = GET_MODE (mem); 4670 rtx retval, mem, val, scratch;
4663 rtx label, x, cond = gen_lowpart (DImode, scratch); 4671 enum memmodel model;
4672 machine_mode mode;
4673 rtx label, x, cond;
4674
4675 retval = operands[0];
4676 mem = operands[1];
4677 val = operands[2];
4678 model = (enum memmodel) INTVAL (operands[3]);
4679 scratch = operands[4];
4680 mode = GET_MODE (mem);
4681 cond = gen_lowpart (DImode, scratch);
4682
4683 alpha_pre_atomic_barrier (model);
4664 4684
4665 label = gen_rtx_LABEL_REF (DImode, gen_label_rtx ()); 4685 label = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
4666 emit_label (XEXP (label, 0)); 4686 emit_label (XEXP (label, 0));
4667 4687
4668 emit_load_locked (mode, retval, mem); 4688 emit_load_locked (mode, retval, mem);
4670 emit_store_conditional (mode, cond, mem, scratch); 4690 emit_store_conditional (mode, cond, mem, scratch);
4671 4691
4672 x = gen_rtx_EQ (DImode, cond, const0_rtx); 4692 x = gen_rtx_EQ (DImode, cond, const0_rtx);
4673 emit_unlikely_jump (x, label); 4693 emit_unlikely_jump (x, label);
4674 4694
4675 emit_insn (gen_memory_barrier ()); 4695 alpha_post_atomic_barrier (model);
4676 } 4696 }
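At the C level this splitter is plain atomic exchange; an equivalent-shape sketch (the real expansion is the ld_l/st_c loop above, not a library call):

#include <stdatomic.h>

/* RETVAL = *MEM; *MEM = VAL; atomically, honouring MODEL.  */
static long
atomic_exchange_model (volatile _Atomic long *mem, long val)
{
  return atomic_exchange_explicit (mem, val, memory_order_seq_cst);
}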
4677 4697
4678 void 4698 void
4679 alpha_expand_lock_test_and_set_12 (rtx dst, rtx mem, rtx val) 4699 alpha_expand_atomic_exchange_12 (rtx operands[])
4680 { 4700 {
4681 enum machine_mode mode = GET_MODE (mem); 4701 rtx dst, mem, val, model;
4702 machine_mode mode;
4682 rtx addr, align, wdst; 4703 rtx addr, align, wdst;
4683 rtx (*fn4) (rtx, rtx, rtx, rtx); 4704 rtx (*gen) (rtx, rtx, rtx, rtx, rtx);
4684 4705
4685 /* Force the address into a register. */ 4706 dst = operands[0];
4686 addr = force_reg (DImode, XEXP (mem, 0)); 4707 mem = operands[1];
4687 4708 val = operands[2];
4688 /* Align it to a multiple of 8. */ 4709 model = operands[3];
4710 mode = GET_MODE (mem);
4711
4712 /* We forced the address into a register via mem_noofs_operand. */
4713 addr = XEXP (mem, 0);
4714 gcc_assert (register_operand (addr, DImode));
4715
4689 align = expand_simple_binop (Pmode, AND, addr, GEN_INT (-8), 4716 align = expand_simple_binop (Pmode, AND, addr, GEN_INT (-8),
4690 NULL_RTX, 1, OPTAB_DIRECT); 4717 NULL_RTX, 1, OPTAB_DIRECT);
4691 4718
4692 /* Insert val into the correct byte location within the word. */ 4719 /* Insert val into the correct byte location within the word. */
4693 val = emit_insxl (mode, val, addr); 4720 if (val != const0_rtx)
4721 val = emit_insxl (mode, val, addr);
4694 4722
4695 wdst = gen_reg_rtx (DImode); 4723 wdst = gen_reg_rtx (DImode);
4696 if (mode == QImode) 4724 if (mode == QImode)
4697 fn4 = gen_sync_lock_test_and_setqi_1; 4725 gen = gen_atomic_exchangeqi_1;
4698 else 4726 else
4699 fn4 = gen_sync_lock_test_and_sethi_1; 4727 gen = gen_atomic_exchangehi_1;
4700 emit_insn (fn4 (wdst, addr, val, align)); 4728 emit_insn (gen (wdst, mem, val, align, model));
4701 4729
4702 emit_move_insn (dst, gen_lowpart (mode, wdst)); 4730 emit_move_insn (dst, gen_lowpart (mode, wdst));
4703 } 4731 }
4704 4732
4705 void 4733 void
4706 alpha_split_lock_test_and_set_12 (enum machine_mode mode, rtx dest, rtx addr, 4734 alpha_split_atomic_exchange_12 (rtx operands[])
4707 rtx val, rtx align, rtx scratch) 4735 {
4708 { 4736 rtx dest, orig_mem, addr, val, align, scratch;
4709 rtx label, mem, width, mask, x; 4737 rtx label, mem, width, mask, x;
4738 machine_mode mode;
4739 enum memmodel model;
4740
4741 dest = operands[0];
4742 orig_mem = operands[1];
4743 val = operands[2];
4744 align = operands[3];
4745 model = (enum memmodel) INTVAL (operands[4]);
4746 scratch = operands[5];
4747 mode = GET_MODE (orig_mem);
4748 addr = XEXP (orig_mem, 0);
4710 4749
4711 mem = gen_rtx_MEM (DImode, align); 4750 mem = gen_rtx_MEM (DImode, align);
4712 MEM_VOLATILE_P (mem) = 1; 4751 MEM_VOLATILE_P (mem) = MEM_VOLATILE_P (orig_mem);
4752 if (MEM_ALIAS_SET (orig_mem) == ALIAS_SET_MEMORY_BARRIER)
4753 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
4754
4755 alpha_pre_atomic_barrier (model);
4713 4756
4714 label = gen_rtx_LABEL_REF (DImode, gen_label_rtx ()); 4757 label = gen_rtx_LABEL_REF (DImode, gen_label_rtx ());
4715 emit_label (XEXP (label, 0)); 4758 emit_label (XEXP (label, 0));
4716 4759
4717 emit_load_locked (DImode, scratch, mem); 4760 emit_load_locked (DImode, scratch, mem);
4718 4761
4719 width = GEN_INT (GET_MODE_BITSIZE (mode)); 4762 width = GEN_INT (GET_MODE_BITSIZE (mode));
4720 mask = GEN_INT (mode == QImode ? 0xff : 0xffff); 4763 mask = GEN_INT (mode == QImode ? 0xff : 0xffff);
4721 if (WORDS_BIG_ENDIAN) 4764 emit_insn (gen_extxl (dest, scratch, width, addr));
4722 { 4765 emit_insn (gen_mskxl (scratch, scratch, mask, addr));
4723 emit_insn (gen_extxl_be (dest, scratch, width, addr)); 4766 if (val != const0_rtx)
4724 emit_insn (gen_mskxl_be (scratch, scratch, mask, addr)); 4767 emit_insn (gen_iordi3 (scratch, scratch, val));
4725 }
4726 else
4727 {
4728 emit_insn (gen_extxl_le (dest, scratch, width, addr));
4729 emit_insn (gen_mskxl_le (scratch, scratch, mask, addr));
4730 }
4731 emit_insn (gen_iordi3 (scratch, scratch, val));
4732 4768
4733 emit_store_conditional (DImode, scratch, mem, scratch); 4769 emit_store_conditional (DImode, scratch, mem, scratch);
4734 4770
4735 x = gen_rtx_EQ (DImode, scratch, const0_rtx); 4771 x = gen_rtx_EQ (DImode, scratch, const0_rtx);
4736 emit_unlikely_jump (x, label); 4772 emit_unlikely_jump (x, label);
4737 4773
4738 emit_insn (gen_memory_barrier ()); 4774 alpha_post_atomic_barrier (model);
4739 } 4775 }
4740 4776
4741 /* Adjust the cost of a scheduling dependency. Return the new cost of 4777 /* Adjust the cost of a scheduling dependency. Return the new cost of
4742 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */ 4778 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
4743 4779
4744 static int 4780 static int
4745 alpha_adjust_cost (rtx insn, rtx link, rtx dep_insn, int cost) 4781 alpha_adjust_cost (rtx_insn *insn, int dep_type, rtx_insn *dep_insn, int cost,
4782 unsigned int)
4746 { 4783 {
4747 enum attr_type dep_insn_type; 4784 enum attr_type dep_insn_type;
4748 4785
4749 /* If the dependence is an anti-dependence, there is no cost. For an 4786 /* If the dependence is an anti-dependence, there is no cost. For an
4750 output dependence, there is sometimes a cost, but it doesn't seem 4787 output dependence, there is sometimes a cost, but it doesn't seem
4751 worth handling those few cases. */ 4788 worth handling those few cases. */
4752 if (REG_NOTE_KIND (link) != 0) 4789 if (dep_type != 0)
4753 return cost; 4790 return cost;
4754 4791
4755 /* If we can't recognize the insns, we can't really do anything. */ 4792 /* If we can't recognize the insns, we can't really do anything. */
4756 if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0) 4793 if (recog_memoized (insn) < 0 || recog_memoized (dep_insn) < 0)
4757 return cost; 4794 return cost;
4791 return (alpha_tune == PROCESSOR_EV6 ? 4 : 2); 4828 return (alpha_tune == PROCESSOR_EV6 ? 4 : 2);
4792 } 4829 }
4793 4830
4794 /* Machine-specific function data. */ 4831 /* Machine-specific function data. */
4795 4832
4833 struct GTY(()) alpha_links;
4834
4796 struct GTY(()) machine_function 4835 struct GTY(()) machine_function
4797 { 4836 {
4798 /* For unicosmk. */ 4837 /* For flag_reorder_blocks_and_partition. */
4799 /* List of call information words for calls from this function. */ 4838 rtx gp_save_rtx;
4800 struct rtx_def *first_ciw;
4801 struct rtx_def *last_ciw;
4802 int ciw_count;
4803
4804 /* List of deferred case vectors. */
4805 struct rtx_def *addr_list;
4806
4807 /* For OSF. */
4808 const char *some_ld_name;
4809
4810 /* For TARGET_LD_BUGGY_LDGP. */
4811 struct rtx_def *gp_save_rtx;
4812 4839
4813 /* For VMS condition handlers. */ 4840 /* For VMS condition handlers. */
4814 bool uses_condition_handler; 4841 bool uses_condition_handler;
4842
4843 /* Linkage entries. */
4844 hash_map<nofree_string_hash, alpha_links *> *links;
4815 }; 4845 };
4816 4846
4817 /* How to allocate a 'struct machine_function'. */ 4847 /* How to allocate a 'struct machine_function'. */
4818 4848
4819 static struct machine_function * 4849 static struct machine_function *
4820 alpha_init_machine_status (void) 4850 alpha_init_machine_status (void)
4821 { 4851 {
4822 return ggc_alloc_cleared_machine_function (); 4852 return ggc_cleared_alloc<machine_function> ();
4823 } 4853 }
4824 4854
4825 /* Support for frame based VMS condition handlers. */ 4855 /* Support for frame based VMS condition handlers. */
4826 4856
4827 /* A VMS condition handler may be established for a function with a call to 4857 /* A VMS condition handler may be established for a function with a call to
4847 into TARGET and install HANDLER as the new condition handler. */ 4877 into TARGET and install HANDLER as the new condition handler. */
4848 4878
4849 void 4879 void
4850 alpha_expand_builtin_establish_vms_condition_handler (rtx target, rtx handler) 4880 alpha_expand_builtin_establish_vms_condition_handler (rtx target, rtx handler)
4851 { 4881 {
4852 rtx handler_slot_address 4882 rtx handler_slot_address = plus_constant (Pmode, hard_frame_pointer_rtx,
4853 = plus_constant (hard_frame_pointer_rtx, VMS_COND_HANDLER_FP_OFFSET); 4883 VMS_COND_HANDLER_FP_OFFSET);
4854 4884
4855 rtx handler_slot 4885 rtx handler_slot
4856 = gen_rtx_MEM (DImode, handler_slot_address); 4886 = gen_rtx_MEM (DImode, handler_slot_address);
4857 4887
4858 emit_move_insn (target, handler_slot); 4888 emit_move_insn (target, handler_slot);
4896 function. Needed only if TARGET_LD_BUGGY_LDGP. */ 4926 function. Needed only if TARGET_LD_BUGGY_LDGP. */
4897 4927
4898 rtx 4928 rtx
4899 alpha_gp_save_rtx (void) 4929 alpha_gp_save_rtx (void)
4900 { 4930 {
4901 rtx seq, m = cfun->machine->gp_save_rtx; 4931 rtx_insn *seq;
4932 rtx m = cfun->machine->gp_save_rtx;
4902 4933
4903 if (m == NULL) 4934 if (m == NULL)
4904 { 4935 {
4905 start_sequence (); 4936 start_sequence ();
4906 4937
4915 However this breaks the CFG if the first instruction in the 4946 However this breaks the CFG if the first instruction in the
4916 first block is not the NOTE_INSN_BASIC_BLOCK, for example a 4947 first block is not the NOTE_INSN_BASIC_BLOCK, for example a
4917 label. Emit the sequence properly on the edge. We are only 4948 label. Emit the sequence properly on the edge. We are only
4918 invoked from dw2_build_landing_pads and finish_eh_generation 4949 invoked from dw2_build_landing_pads and finish_eh_generation
4919 will call commit_edge_insertions thanks to a kludge. */ 4950 will call commit_edge_insertions thanks to a kludge. */
4920 insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR)); 4951 insert_insn_on_edge (seq,
4952 single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
4921 4953
4922 cfun->machine->gp_save_rtx = m; 4954 cfun->machine->gp_save_rtx = m;
4923 } 4955 }
4924 4956
4925 return m; 4957 return m;
4958 }
4959
4960 static void
4961 alpha_instantiate_decls (void)
4962 {
4963 if (cfun->machine->gp_save_rtx != NULL_RTX)
4964 instantiate_decl_rtl (cfun->machine->gp_save_rtx);
4926 } 4965 }
4927 4966
4928 static int 4967 static int
4929 alpha_ra_ever_killed (void) 4968 alpha_ra_ever_killed (void)
4930 { 4969 {
4931 rtx top; 4970 rtx_insn *top;
4932 4971
4933 if (!has_hard_reg_initial_val (Pmode, REG_RA)) 4972 if (!has_hard_reg_initial_val (Pmode, REG_RA))
4934 return (int)df_regs_ever_live_p (REG_RA); 4973 return (int)df_regs_ever_live_p (REG_RA);
4935 4974
4936 push_topmost_sequence (); 4975 push_topmost_sequence ();
4937 top = get_insns (); 4976 top = get_insns ();
4938 pop_topmost_sequence (); 4977 pop_topmost_sequence ();
4939 4978
4940 return reg_set_between_p (gen_rtx_REG (Pmode, REG_RA), top, NULL_RTX); 4979 return reg_set_between_p (gen_rtx_REG (Pmode, REG_RA), top, NULL);
4941 } 4980 }
4942 4981
4943 4982
4944 /* Return the trap mode suffix applicable to the current 4983 /* Return the trap mode suffix applicable to the current
4945 instruction, or NULL. */ 4984 instruction, or NULL. */
5051 gcc_unreachable (); 5090 gcc_unreachable ();
5052 } 5091 }
5053 gcc_unreachable (); 5092 gcc_unreachable ();
5054 } 5093 }
5055 5094
5056 /* Locate some local-dynamic symbol still in use by this function 5095 /* Implement TARGET_PRINT_OPERAND_PUNCT_VALID_P. */
5057 so that we can print its name in some movdi_er_tlsldm pattern. */ 5096
5058 5097 static bool
5059 static int 5098 alpha_print_operand_punct_valid_p (unsigned char code)
5060 get_some_local_dynamic_name_1 (rtx *px, void *data ATTRIBUTE_UNUSED) 5099 {
5061 { 5100 return (code == '/' || code == ',' || code == '-' || code == '~'
5062 rtx x = *px; 5101 || code == '#' || code == '*' || code == '&');
5063 5102 }
5064 if (GET_CODE (x) == SYMBOL_REF 5103
5065 && SYMBOL_REF_TLS_MODEL (x) == TLS_MODEL_LOCAL_DYNAMIC) 5104 /* Implement TARGET_PRINT_OPERAND. The alpha-specific
5066 { 5105 operand codes are documented below. */
5067 cfun->machine->some_ld_name = XSTR (x, 0); 5106
5068 return 1; 5107 static void
5069 } 5108 alpha_print_operand (FILE *file, rtx x, int code)
5070
5071 return 0;
5072 }
5073
5074 static const char *
5075 get_some_local_dynamic_name (void)
5076 {
5077 rtx insn;
5078
5079 if (cfun->machine->some_ld_name)
5080 return cfun->machine->some_ld_name;
5081
5082 for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
5083 if (INSN_P (insn)
5084 && for_each_rtx (&PATTERN (insn), get_some_local_dynamic_name_1, 0))
5085 return cfun->machine->some_ld_name;
5086
5087 gcc_unreachable ();
5088 }
5089
5090 /* Print an operand. Recognize special options, documented below. */
5091
5092 void
5093 print_operand (FILE *file, rtx x, int code)
5094 { 5109 {
5095 int i; 5110 int i;
5096 5111
5097 switch (code) 5112 switch (code)
5098 { 5113 {
5100 /* Print the assembler name of the current function. */ 5115 /* Print the assembler name of the current function. */
5101 assemble_name (file, alpha_fnname); 5116 assemble_name (file, alpha_fnname);
5102 break; 5117 break;
5103 5118
5104 case '&': 5119 case '&':
5105 assemble_name (file, get_some_local_dynamic_name ()); 5120 if (const char *name = get_some_local_dynamic_name ())
5121 assemble_name (file, name);
5122 else
5123 output_operand_lossage ("'%%&' used without any "
5124 "local dynamic TLS references");
5106 break; 5125 break;
5107 5126
5108 case '/': 5127 case '/':
5128 /* Generates the instruction suffix. The TRAP_SUFFIX and ROUND_SUFFIX
5129 attributes are examined to determine what is appropriate. */
5109 { 5130 {
5110 const char *trap = get_trap_mode_suffix (); 5131 const char *trap = get_trap_mode_suffix ();
5111 const char *round = get_round_mode_suffix (); 5132 const char *round = get_round_mode_suffix ();
5112 5133
5113 if (trap || round) 5134 if (trap || round)
5114 fprintf (file, (TARGET_AS_SLASH_BEFORE_SUFFIX ? "/%s%s" : "%s%s"), 5135 fprintf (file, "/%s%s", (trap ? trap : ""), (round ? round : ""));
5115 (trap ? trap : ""), (round ? round : ""));
5116 break; 5136 break;
5117 } 5137 }
5118 5138
5119 case ',': 5139 case ',':
5120 /* Generates single precision instruction suffix. */ 5140 /* Generates single precision suffix for floating point
5141 instructions (s for IEEE, f for VAX). */
5121 fputc ((TARGET_FLOAT_VAX ? 'f' : 's'), file); 5142 fputc ((TARGET_FLOAT_VAX ? 'f' : 's'), file);
5122 break; 5143 break;
5123 5144
5124 case '-': 5145 case '-':
5125 /* Generates double precision instruction suffix. */ 5146 /* Generates double precision suffix for floating point
5147 instructions (t for IEEE, g for VAX). */
5126 fputc ((TARGET_FLOAT_VAX ? 'g' : 't'), file); 5148 fputc ((TARGET_FLOAT_VAX ? 'g' : 't'), file);
5127 break; 5149 break;
5128 5150
5129 case '#': 5151 case '#':
5130 if (alpha_this_literal_sequence_number == 0) 5152 if (alpha_this_literal_sequence_number == 0)
5134 5156
5135 case '*': 5157 case '*':
5136 if (alpha_this_gpdisp_sequence_number == 0) 5158 if (alpha_this_gpdisp_sequence_number == 0)
5137 alpha_this_gpdisp_sequence_number = alpha_next_sequence_number++; 5159 alpha_this_gpdisp_sequence_number = alpha_next_sequence_number++;
5138 fprintf (file, "%d", alpha_this_gpdisp_sequence_number); 5160 fprintf (file, "%d", alpha_this_gpdisp_sequence_number);
5139 break;
5140
5141 case 'H':
5142 if (GET_CODE (x) == HIGH)
5143 output_addr_const (file, XEXP (x, 0));
5144 else
5145 output_operand_lossage ("invalid %%H value");
5146 break; 5161 break;
5147 5162
5148 case 'J': 5163 case 'J':
5149 { 5164 {
5150 const char *lituse; 5165 const char *lituse;
5217 case 'P': 5232 case 'P':
5218 /* Write 1 << C, for a constant C. */ 5233 /* Write 1 << C, for a constant C. */
5219 if (!CONST_INT_P (x)) 5234 if (!CONST_INT_P (x))
5220 output_operand_lossage ("invalid %%P value"); 5235 output_operand_lossage ("invalid %%P value");
5221 5236
5222 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (HOST_WIDE_INT) 1 << INTVAL (x)); 5237 fprintf (file, HOST_WIDE_INT_PRINT_DEC, HOST_WIDE_INT_1 << INTVAL (x));
5223 break; 5238 break;
5224 5239
5225 case 'h': 5240 case 'h':
5226 /* Write the high-order 16 bits of a constant, sign-extended. */ 5241 /* Write the high-order 16 bits of a constant, sign-extended. */
5227 if (!CONST_INT_P (x)) 5242 if (!CONST_INT_P (x))
5239 (INTVAL (x) & 0xffff) - 2 * (INTVAL (x) & 0x8000)); 5254 (INTVAL (x) & 0xffff) - 2 * (INTVAL (x) & 0x8000));
5240 break; 5255 break;
5241 5256
5242 case 'm': 5257 case 'm':
5243 /* Write mask for ZAP insn. */ 5258 /* Write mask for ZAP insn. */
5244 if (GET_CODE (x) == CONST_DOUBLE) 5259 if (CONST_INT_P (x))
5245 {
5246 HOST_WIDE_INT mask = 0;
5247 HOST_WIDE_INT value;
5248
5249 value = CONST_DOUBLE_LOW (x);
5250 for (i = 0; i < HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
5251 i++, value >>= 8)
5252 if (value & 0xff)
5253 mask |= (1 << i);
5254
5255 value = CONST_DOUBLE_HIGH (x);
5256 for (i = 0; i < HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
5257 i++, value >>= 8)
5258 if (value & 0xff)
5259 mask |= (1 << (i + sizeof (int)));
5260
5261 fprintf (file, HOST_WIDE_INT_PRINT_DEC, mask & 0xff);
5262 }
5263
5264 else if (CONST_INT_P (x))
5265 { 5260 {
5266 HOST_WIDE_INT mask = 0, value = INTVAL (x); 5261 HOST_WIDE_INT mask = 0, value = INTVAL (x);
5267 5262
5268 for (i = 0; i < 8; i++, value >>= 8) 5263 for (i = 0; i < 8; i++, value >>= 8)
5269 if (value & 0xff) 5264 if (value & 0xff)
5275 output_operand_lossage ("invalid %%m value"); 5270 output_operand_lossage ("invalid %%m value");
5276 break; 5271 break;
5277 5272
5278 case 'M': 5273 case 'M':
5279 /* 'b', 'w', 'l', or 'q' as the value of the constant. */ 5274 /* 'b', 'w', 'l', or 'q' as the value of the constant. */
5280 if (!CONST_INT_P (x) 5275 if (!mode_width_operand (x, VOIDmode))
5281 || (INTVAL (x) != 8 && INTVAL (x) != 16
5282 && INTVAL (x) != 32 && INTVAL (x) != 64))
5283 output_operand_lossage ("invalid %%M value"); 5276 output_operand_lossage ("invalid %%M value");
5284 5277
5285 fprintf (file, "%s", 5278 fprintf (file, "%s",
5286 (INTVAL (x) == 8 ? "b" 5279 (INTVAL (x) == 8 ? "b"
5287 : INTVAL (x) == 16 ? "w" 5280 : INTVAL (x) == 16 ? "w"
5314 { 5307 {
5315 fputc ('q', file); 5308 fputc ('q', file);
5316 break; 5309 break;
5317 } 5310 }
5318 } 5311 }
5319 else if (HOST_BITS_PER_WIDE_INT == 32 5312
5320 && GET_CODE (x) == CONST_DOUBLE
5321 && CONST_DOUBLE_LOW (x) == 0xffffffff
5322 && CONST_DOUBLE_HIGH (x) == 0)
5323 {
5324 fputc ('l', file);
5325 break;
5326 }
5327 output_operand_lossage ("invalid %%U value"); 5313 output_operand_lossage ("invalid %%U value");
5328 break; 5314 break;
5329 5315
5330 case 's': 5316 case 's':
5331 /* Write the constant value divided by 8 for little-endian mode or 5317 /* Write the constant value divided by 8. */
5332 (56 - value) / 8 for big-endian mode. */
5333
5334 if (!CONST_INT_P (x) 5318 if (!CONST_INT_P (x)
5335 || (unsigned HOST_WIDE_INT) INTVAL (x) >= (WORDS_BIG_ENDIAN 5319 || (unsigned HOST_WIDE_INT) INTVAL (x) >= 64
5336 ? 56
5337 : 64)
5338 || (INTVAL (x) & 7) != 0) 5320 || (INTVAL (x) & 7) != 0)
5339 output_operand_lossage ("invalid %%s value"); 5321 output_operand_lossage ("invalid %%s value");
5340 5322
5341 fprintf (file, HOST_WIDE_INT_PRINT_DEC, 5323 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) / 8);
5342 WORDS_BIG_ENDIAN
5343 ? (56 - INTVAL (x)) / 8
5344 : INTVAL (x) / 8);
5345 break;
5346
5347 case 'S':
5348 /* Same, except compute (64 - c) / 8 */
5349
5350 if (!CONST_INT_P (x)
5351 && (unsigned HOST_WIDE_INT) INTVAL (x) >= 64
5352 && (INTVAL (x) & 7) != 8)
5353 output_operand_lossage ("invalid %%s value");
5354
5355 fprintf (file, HOST_WIDE_INT_PRINT_DEC, (64 - INTVAL (x)) / 8);
5356 break;
5357
5358 case 't':
5359 {
5360 /* On Unicos/Mk systems: use a DEX expression if the symbol
5361 clashes with a register name. */
5362 int dex = unicosmk_need_dex (x);
5363 if (dex)
5364 fprintf (file, "DEX(%d)", dex);
5365 else
5366 output_addr_const (file, x);
5367 }
5368 break; 5324 break;
5369 5325
5370 case 'C': case 'D': case 'c': case 'd': 5326 case 'C': case 'D': case 'c': case 'd':
5371 /* Write out comparison name. */ 5327 /* Write out comparison name. */
5372 { 5328 {
5423 5379
5424 case 0: 5380 case 0:
5425 if (REG_P (x)) 5381 if (REG_P (x))
5426 fprintf (file, "%s", reg_names[REGNO (x)]); 5382 fprintf (file, "%s", reg_names[REGNO (x)]);
5427 else if (MEM_P (x)) 5383 else if (MEM_P (x))
5428 output_address (XEXP (x, 0)); 5384 output_address (GET_MODE (x), XEXP (x, 0));
5429 else if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC) 5385 else if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == UNSPEC)
5430 { 5386 {
5431 switch (XINT (XEXP (x, 0), 1)) 5387 switch (XINT (XEXP (x, 0), 1))
5432 { 5388 {
5433 case UNSPEC_DTPREL: 5389 case UNSPEC_DTPREL:
5446 default: 5402 default:
5447 output_operand_lossage ("invalid %%xn code"); 5403 output_operand_lossage ("invalid %%xn code");
5448 } 5404 }
5449 } 5405 }
5450 5406
5451 void 5407 /* Implement TARGET_PRINT_OPERAND_ADDRESS. */
5452 print_operand_address (FILE *file, rtx addr) 5408
5409 static void
5410 alpha_print_operand_address (FILE *file, machine_mode /*mode*/, rtx addr)
5453 { 5411 {
5454 int basereg = 31; 5412 int basereg = 31;
5455 HOST_WIDE_INT offset = 0; 5413 HOST_WIDE_INT offset = 0;
5456 5414
5457 if (GET_CODE (addr) == AND) 5415 if (GET_CODE (addr) == AND)
5531 5489
5532 case CONST_INT: 5490 case CONST_INT:
5533 offset = INTVAL (addr); 5491 offset = INTVAL (addr);
5534 break; 5492 break;
5535 5493
5536 #if TARGET_ABI_OPEN_VMS
5537 case SYMBOL_REF: 5494 case SYMBOL_REF:
5495 gcc_assert(TARGET_ABI_OPEN_VMS || this_is_asm_operands);
5538 fprintf (file, "%s", XSTR (addr, 0)); 5496 fprintf (file, "%s", XSTR (addr, 0));
5539 return; 5497 return;
5540 5498
5541 case CONST: 5499 case CONST:
5500 gcc_assert(TARGET_ABI_OPEN_VMS || this_is_asm_operands);
5542 gcc_assert (GET_CODE (XEXP (addr, 0)) == PLUS 5501 gcc_assert (GET_CODE (XEXP (addr, 0)) == PLUS
5543 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == SYMBOL_REF); 5502 && GET_CODE (XEXP (XEXP (addr, 0), 0)) == SYMBOL_REF);
5544 fprintf (file, "%s+" HOST_WIDE_INT_PRINT_DEC, 5503 fprintf (file, "%s+" HOST_WIDE_INT_PRINT_DEC,
5545 XSTR (XEXP (XEXP (addr, 0), 0), 0), 5504 XSTR (XEXP (XEXP (addr, 0), 0), 0),
5546 INTVAL (XEXP (XEXP (addr, 0), 1))); 5505 INTVAL (XEXP (XEXP (addr, 0), 1)));
5547 return; 5506 return;
5548 5507
5549 #endif
5550 default: 5508 default:
5551 gcc_unreachable (); 5509 output_operand_lossage ("invalid operand address");
5510 return;
5552 } 5511 }
5553 5512
5554 fprintf (file, HOST_WIDE_INT_PRINT_DEC "($%d)", offset, basereg); 5513 fprintf (file, HOST_WIDE_INT_PRINT_DEC "($%d)", offset, basereg);
5555 } 5514 }
5556 5515
5585 5544
5586 /* A trampoline (or "bounded") procedure descriptor is constructed from 5545 /* A trampoline (or "bounded") procedure descriptor is constructed from
5587 the function's procedure descriptor with certain fields zeroed in accordance with 5546 the function's procedure descriptor with certain fields zeroed in accordance with
5588 the VMS calling standard. This is stored in the first quadword. */ 5547 the VMS calling standard. This is stored in the first quadword. */
5589 word1 = force_reg (DImode, gen_const_mem (DImode, fnaddr)); 5548 word1 = force_reg (DImode, gen_const_mem (DImode, fnaddr));
5590 word1 = expand_and (DImode, word1, GEN_INT (0xffff0fff0000fff0), NULL); 5549 word1 = expand_and (DImode, word1,
5550 GEN_INT (HOST_WIDE_INT_C (0xffff0fff0000fff0)),
5551 NULL);
5591 } 5552 }
5592 else 5553 else
5593 { 5554 {
5594 /* These 4 instructions are: 5555 /* These 4 instructions are:
5595 ldq $1,24($27) 5556 ldq $1,24($27)
5596 ldq $27,16($27) 5557 ldq $27,16($27)
5597 jmp $31,($27),0 5558 jmp $31,($27),0
5598 nop 5559 nop
5599 We don't bother setting the HINT field of the jump; the nop 5560 We don't bother setting the HINT field of the jump; the nop
5600 is merely there for padding. */ 5561 is merely there for padding. */
5601 word1 = GEN_INT (0xa77b0010a43b0018); 5562 word1 = GEN_INT (HOST_WIDE_INT_C (0xa77b0010a43b0018));
5602 word2 = GEN_INT (0x47ff041f6bfb0000); 5563 word2 = GEN_INT (HOST_WIDE_INT_C (0x47ff041f6bfb0000));
5603 } 5564 }
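The two constants pack those four 32-bit instructions little-endian, first instruction in the low half of each quadword; a C sketch of the packing, with the decodings taken from the comment above:

#include <stdint.h>

static uint64_t
pack_insns (uint32_t lo, uint32_t hi)
{
  return ((uint64_t) hi << 32) | lo;
}

/* pack_insns (0xa43b0018, 0xa77b0010) == 0xa77b0010a43b0018
     0xa43b0018  ldq $1,24($27)
     0xa77b0010  ldq $27,16($27)
   pack_insns (0x6bfb0000, 0x47ff041f) == 0x47ff041f6bfb0000
     0x6bfb0000  jmp $31,($27),0
     0x47ff041f  nop (bis $31,$31,$31)  */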
5604 5565
5605 /* Store the first two words, as computed above. */ 5566 /* Store the first two words, as computed above. */
5606 mem = adjust_address (m_tramp, DImode, 0); 5567 mem = adjust_address (m_tramp, DImode, 0);
5607 emit_move_insn (mem, word1); 5568 emit_move_insn (mem, word1);
5612 mem = adjust_address (m_tramp, Pmode, 16); 5573 mem = adjust_address (m_tramp, Pmode, 16);
5613 emit_move_insn (mem, fnaddr); 5574 emit_move_insn (mem, fnaddr);
5614 mem = adjust_address (m_tramp, Pmode, 24); 5575 mem = adjust_address (m_tramp, Pmode, 24);
5615 emit_move_insn (mem, chain_value); 5576 emit_move_insn (mem, chain_value);
5616 5577
5617 if (!TARGET_ABI_OPEN_VMS) 5578 if (TARGET_ABI_OSF)
5618 { 5579 {
5619 emit_insn (gen_imb ()); 5580 emit_insn (gen_imb ());
5620 #ifdef ENABLE_EXECUTE_STACK 5581 #ifdef HAVE_ENABLE_EXECUTE_STACK
5621 emit_library_call (init_one_libfunc ("__enable_execute_stack"), 5582 emit_library_call (init_one_libfunc ("__enable_execute_stack"),
5622 LCT_NORMAL, VOIDmode, 1, XEXP (m_tramp, 0), Pmode); 5583 LCT_NORMAL, VOIDmode, XEXP (m_tramp, 0), Pmode);
5623 #endif 5584 #endif
5624 } 5585 }
5625 } 5586 }
5626 5587
5627 /* Determine where to put an argument to a function. 5588 /* Determine where to put an argument to a function.
5639 5600
5640 On Alpha the first 6 words of args are normally in registers 5601 On Alpha the first 6 words of args are normally in registers
5641 and the rest are pushed. */ 5602 and the rest are pushed. */
5642 5603
5643 static rtx 5604 static rtx
5644 alpha_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, 5605 alpha_function_arg (cumulative_args_t cum_v, machine_mode mode,
5645 const_tree type, bool named ATTRIBUTE_UNUSED) 5606 const_tree type, bool named ATTRIBUTE_UNUSED)
5646 { 5607 {
5608 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
5647 int basereg; 5609 int basereg;
5648 int num_args; 5610 int num_args;
5649 5611
5650 /* Don't get confused and pass small structures in FP registers. */ 5612 /* Don't get confused and pass small structures in FP registers. */
5651 if (type && AGGREGATE_TYPE_P (type)) 5613 if (type && AGGREGATE_TYPE_P (type))
5652 basereg = 16; 5614 basereg = 16;
5653 else 5615 else
5654 { 5616 {
5655 #ifdef ENABLE_CHECKING
5656 /* With alpha_split_complex_arg, we shouldn't see any raw complex 5617 /* With alpha_split_complex_arg, we shouldn't see any raw complex
5657 values here. */ 5618 values here. */
5658 gcc_assert (!COMPLEX_MODE_P (mode)); 5619 gcc_checking_assert (!COMPLEX_MODE_P (mode));
5659 #endif
5660 5620
5661 /* Set up defaults for FP operands passed in FP registers, and 5621 /* Set up defaults for FP operands passed in FP registers, and
5662 integral operands passed in integer registers. */ 5622 integral operands passed in integer registers. */
5663 if (TARGET_FPREGS && GET_MODE_CLASS (mode) == MODE_FLOAT) 5623 if (TARGET_FPREGS && GET_MODE_CLASS (mode) == MODE_FLOAT)
5664 basereg = 32 + 16; 5624 basereg = 32 + 16;
5700 /* Update the data in CUM to advance over an argument 5660 /* Update the data in CUM to advance over an argument
5701 of mode MODE and data type TYPE. 5661 of mode MODE and data type TYPE.
5702 (TYPE is null for libcalls where that information may not be available.) */ 5662 (TYPE is null for libcalls where that information may not be available.) */
5703 5663
5704 static void 5664 static void
5705 alpha_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, 5665 alpha_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
5706 const_tree type, bool named ATTRIBUTE_UNUSED) 5666 const_tree type, bool named ATTRIBUTE_UNUSED)
5707 { 5667 {
5668 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
5708 bool onstack = targetm.calls.must_pass_in_stack (mode, type); 5669 bool onstack = targetm.calls.must_pass_in_stack (mode, type);
5709 int increment = onstack ? 6 : ALPHA_ARG_SIZE (mode, type, named); 5670 int increment = onstack ? 6 : ALPHA_ARG_SIZE (mode, type);
5710 5671
5711 #if TARGET_ABI_OSF 5672 #if TARGET_ABI_OSF
5712 *cum += increment; 5673 *cum += increment;
5713 #else 5674 #else
5714 if (!onstack && cum->num_args < 6) 5675 if (!onstack && cum->num_args < 6)
5716 cum->num_args += increment; 5677 cum->num_args += increment;
5717 #endif 5678 #endif
5718 } 5679 }
5719 5680
5720 static int 5681 static int
5721 alpha_arg_partial_bytes (CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED, 5682 alpha_arg_partial_bytes (cumulative_args_t cum_v,
5722 enum machine_mode mode ATTRIBUTE_UNUSED, 5683 machine_mode mode ATTRIBUTE_UNUSED,
5723 tree type ATTRIBUTE_UNUSED, 5684 tree type ATTRIBUTE_UNUSED,
5724 bool named ATTRIBUTE_UNUSED) 5685 bool named ATTRIBUTE_UNUSED)
5725 { 5686 {
5726 int words = 0; 5687 int words = 0;
5688 CUMULATIVE_ARGS *cum ATTRIBUTE_UNUSED = get_cumulative_args (cum_v);
5727 5689
5728 #if TARGET_ABI_OPEN_VMS 5690 #if TARGET_ABI_OPEN_VMS
5729 if (cum->num_args < 6 5691 if (cum->num_args < 6
5730 && 6 < cum->num_args + ALPHA_ARG_SIZE (mode, type, named)) 5692 && 6 < cum->num_args + ALPHA_ARG_SIZE (mode, type))
5731 words = 6 - cum->num_args; 5693 words = 6 - cum->num_args;
5732 #elif TARGET_ABI_UNICOSMK
5733 /* Never any split arguments. */
5734 #elif TARGET_ABI_OSF 5694 #elif TARGET_ABI_OSF
5735 if (*cum < 6 && 6 < *cum + ALPHA_ARG_SIZE (mode, type, named)) 5695 if (*cum < 6 && 6 < *cum + ALPHA_ARG_SIZE (mode, type))
5736 words = 6 - *cum; 5696 words = 6 - *cum;
5737 #else 5697 #else
5738 #error Unhandled ABI 5698 #error Unhandled ABI
5739 #endif 5699 #endif
5740 5700
5745 /* Return true if TYPE must be returned in memory, instead of in registers. */ 5705 /* Return true if TYPE must be returned in memory, instead of in registers. */
5746 5706
5747 static bool 5707 static bool
5748 alpha_return_in_memory (const_tree type, const_tree fndecl ATTRIBUTE_UNUSED) 5708 alpha_return_in_memory (const_tree type, const_tree fndecl ATTRIBUTE_UNUSED)
5749 { 5709 {
5750 enum machine_mode mode = VOIDmode; 5710 machine_mode mode = VOIDmode;
5751 int size; 5711 int size;
5752 5712
5753 if (type) 5713 if (type)
5754 { 5714 {
5755 mode = TYPE_MODE (type); 5715 mode = TYPE_MODE (type);
5798 } 5758 }
5799 5759
5800 /* Return true if TYPE should be passed by invisible reference. */ 5760 /* Return true if TYPE should be passed by invisible reference. */
5801 5761
5802 static bool 5762 static bool
5803 alpha_pass_by_reference (CUMULATIVE_ARGS *ca ATTRIBUTE_UNUSED, 5763 alpha_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
5804 enum machine_mode mode, 5764 machine_mode mode,
5805 const_tree type ATTRIBUTE_UNUSED, 5765 const_tree type ATTRIBUTE_UNUSED,
5806 bool named ATTRIBUTE_UNUSED) 5766 bool named)
5807 { 5767 {
5768 /* Pass float and _Complex float variable arguments by reference.
5769 This avoids a 64-bit store from a FP register to a pretend args save area
5770 and a subsequent 32-bit load from the saved location to a FP register.
5771
5772 Note that 32-bit loads and stores to/from a FP register on alpha reorder
5773 bits to form a canonical 64-bit value in the FP register. This fact
5774 invalidates the compiler's assumption that a 32-bit FP value lives in the
5775 lower 32 bits of the passed 64-bit FP value, so loading the 32-bit value
5776 from the stored 64-bit location using a 32-bit FP load is invalid on alpha.
5777
5778 This introduces a sort of ABI incompatibility, but until _Float32 was
5779 introduced, C-family languages promoted a 32-bit float variable arg to
5780 a 64-bit double, and it was not allowed to pass float as a variable
5781 argument. Passing _Complex float as a variable argument never
5782 worked on alpha. Thus, we have no backward compatibility issues
5783 to worry about, and passing unpromoted _Float32 and _Complex float
5784 as a variable argument will actually work in the future. */
5785
5786 if (mode == SFmode || mode == SCmode)
5787 return !named;
5788
5808 return mode == TFmode || mode == TCmode; 5789 return mode == TFmode || mode == TCmode;
5809 } 5790 }
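To make the comment above concrete, here is a minimal sketch (not part of alpha.c; names invented) of a variadic callee whose unnamed SFmode argument is affected by this hook:

#include <stdarg.h>

/* Sketch: under the hook above, the unnamed _Float32 argument is
   passed by invisible reference instead of through a 64-bit FP slot.  */
_Float32
first_float_vararg (int n, ...)
{
  va_list ap;
  _Float32 f;

  va_start (ap, n);
  f = va_arg (ap, _Float32);   /* SFmode and unnamed: by reference */
  va_end (ap);
  return f;
}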
5810 5791
5811 /* Define how to find the value returned by a function. VALTYPE is the 5792 /* Define how to find the value returned by a function. VALTYPE is the
5812 data type of the value (as a tree). If the precise function being 5793 data type of the value (as a tree). If the precise function being
5814 MODE is set instead of VALTYPE for libcalls. 5795 MODE is set instead of VALTYPE for libcalls.
5815 5796
5816 On Alpha the value is found in $0 for integer functions and 5797 On Alpha the value is found in $0 for integer functions and
5817 $f0 for floating-point functions. */ 5798 $f0 for floating-point functions. */
5818 5799
5819 rtx 5800 static rtx
5820 function_value (const_tree valtype, const_tree func ATTRIBUTE_UNUSED, 5801 alpha_function_value_1 (const_tree valtype, const_tree func ATTRIBUTE_UNUSED,
5821 enum machine_mode mode) 5802 machine_mode mode)
5822 { 5803 {
5823 unsigned int regnum, dummy ATTRIBUTE_UNUSED; 5804 unsigned int regnum, dummy ATTRIBUTE_UNUSED;
5824 enum mode_class mclass; 5805 enum mode_class mclass;
5825 5806
5826 gcc_assert (!valtype || !alpha_return_in_memory (valtype, func)); 5807 gcc_assert (!valtype || !alpha_return_in_memory (valtype, func));
5847 regnum = 32; 5828 regnum = 32;
5848 break; 5829 break;
5849 5830
5850 case MODE_COMPLEX_FLOAT: 5831 case MODE_COMPLEX_FLOAT:
5851 { 5832 {
5852 enum machine_mode cmode = GET_MODE_INNER (mode); 5833 machine_mode cmode = GET_MODE_INNER (mode);
5853 5834
5854 return gen_rtx_PARALLEL 5835 return gen_rtx_PARALLEL
5855 (VOIDmode, 5836 (VOIDmode,
5856 gen_rtvec (2, 5837 gen_rtvec (2,
5857 gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (cmode, 32), 5838 gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (cmode, 32),
5871 } 5852 }
5872 5853
5873 return gen_rtx_REG (mode, regnum); 5854 return gen_rtx_REG (mode, regnum);
5874 } 5855 }
5875 5856
5857 /* Implement TARGET_FUNCTION_VALUE. */
5858
5859 static rtx
5860 alpha_function_value (const_tree valtype, const_tree fn_decl_or_type,
5861 bool /*outgoing*/)
5862 {
5863 return alpha_function_value_1 (valtype, fn_decl_or_type, VOIDmode);
5864 }
5865
5866 /* Implement TARGET_LIBCALL_VALUE. */
5867
5868 static rtx
5869 alpha_libcall_value (machine_mode mode, const_rtx /*fun*/)
5870 {
5871 return alpha_function_value_1 (NULL_TREE, NULL_TREE, mode);
5872 }
5873
5874 /* Implement TARGET_FUNCTION_VALUE_REGNO_P.
5875
5876 On the Alpha, $0 $1 and $f0 $f1 are the only registers thus used. */
5877
5878 static bool
5879 alpha_function_value_regno_p (const unsigned int regno)
5880 {
5881 return (regno == 0 || regno == 1 || regno == 32 || regno == 33);
5882 }
5883
5876 /* TCmode complex values are passed by invisible reference. We 5884 /* TCmode complex values are passed by invisible reference. We
5877 should not split these values. */ 5885 should not split these values. */
5878 5886
5879 static bool 5887 static bool
5880 alpha_split_complex_arg (const_tree type) 5888 alpha_split_complex_arg (const_tree type)
5885 static tree 5893 static tree
5886 alpha_build_builtin_va_list (void) 5894 alpha_build_builtin_va_list (void)
5887 { 5895 {
5888 tree base, ofs, space, record, type_decl; 5896 tree base, ofs, space, record, type_decl;
5889 5897
5890 if (TARGET_ABI_OPEN_VMS || TARGET_ABI_UNICOSMK) 5898 if (TARGET_ABI_OPEN_VMS)
5891 return ptr_type_node; 5899 return ptr_type_node;
5892 5900
5893 record = (*lang_hooks.types.make_type) (RECORD_TYPE); 5901 record = (*lang_hooks.types.make_type) (RECORD_TYPE);
5894 type_decl = build_decl (BUILTINS_LOCATION, 5902 type_decl = build_decl (BUILTINS_LOCATION,
5895 TYPE_DECL, get_identifier ("__va_list_tag"), record); 5903 TYPE_DECL, get_identifier ("__va_list_tag"), record);
5908 ofs = build_decl (BUILTINS_LOCATION, 5916 ofs = build_decl (BUILTINS_LOCATION,
5909 FIELD_DECL, get_identifier ("__offset"), 5917 FIELD_DECL, get_identifier ("__offset"),
5910 integer_type_node); 5918 integer_type_node);
5911 DECL_FIELD_CONTEXT (ofs) = record; 5919 DECL_FIELD_CONTEXT (ofs) = record;
5912 DECL_CHAIN (ofs) = space; 5920 DECL_CHAIN (ofs) = space;
5913 /* ??? This is a hack, __offset is marked volatile to prevent
5914 DCE that confuses stdarg optimization and results in
5915 gcc.c-torture/execute/stdarg-1.c failure. See PR 41089. */
5916 TREE_THIS_VOLATILE (ofs) = 1;
5917 5921
5918 base = build_decl (BUILTINS_LOCATION, 5922 base = build_decl (BUILTINS_LOCATION,
5919 FIELD_DECL, get_identifier ("__base"), 5923 FIELD_DECL, get_identifier ("__base"),
5920 ptr_type_node); 5924 ptr_type_node);
5921 DECL_FIELD_CONTEXT (base) = record; 5925 DECL_FIELD_CONTEXT (base) = record;
5930 5934
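In plain C terms, the record assembled above amounts to the layout below (a sketch; the __pad name is invented here for the artificial filler field):

/* Sketch of the OSF __builtin_va_list; not literal source.  */
typedef struct __va_list_tag
{
  void *__base;    /* start of the argument save area */
  int __offset;    /* bytes of arguments consumed so far */
  int __pad;       /* artificial, ignored filler */
} __builtin_va_list;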
5931 #if TARGET_ABI_OSF 5935 #if TARGET_ABI_OSF
5932 /* Helper function for alpha_stdarg_optimize_hook. Skip over casts 5936 /* Helper function for alpha_stdarg_optimize_hook. Skip over casts
5933 and constant additions. */ 5937 and constant additions. */
5934 5938
5935 static gimple 5939 static gimple *
5936 va_list_skip_additions (tree lhs) 5940 va_list_skip_additions (tree lhs)
5937 { 5941 {
5938 gimple stmt; 5942 gimple *stmt;
5939 5943
5940 for (;;) 5944 for (;;)
5941 { 5945 {
5942 enum tree_code code; 5946 enum tree_code code;
5943 5947
5954 return stmt; 5958 return stmt;
5955 code = gimple_assign_rhs_code (stmt); 5959 code = gimple_assign_rhs_code (stmt);
5956 if (!CONVERT_EXPR_CODE_P (code) 5960 if (!CONVERT_EXPR_CODE_P (code)
5957 && ((code != PLUS_EXPR && code != POINTER_PLUS_EXPR) 5961 && ((code != PLUS_EXPR && code != POINTER_PLUS_EXPR)
5958 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST 5962 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST
5959 || !host_integerp (gimple_assign_rhs2 (stmt), 1))) 5963 || !tree_fits_uhwi_p (gimple_assign_rhs2 (stmt))))
5960 return stmt; 5964 return stmt;
5961 5965
5962 lhs = gimple_assign_rhs1 (stmt); 5966 lhs = gimple_assign_rhs1 (stmt);
5963 } 5967 }
5964 } 5968 }
5980 registers are needed and bit 1 set if FPR registers are needed. 5984 registers are needed and bit 1 set if FPR registers are needed.
5981 Return true if va_list references should not be scanned for the 5985 Return true if va_list references should not be scanned for the
5982 current statement. */ 5986 current statement. */
5983 5987
5984 static bool 5988 static bool
5985 alpha_stdarg_optimize_hook (struct stdarg_info *si, const_gimple stmt) 5989 alpha_stdarg_optimize_hook (struct stdarg_info *si, const gimple *stmt)
5986 { 5990 {
5987 tree base, offset, rhs; 5991 tree base, offset, rhs;
5988 int offset_arg = 1; 5992 int offset_arg = 1;
5989 gimple base_stmt; 5993 gimple *base_stmt;
5990 5994
5991 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)) 5995 if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
5992 != GIMPLE_SINGLE_RHS) 5996 != GIMPLE_SINGLE_RHS)
5993 return false; 5997 return false;
5994 5998
6035 offset_arg = 0; 6039 offset_arg = 0;
6036 } 6040 }
6037 6041
6038 base = get_base_address (base); 6042 base = get_base_address (base);
6039 if (TREE_CODE (base) != VAR_DECL 6043 if (TREE_CODE (base) != VAR_DECL
6040 || !bitmap_bit_p (si->va_list_vars, DECL_UID (base))) 6044 || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
6041 return false; 6045 return false;
6042 6046
6043 offset = gimple_op (stmt, 1 + offset_arg); 6047 offset = gimple_op (stmt, 1 + offset_arg);
6044 if (TREE_CODE (offset) == SSA_NAME) 6048 if (TREE_CODE (offset) == SSA_NAME)
6045 { 6049 {
6046 gimple offset_stmt = va_list_skip_additions (offset); 6050 gimple *offset_stmt = va_list_skip_additions (offset);
6047 6051
6048 if (offset_stmt 6052 if (offset_stmt
6049 && gimple_code (offset_stmt) == GIMPLE_PHI) 6053 && gimple_code (offset_stmt) == GIMPLE_PHI)
6050 { 6054 {
6051 HOST_WIDE_INT sub; 6055 HOST_WIDE_INT sub;
6052 gimple arg1_stmt, arg2_stmt; 6056 gimple *arg1_stmt, *arg2_stmt;
6053 tree arg1, arg2; 6057 tree arg1, arg2;
6054 enum tree_code code1, code2; 6058 enum tree_code code1, code2;
6055 6059
6056 if (gimple_phi_num_args (offset_stmt) != 2) 6060 if (gimple_phi_num_args (offset_stmt) != 2)
6057 goto escapes; 6061 goto escapes;
6072 && (code2 == MINUS_EXPR || code2 == PLUS_EXPR)) 6076 && (code2 == MINUS_EXPR || code2 == PLUS_EXPR))
6073 /* Do nothing. */; 6077 /* Do nothing. */;
6074 else if (code2 == COMPONENT_REF 6078 else if (code2 == COMPONENT_REF
6075 && (code1 == MINUS_EXPR || code1 == PLUS_EXPR)) 6079 && (code1 == MINUS_EXPR || code1 == PLUS_EXPR))
6076 { 6080 {
6077 gimple tem = arg1_stmt; 6081 std::swap (arg1_stmt, arg2_stmt);
6078 code2 = code1; 6082 code2 = code1;
6079 arg1_stmt = arg2_stmt;
6080 arg2_stmt = tem;
6081 } 6083 }
6082 else 6084 else
6083 goto escapes; 6085 goto escapes;
6084 6086
6085 if (!host_integerp (gimple_assign_rhs2 (arg2_stmt), 0)) 6087 if (!tree_fits_shwi_p (gimple_assign_rhs2 (arg2_stmt)))
6086 goto escapes; 6088 goto escapes;
6087 6089
6088 sub = tree_low_cst (gimple_assign_rhs2 (arg2_stmt), 0); 6090 sub = tree_to_shwi (gimple_assign_rhs2 (arg2_stmt));
6089 if (code2 == MINUS_EXPR) 6091 if (code2 == MINUS_EXPR)
6090 sub = -sub; 6092 sub = -sub;
6091 if (sub < -48 || sub > -32) 6093 if (sub < -48 || sub > -32)
6092 goto escapes; 6094 goto escapes;
6093 6095
6136 6138
6137 /* Perform any actions needed for a function that is receiving a 6139 /* Perform any actions needed for a function that is receiving a
6138 variable number of arguments. */ 6140 variable number of arguments. */
6139 6141
6140 static void 6142 static void
6141 alpha_setup_incoming_varargs (CUMULATIVE_ARGS *pcum, enum machine_mode mode, 6143 alpha_setup_incoming_varargs (cumulative_args_t pcum, machine_mode mode,
6142 tree type, int *pretend_size, int no_rtl) 6144 tree type, int *pretend_size, int no_rtl)
6143 { 6145 {
6144 CUMULATIVE_ARGS cum = *pcum; 6146 CUMULATIVE_ARGS cum = *get_cumulative_args (pcum);
6145 6147
6146 /* Skip the current argument. */ 6148 /* Skip the current argument. */
6147 targetm.calls.function_arg_advance (&cum, mode, type, true); 6149 targetm.calls.function_arg_advance (pack_cumulative_args (&cum), mode, type,
6148 6150 true);
6149 #if TARGET_ABI_UNICOSMK 6151
6150 /* On Unicos/Mk, the standard subroutine __T3E_MISMATCH stores all register 6152 #if TARGET_ABI_OPEN_VMS
6151 arguments on the stack. Unfortunately, it doesn't always store the first
6152 one (i.e. the one that arrives in $16 or $f16). This is not a problem
6153 with stdargs as we always have at least one named argument there. */
6154 if (cum.num_reg_words < 6)
6155 {
6156 if (!no_rtl)
6157 {
6158 emit_insn (gen_umk_mismatch_args (GEN_INT (cum.num_reg_words)));
6159 emit_insn (gen_arg_home_umk ());
6160 }
6161 *pretend_size = 0;
6162 }
6163 #elif TARGET_ABI_OPEN_VMS
6164 /* For VMS, we allocate space for all 6 arg registers plus a count. 6153 /* For VMS, we allocate space for all 6 arg registers plus a count.
6165 6154
6166 However, if NO registers need to be saved, don't allocate any space. 6155 However, if NO registers need to be saved, don't allocate any space.
6167 This is not only because we won't need the space, but because AP 6156 This is not only because we won't need the space, but because AP
6168 includes the current_pretend_args_size and we don't want to mess up 6157 includes the current_pretend_args_size and we don't want to mess up
6209 gcc_assert ((VA_LIST_MAX_FPR_SIZE & 3) == 3); 6198 gcc_assert ((VA_LIST_MAX_FPR_SIZE & 3) == 3);
6210 6199
6211 if (cfun->va_list_fpr_size & 1) 6200 if (cfun->va_list_fpr_size & 1)
6212 { 6201 {
6213 tmp = gen_rtx_MEM (BLKmode, 6202 tmp = gen_rtx_MEM (BLKmode,
6214 plus_constant (virtual_incoming_args_rtx, 6203 plus_constant (Pmode, virtual_incoming_args_rtx,
6215 (cum + 6) * UNITS_PER_WORD)); 6204 (cum + 6) * UNITS_PER_WORD));
6216 MEM_NOTRAP_P (tmp) = 1; 6205 MEM_NOTRAP_P (tmp) = 1;
6217 set_mem_alias_set (tmp, set); 6206 set_mem_alias_set (tmp, set);
6218 move_block_from_reg (16 + cum, tmp, count); 6207 move_block_from_reg (16 + cum, tmp, count);
6219 } 6208 }
6220 6209
6221 if (cfun->va_list_fpr_size & 2) 6210 if (cfun->va_list_fpr_size & 2)
6222 { 6211 {
6223 tmp = gen_rtx_MEM (BLKmode, 6212 tmp = gen_rtx_MEM (BLKmode,
6224 plus_constant (virtual_incoming_args_rtx, 6213 plus_constant (Pmode, virtual_incoming_args_rtx,
6225 cum * UNITS_PER_WORD)); 6214 cum * UNITS_PER_WORD));
6226 MEM_NOTRAP_P (tmp) = 1; 6215 MEM_NOTRAP_P (tmp) = 1;
6227 set_mem_alias_set (tmp, set); 6216 set_mem_alias_set (tmp, set);
6228 move_block_from_reg (16 + cum + TARGET_FPREGS*32, tmp, count); 6217 move_block_from_reg (16 + cum + TARGET_FPREGS*32, tmp, count);
6229 } 6218 }
6238 HOST_WIDE_INT offset; 6227 HOST_WIDE_INT offset;
6239 tree t, offset_field, base_field; 6228 tree t, offset_field, base_field;
6240 6229
6241 if (TREE_CODE (TREE_TYPE (valist)) == ERROR_MARK) 6230 if (TREE_CODE (TREE_TYPE (valist)) == ERROR_MARK)
6242 return; 6231 return;
6243
6244 if (TARGET_ABI_UNICOSMK)
6245 std_expand_builtin_va_start (valist, nextarg);
6246 6232
6247 /* For Unix, TARGET_SETUP_INCOMING_VARARGS moves the starting address base 6233 /* For Unix, TARGET_SETUP_INCOMING_VARARGS moves the starting address base
6248 up by 48, storing fp arg registers in the first 48 bytes, and the 6234 up by 48, storing fp arg registers in the first 48 bytes, and the
6249 integer arg registers in the next 48 bytes. This is only done, 6235 integer arg registers in the next 48 bytes. This is only done,
6250 however, if any integer registers need to be stored. 6236 however, if any integer registers need to be stored.
6262 offset = -6 * UNITS_PER_WORD + crtl->args.pretend_args_size; 6248 offset = -6 * UNITS_PER_WORD + crtl->args.pretend_args_size;
6263 6249
6264 if (TARGET_ABI_OPEN_VMS) 6250 if (TARGET_ABI_OPEN_VMS)
6265 { 6251 {
6266 t = make_tree (ptr_type_node, virtual_incoming_args_rtx); 6252 t = make_tree (ptr_type_node, virtual_incoming_args_rtx);
6267 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, 6253 t = fold_build_pointer_plus_hwi (t, offset + NUM_ARGS * UNITS_PER_WORD);
6268 size_int (offset + NUM_ARGS * UNITS_PER_WORD));
6269 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t); 6254 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
6270 TREE_SIDE_EFFECTS (t) = 1; 6255 TREE_SIDE_EFFECTS (t) = 1;
6271 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL); 6256 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6272 } 6257 }
6273 else 6258 else
6279 valist, base_field, NULL_TREE); 6264 valist, base_field, NULL_TREE);
6280 offset_field = build3 (COMPONENT_REF, TREE_TYPE (offset_field), 6265 offset_field = build3 (COMPONENT_REF, TREE_TYPE (offset_field),
6281 valist, offset_field, NULL_TREE); 6266 valist, offset_field, NULL_TREE);
6282 6267
6283 t = make_tree (ptr_type_node, virtual_incoming_args_rtx); 6268 t = make_tree (ptr_type_node, virtual_incoming_args_rtx);
6284 t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, 6269 t = fold_build_pointer_plus_hwi (t, offset);
6285 size_int (offset));
6286 t = build2 (MODIFY_EXPR, TREE_TYPE (base_field), base_field, t); 6270 t = build2 (MODIFY_EXPR, TREE_TYPE (base_field), base_field, t);
6287 TREE_SIDE_EFFECTS (t) = 1; 6271 TREE_SIDE_EFFECTS (t) = 1;
6288 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL); 6272 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
6289 6273
6290 t = build_int_cst (NULL_TREE, NUM_ARGS * UNITS_PER_WORD); 6274 t = build_int_cst (NULL_TREE, NUM_ARGS * UNITS_PER_WORD);
6341 addend = fold_build3 (COND_EXPR, TREE_TYPE (addend), cond, 6325 addend = fold_build3 (COND_EXPR, TREE_TYPE (addend), cond,
6342 fpaddend, addend); 6326 fpaddend, addend);
6343 } 6327 }
6344 6328
6345 /* Build the final address and force that value into a temporary. */ 6329 /* Build the final address and force that value into a temporary. */
6346 addr = build2 (POINTER_PLUS_EXPR, ptr_type, fold_convert (ptr_type, base), 6330 addr = fold_build_pointer_plus (fold_convert (ptr_type, base), addend);
6347 fold_convert (sizetype, addend));
6348 internal_post = NULL; 6331 internal_post = NULL;
6349 gimplify_expr (&addr, pre_p, &internal_post, is_gimple_val, fb_rvalue); 6332 gimplify_expr (&addr, pre_p, &internal_post, is_gimple_val, fb_rvalue);
6350 gimple_seq_add_seq (pre_p, internal_post); 6333 gimple_seq_add_seq (pre_p, internal_post);
6351 6334
6352 /* Update the offset field. */ 6335 /* Update the offset field. */
6371 gimple_seq *post_p) 6354 gimple_seq *post_p)
6372 { 6355 {
6373 tree offset_field, base_field, offset, base, t, r; 6356 tree offset_field, base_field, offset, base, t, r;
6374 bool indirect; 6357 bool indirect;
6375 6358
6376 if (TARGET_ABI_OPEN_VMS || TARGET_ABI_UNICOSMK) 6359 if (TARGET_ABI_OPEN_VMS)
6377 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p); 6360 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
6378 6361
6379 base_field = TYPE_FIELDS (va_list_type_node); 6362 base_field = TYPE_FIELDS (va_list_type_node);
6380 offset_field = DECL_CHAIN (base_field); 6363 offset_field = DECL_CHAIN (base_field);
6381 base_field = build3 (COMPONENT_REF, TREE_TYPE (base_field), 6364 base_field = build3 (COMPONENT_REF, TREE_TYPE (base_field),
6386 /* Pull the fields of the structure out into temporaries. Since we never 6369 /* Pull the fields of the structure out into temporaries. Since we never
6387 modify the base field, we can use a formal temporary. Sign-extend the 6370 modify the base field, we can use a formal temporary. Sign-extend the
6388 offset field so that it's the proper width for pointer arithmetic. */ 6371 offset field so that it's the proper width for pointer arithmetic. */
6389 base = get_formal_tmp_var (base_field, pre_p); 6372 base = get_formal_tmp_var (base_field, pre_p);
6390 6373
6391 t = fold_convert (lang_hooks.types.type_for_size (64, 0), offset_field); 6374 t = fold_convert (build_nonstandard_integer_type (64, 0), offset_field);
6392 offset = get_initialized_tmp_var (t, pre_p, NULL); 6375 offset = get_initialized_tmp_var (t, pre_p, NULL);
6393 6376
6394 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false); 6377 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
6395 if (indirect) 6378 if (indirect)
6396 type = build_pointer_type_for_mode (type, ptr_mode, true); 6379 type = build_pointer_type_for_mode (type, ptr_mode, true);
6439 ALPHA_BUILTIN_ZAP, 6422 ALPHA_BUILTIN_ZAP,
6440 ALPHA_BUILTIN_ZAPNOT, 6423 ALPHA_BUILTIN_ZAPNOT,
6441 ALPHA_BUILTIN_AMASK, 6424 ALPHA_BUILTIN_AMASK,
6442 ALPHA_BUILTIN_IMPLVER, 6425 ALPHA_BUILTIN_IMPLVER,
6443 ALPHA_BUILTIN_RPCC, 6426 ALPHA_BUILTIN_RPCC,
6444 ALPHA_BUILTIN_THREAD_POINTER,
6445 ALPHA_BUILTIN_SET_THREAD_POINTER,
6446 ALPHA_BUILTIN_ESTABLISH_VMS_CONDITION_HANDLER, 6427 ALPHA_BUILTIN_ESTABLISH_VMS_CONDITION_HANDLER,
6447 ALPHA_BUILTIN_REVERT_VMS_CONDITION_HANDLER, 6428 ALPHA_BUILTIN_REVERT_VMS_CONDITION_HANDLER,
6448 6429
6449 /* TARGET_MAX */ 6430 /* TARGET_MAX */
6450 ALPHA_BUILTIN_MINUB8, 6431 ALPHA_BUILTIN_MINUB8,
6469 ALPHA_BUILTIN_max 6450 ALPHA_BUILTIN_max
6470 }; 6451 };
6471 6452
6472 static enum insn_code const code_for_builtin[ALPHA_BUILTIN_max] = { 6453 static enum insn_code const code_for_builtin[ALPHA_BUILTIN_max] = {
6473 CODE_FOR_builtin_cmpbge, 6454 CODE_FOR_builtin_cmpbge,
6474 CODE_FOR_builtin_extbl, 6455 CODE_FOR_extbl,
6475 CODE_FOR_builtin_extwl, 6456 CODE_FOR_extwl,
6476 CODE_FOR_builtin_extll, 6457 CODE_FOR_extll,
6477 CODE_FOR_builtin_extql, 6458 CODE_FOR_extql,
6478 CODE_FOR_builtin_extwh, 6459 CODE_FOR_extwh,
6479 CODE_FOR_builtin_extlh, 6460 CODE_FOR_extlh,
6480 CODE_FOR_builtin_extqh, 6461 CODE_FOR_extqh,
6481 CODE_FOR_builtin_insbl, 6462 CODE_FOR_builtin_insbl,
6482 CODE_FOR_builtin_inswl, 6463 CODE_FOR_builtin_inswl,
6483 CODE_FOR_builtin_insll, 6464 CODE_FOR_builtin_insll,
6484 CODE_FOR_builtin_insql, 6465 CODE_FOR_insql,
6485 CODE_FOR_builtin_inswh, 6466 CODE_FOR_inswh,
6486 CODE_FOR_builtin_inslh, 6467 CODE_FOR_inslh,
6487 CODE_FOR_builtin_insqh, 6468 CODE_FOR_insqh,
6488 CODE_FOR_builtin_mskbl, 6469 CODE_FOR_mskbl,
6489 CODE_FOR_builtin_mskwl, 6470 CODE_FOR_mskwl,
6490 CODE_FOR_builtin_mskll, 6471 CODE_FOR_mskll,
6491 CODE_FOR_builtin_mskql, 6472 CODE_FOR_mskql,
6492 CODE_FOR_builtin_mskwh, 6473 CODE_FOR_mskwh,
6493 CODE_FOR_builtin_msklh, 6474 CODE_FOR_msklh,
6494 CODE_FOR_builtin_mskqh, 6475 CODE_FOR_mskqh,
6495 CODE_FOR_umuldi3_highpart, 6476 CODE_FOR_umuldi3_highpart,
6496 CODE_FOR_builtin_zap, 6477 CODE_FOR_builtin_zap,
6497 CODE_FOR_builtin_zapnot, 6478 CODE_FOR_builtin_zapnot,
6498 CODE_FOR_builtin_amask, 6479 CODE_FOR_builtin_amask,
6499 CODE_FOR_builtin_implver, 6480 CODE_FOR_builtin_implver,
6500 CODE_FOR_builtin_rpcc, 6481 CODE_FOR_builtin_rpcc,
6501 CODE_FOR_load_tp,
6502 CODE_FOR_set_tp,
6503 CODE_FOR_builtin_establish_vms_condition_handler, 6482 CODE_FOR_builtin_establish_vms_condition_handler,
6504 CODE_FOR_builtin_revert_vms_condition_handler, 6483 CODE_FOR_builtin_revert_vms_condition_handler,
6505 6484
6506 /* TARGET_MAX */ 6485 /* TARGET_MAX */
6507 CODE_FOR_builtin_minub8, 6486 CODE_FOR_builtin_minub8,
6583 { "__builtin_alpha_maxuw4", ALPHA_BUILTIN_MAXUW4, MASK_MAX, true }, 6562 { "__builtin_alpha_maxuw4", ALPHA_BUILTIN_MAXUW4, MASK_MAX, true },
6584 { "__builtin_alpha_maxsw4", ALPHA_BUILTIN_MAXSW4, MASK_MAX, true }, 6563 { "__builtin_alpha_maxsw4", ALPHA_BUILTIN_MAXSW4, MASK_MAX, true },
6585 { "__builtin_alpha_perr", ALPHA_BUILTIN_PERR, MASK_MAX, true } 6564 { "__builtin_alpha_perr", ALPHA_BUILTIN_PERR, MASK_MAX, true }
6586 }; 6565 };
6587 6566
6567 static GTY(()) tree alpha_dimode_u;
6588 static GTY(()) tree alpha_v8qi_u; 6568 static GTY(()) tree alpha_v8qi_u;
6589 static GTY(()) tree alpha_v8qi_s; 6569 static GTY(()) tree alpha_v8qi_s;
6590 static GTY(()) tree alpha_v4hi_u; 6570 static GTY(()) tree alpha_v4hi_u;
6591 static GTY(()) tree alpha_v4hi_s; 6571 static GTY(()) tree alpha_v4hi_s;
6592 6572
6636 } 6616 }
6637 6617
6638 static void 6618 static void
6639 alpha_init_builtins (void) 6619 alpha_init_builtins (void)
6640 { 6620 {
6641 tree dimode_integer_type_node;
6642 tree ftype; 6621 tree ftype;
6643 6622
6644 dimode_integer_type_node = lang_hooks.types.type_for_mode (DImode, 0); 6623 alpha_dimode_u = lang_hooks.types.type_for_mode (DImode, 1);
6645 6624 alpha_v8qi_u = build_vector_type (unsigned_intQI_type_node, 8);
6646 /* Fwrite on VMS is non-standard. */ 6625 alpha_v8qi_s = build_vector_type (intQI_type_node, 8);
6647 #if TARGET_ABI_OPEN_VMS 6626 alpha_v4hi_u = build_vector_type (unsigned_intHI_type_node, 4);
6648 implicit_built_in_decls[(int) BUILT_IN_FWRITE] = NULL_TREE; 6627 alpha_v4hi_s = build_vector_type (intHI_type_node, 4);
6649 implicit_built_in_decls[(int) BUILT_IN_FWRITE_UNLOCKED] = NULL_TREE; 6628
6650 #endif 6629 ftype = build_function_type_list (alpha_dimode_u, NULL_TREE);
6651 6630 alpha_add_builtins (zero_arg_builtins, ARRAY_SIZE (zero_arg_builtins), ftype);
6652 ftype = build_function_type (dimode_integer_type_node, void_list_node); 6631
6653 alpha_add_builtins (zero_arg_builtins, ARRAY_SIZE (zero_arg_builtins), 6632 ftype = build_function_type_list (alpha_dimode_u, alpha_dimode_u, NULL_TREE);
6654 ftype); 6633 alpha_add_builtins (one_arg_builtins, ARRAY_SIZE (one_arg_builtins), ftype);
6655 6634
6656 ftype = build_function_type_list (dimode_integer_type_node, 6635 ftype = build_function_type_list (alpha_dimode_u, alpha_dimode_u,
6657 dimode_integer_type_node, NULL_TREE); 6636 alpha_dimode_u, NULL_TREE);
6658 alpha_add_builtins (one_arg_builtins, ARRAY_SIZE (one_arg_builtins), 6637 alpha_add_builtins (two_arg_builtins, ARRAY_SIZE (two_arg_builtins), ftype);
6659 ftype);
6660
6661 ftype = build_function_type_list (dimode_integer_type_node,
6662 dimode_integer_type_node,
6663 dimode_integer_type_node, NULL_TREE);
6664 alpha_add_builtins (two_arg_builtins, ARRAY_SIZE (two_arg_builtins),
6665 ftype);
6666
6667 ftype = build_function_type (ptr_type_node, void_list_node);
6668 alpha_builtin_function ("__builtin_thread_pointer", ftype,
6669 ALPHA_BUILTIN_THREAD_POINTER, ECF_NOTHROW);
6670
6671 ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
6672 alpha_builtin_function ("__builtin_set_thread_pointer", ftype,
6673 ALPHA_BUILTIN_SET_THREAD_POINTER, ECF_NOTHROW);
6674 6638
6675 if (TARGET_ABI_OPEN_VMS) 6639 if (TARGET_ABI_OPEN_VMS)
6676 { 6640 {
6677 ftype = build_function_type_list (ptr_type_node, ptr_type_node, 6641 ftype = build_function_type_list (ptr_type_node, ptr_type_node,
6678 NULL_TREE); 6642 NULL_TREE);
6683 6647
6684 ftype = build_function_type_list (ptr_type_node, void_type_node, 6648 ftype = build_function_type_list (ptr_type_node, void_type_node,
6685 NULL_TREE); 6649 NULL_TREE);
6686 alpha_builtin_function ("__builtin_revert_vms_condition_handler", ftype, 6650 alpha_builtin_function ("__builtin_revert_vms_condition_handler", ftype,
6687 ALPHA_BUILTIN_REVERT_VMS_CONDITION_HANDLER, 0); 6651 ALPHA_BUILTIN_REVERT_VMS_CONDITION_HANDLER, 0);
6688 } 6652
6689 6653 vms_patch_builtins ();
6690 alpha_v8qi_u = build_vector_type (unsigned_intQI_type_node, 8); 6654 }
6691 alpha_v8qi_s = build_vector_type (intQI_type_node, 8);
6692 alpha_v4hi_u = build_vector_type (unsigned_intHI_type_node, 4);
6693 alpha_v4hi_s = build_vector_type (intHI_type_node, 4);
6694 } 6655 }
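Once registered, the builtins are callable straight from C; a small usage sketch (function name invented):

/* Sketch: exercises one of the two-argument DImode builtins above.  */
unsigned long
byte_ge_mask (unsigned long a, unsigned long b)
{
  return __builtin_alpha_cmpbge (a, b);  /* byte-wise unsigned >= mask */
}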
6695 6656
6696 /* Expand an expression EXP that calls a built-in function, 6657 /* Expand an expression EXP that calls a built-in function,
6697 with result going to TARGET if that's convenient 6658 with result going to TARGET if that's convenient
6698 (and in mode MODE if that's convenient). 6659 (and in mode MODE if that's convenient).
6700 IGNORE is nonzero if the value is to be ignored. */ 6661 IGNORE is nonzero if the value is to be ignored. */
6701 6662
6702 static rtx 6663 static rtx
6703 alpha_expand_builtin (tree exp, rtx target, 6664 alpha_expand_builtin (tree exp, rtx target,
6704 rtx subtarget ATTRIBUTE_UNUSED, 6665 rtx subtarget ATTRIBUTE_UNUSED,
6705 enum machine_mode mode ATTRIBUTE_UNUSED, 6666 machine_mode mode ATTRIBUTE_UNUSED,
6706 int ignore ATTRIBUTE_UNUSED) 6667 int ignore ATTRIBUTE_UNUSED)
6707 { 6668 {
6708 #define MAX_ARGS 2 6669 #define MAX_ARGS 2
6709 6670
6710 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0); 6671 tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
6743 arity++; 6704 arity++;
6744 } 6705 }
6745 6706
6746 if (nonvoid) 6707 if (nonvoid)
6747 { 6708 {
6748 enum machine_mode tmode = insn_data[icode].operand[0].mode; 6709 machine_mode tmode = insn_data[icode].operand[0].mode;
6749 if (!target 6710 if (!target
6750 || GET_MODE (target) != tmode 6711 || GET_MODE (target) != tmode
6751 || !(*insn_data[icode].operand[0].predicate) (target, tmode)) 6712 || !(*insn_data[icode].operand[0].predicate) (target, tmode))
6752 target = gen_reg_rtx (tmode); 6713 target = gen_reg_rtx (tmode);
6753 } 6714 }
6777 return target; 6738 return target;
6778 else 6739 else
6779 return const0_rtx; 6740 return const0_rtx;
6780 } 6741 }
6781 6742
6782
6783 /* Several bits below assume HWI >= 64 bits. This should be enforced
6784 by config.gcc. */
6785 #if HOST_BITS_PER_WIDE_INT < 64
6786 # error "HOST_WIDE_INT too small"
6787 #endif
6788
6789 /* Fold the builtin for the CMPBGE instruction. This is a vector comparison 6743 /* Fold the builtin for the CMPBGE instruction. This is a vector comparison
6790 with an 8-bit output vector. OPINT contains the integer operands; bit N 6744 with an 8-bit output vector. OPINT contains the integer operands; bit N
6791 of OP_CONST is set if OPINT[N] is valid. */ 6745 of OP_CONST is set if OPINT[N] is valid. */
6792 6746
6793 static tree 6747 static tree
6801 unsigned HOST_WIDE_INT c0 = (opint[0] >> (i * 8)) & 0xff; 6755 unsigned HOST_WIDE_INT c0 = (opint[0] >> (i * 8)) & 0xff;
6802 unsigned HOST_WIDE_INT c1 = (opint[1] >> (i * 8)) & 0xff; 6756 unsigned HOST_WIDE_INT c1 = (opint[1] >> (i * 8)) & 0xff;
6803 if (c0 >= c1) 6757 if (c0 >= c1)
6804 val |= 1 << i; 6758 val |= 1 << i;
6805 } 6759 }
6806 return build_int_cst (long_integer_type_node, val); 6760 return build_int_cst (alpha_dimode_u, val);
6807 } 6761 }
6808 else if (op_const == 2 && opint[1] == 0) 6762 else if (op_const == 2 && opint[1] == 0)
6809 return build_int_cst (long_integer_type_node, 0xff); 6763 return build_int_cst (alpha_dimode_u, 0xff);
6810 return NULL; 6764 return NULL;
6811 } 6765 }
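For reference, the semantics being folded can be phrased as a host-side C sketch mirroring the loop above (assumes a 64-bit unsigned long):

static unsigned int
cmpbge_value (unsigned long a, unsigned long b)
{
  unsigned int val = 0;
  int i;

  /* One result bit per byte lane: set if byte of A >= byte of B.  */
  for (i = 0; i < 8; i++)
    if (((a >> (i * 8)) & 0xff) >= ((b >> (i * 8)) & 0xff))
      val |= 1u << i;
  return val;
}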
6812 6766
6813 /* Fold the builtin for the ZAPNOT instruction. This is essentially a 6767 /* Fold the builtin for the ZAPNOT instruction. This is essentially a
6814 specialized form of an AND operation. Other byte manipulation instructions 6768 specialized form of an AND operation. Other byte manipulation instructions
6831 for (i = 0; i < 8; ++i) 6785 for (i = 0; i < 8; ++i)
6832 if ((opint[1] >> i) & 1) 6786 if ((opint[1] >> i) & 1)
6833 mask |= (unsigned HOST_WIDE_INT)0xff << (i * 8); 6787 mask |= (unsigned HOST_WIDE_INT)0xff << (i * 8);
6834 6788
6835 if (op_const & 1) 6789 if (op_const & 1)
6836 return build_int_cst (long_integer_type_node, opint[0] & mask); 6790 return build_int_cst (alpha_dimode_u, opint[0] & mask);
6837 6791
6838 if (op) 6792 if (op)
6839 return fold_build2 (BIT_AND_EXPR, long_integer_type_node, op[0], 6793 return fold_build2 (BIT_AND_EXPR, alpha_dimode_u, op[0],
6840 build_int_cst (long_integer_type_node, mask)); 6794 build_int_cst (alpha_dimode_u, mask));
6841 } 6795 }
6842 else if ((op_const & 1) && opint[0] == 0) 6796 else if ((op_const & 1) && opint[0] == 0)
6843 return build_int_cst (long_integer_type_node, 0); 6797 return build_int_cst (alpha_dimode_u, 0);
6844 return NULL; 6798 return NULL;
6845 } 6799 }
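The mask construction in the middle of that function also reads well as a standalone sketch (again assuming a 64-bit unsigned long):

static unsigned long
zapnot_mask (unsigned int selector)
{
  unsigned long mask = 0;
  int i;

  /* Each low bit of SELECTOR keeps (1) or clears (0) one byte.  */
  for (i = 0; i < 8; i++)
    if ((selector >> i) & 1)
      mask |= 0xffUL << (i * 8);
  return mask;
}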
6846 6800
6847 /* Fold the builtins for the EXT family of instructions. */ 6801 /* Fold the builtins for the EXT family of instructions. */
6848 6802
6857 if (op_const & 2) 6811 if (op_const & 2)
6858 { 6812 {
6859 unsigned HOST_WIDE_INT loc; 6813 unsigned HOST_WIDE_INT loc;
6860 6814
6861 loc = opint[1] & 7; 6815 loc = opint[1] & 7;
6862 if (BYTES_BIG_ENDIAN) 6816 loc *= BITS_PER_UNIT;
6863 loc ^= 7;
6864 loc *= 8;
6865 6817
6866 if (loc != 0) 6818 if (loc != 0)
6867 { 6819 {
6868 if (op_const & 1) 6820 if (op_const & 1)
6869 { 6821 {
6890 alpha_fold_builtin_insxx (tree op[], unsigned HOST_WIDE_INT opint[], 6842 alpha_fold_builtin_insxx (tree op[], unsigned HOST_WIDE_INT opint[],
6891 long op_const, unsigned HOST_WIDE_INT bytemask, 6843 long op_const, unsigned HOST_WIDE_INT bytemask,
6892 bool is_high) 6844 bool is_high)
6893 { 6845 {
6894 if ((op_const & 1) && opint[0] == 0) 6846 if ((op_const & 1) && opint[0] == 0)
6895 return build_int_cst (long_integer_type_node, 0); 6847 return build_int_cst (alpha_dimode_u, 0);
6896 6848
6897 if (op_const & 2) 6849 if (op_const & 2)
6898 { 6850 {
6899 unsigned HOST_WIDE_INT temp, loc, byteloc; 6851 unsigned HOST_WIDE_INT temp, loc, byteloc;
6900 tree *zap_op = NULL; 6852 tree *zap_op = NULL;
6901 6853
6902 loc = opint[1] & 7; 6854 loc = opint[1] & 7;
6903 if (BYTES_BIG_ENDIAN)
6904 loc ^= 7;
6905 bytemask <<= loc; 6855 bytemask <<= loc;
6906 6856
6907 temp = opint[0]; 6857 temp = opint[0];
6908 if (is_high) 6858 if (is_high)
6909 { 6859 {
6939 if (op_const & 2) 6889 if (op_const & 2)
6940 { 6890 {
6941 unsigned HOST_WIDE_INT loc; 6891 unsigned HOST_WIDE_INT loc;
6942 6892
6943 loc = opint[1] & 7; 6893 loc = opint[1] & 7;
6944 if (BYTES_BIG_ENDIAN)
6945 loc ^= 7;
6946 bytemask <<= loc; 6894 bytemask <<= loc;
6947 6895
6948 if (is_high) 6896 if (is_high)
6949 bytemask >>= 8; 6897 bytemask >>= 8;
6950 6898
6951 opint[1] = bytemask ^ 0xff; 6899 opint[1] = bytemask ^ 0xff;
6952 } 6900 }
6953 6901
6954 return alpha_fold_builtin_zapnot (op, opint, op_const); 6902 return alpha_fold_builtin_zapnot (op, opint, op_const);
6955 }
6956
6957 static tree
6958 alpha_fold_builtin_umulh (unsigned HOST_WIDE_INT opint[], long op_const)
6959 {
6960 switch (op_const)
6961 {
6962 case 3:
6963 {
6964 unsigned HOST_WIDE_INT l;
6965 HOST_WIDE_INT h;
6966
6967 mul_double (opint[0], 0, opint[1], 0, &l, &h);
6968
6969 #if HOST_BITS_PER_WIDE_INT > 64
6970 # error fixme
6971 #endif
6972
6973 return build_int_cst (long_integer_type_node, h);
6974 }
6975
6976 case 1:
6977 opint[1] = opint[0];
6978 /* FALLTHRU */
6979 case 2:
6980 /* Note that (X*1) >> 64 == 0. */
6981 if (opint[1] == 0 || opint[1] == 1)
6982 return build_int_cst (long_integer_type_node, 0);
6983 break;
6984 }
6985 return NULL;
6986 } 6903 }
6987 6904
6988 static tree 6905 static tree
6989 alpha_fold_vector_minmax (enum tree_code code, tree op[], tree vtype) 6906 alpha_fold_vector_minmax (enum tree_code code, tree op[], tree vtype)
6990 { 6907 {
6991 tree op0 = fold_convert (vtype, op[0]); 6908 tree op0 = fold_convert (vtype, op[0]);
6992 tree op1 = fold_convert (vtype, op[1]); 6909 tree op1 = fold_convert (vtype, op[1]);
6993 tree val = fold_build2 (code, vtype, op0, op1); 6910 tree val = fold_build2 (code, vtype, op0, op1);
6994 return fold_build1 (VIEW_CONVERT_EXPR, long_integer_type_node, val); 6911 return fold_build1 (VIEW_CONVERT_EXPR, alpha_dimode_u, val);
6995 } 6912 }
6996 6913
6997 static tree 6914 static tree
6998 alpha_fold_builtin_perr (unsigned HOST_WIDE_INT opint[], long op_const) 6915 alpha_fold_builtin_perr (unsigned HOST_WIDE_INT opint[], long op_const)
6999 { 6916 {
7011 temp += a - b; 6928 temp += a - b;
7012 else 6929 else
7013 temp += b - a; 6930 temp += b - a;
7014 } 6931 }
7015 6932
7016 return build_int_cst (long_integer_type_node, temp); 6933 return build_int_cst (alpha_dimode_u, temp);
7017 } 6934 }
7018 6935
7019 static tree 6936 static tree
7020 alpha_fold_builtin_pklb (unsigned HOST_WIDE_INT opint[], long op_const) 6937 alpha_fold_builtin_pklb (unsigned HOST_WIDE_INT opint[], long op_const)
7021 { 6938 {
7025 return NULL; 6942 return NULL;
7026 6943
7027 temp = opint[0] & 0xff; 6944 temp = opint[0] & 0xff;
7028 temp |= (opint[0] >> 24) & 0xff00; 6945 temp |= (opint[0] >> 24) & 0xff00;
7029 6946
7030 return build_int_cst (long_integer_type_node, temp); 6947 return build_int_cst (alpha_dimode_u, temp);
7031 } 6948 }
7032 6949
7033 static tree 6950 static tree
7034 alpha_fold_builtin_pkwb (unsigned HOST_WIDE_INT opint[], long op_const) 6951 alpha_fold_builtin_pkwb (unsigned HOST_WIDE_INT opint[], long op_const)
7035 { 6952 {
7041 temp = opint[0] & 0xff; 6958 temp = opint[0] & 0xff;
7042 temp |= (opint[0] >> 8) & 0xff00; 6959 temp |= (opint[0] >> 8) & 0xff00;
7043 temp |= (opint[0] >> 16) & 0xff0000; 6960 temp |= (opint[0] >> 16) & 0xff0000;
7044 temp |= (opint[0] >> 24) & 0xff000000; 6961 temp |= (opint[0] >> 24) & 0xff000000;
7045 6962
7046 return build_int_cst (long_integer_type_node, temp); 6963 return build_int_cst (alpha_dimode_u, temp);
7047 } 6964 }
7048 6965
7049 static tree 6966 static tree
7050 alpha_fold_builtin_unpkbl (unsigned HOST_WIDE_INT opint[], long op_const) 6967 alpha_fold_builtin_unpkbl (unsigned HOST_WIDE_INT opint[], long op_const)
7051 { 6968 {
7055 return NULL; 6972 return NULL;
7056 6973
7057 temp = opint[0] & 0xff; 6974 temp = opint[0] & 0xff;
7058 temp |= (opint[0] & 0xff00) << 24; 6975 temp |= (opint[0] & 0xff00) << 24;
7059 6976
7060 return build_int_cst (long_integer_type_node, temp); 6977 return build_int_cst (alpha_dimode_u, temp);
7061 } 6978 }
7062 6979
7063 static tree 6980 static tree
7064 alpha_fold_builtin_unpkbw (unsigned HOST_WIDE_INT opint[], long op_const) 6981 alpha_fold_builtin_unpkbw (unsigned HOST_WIDE_INT opint[], long op_const)
7065 { 6982 {
7071 temp = opint[0] & 0xff; 6988 temp = opint[0] & 0xff;
7072 temp |= (opint[0] & 0x0000ff00) << 8; 6989 temp |= (opint[0] & 0x0000ff00) << 8;
7073 temp |= (opint[0] & 0x00ff0000) << 16; 6990 temp |= (opint[0] & 0x00ff0000) << 16;
7074 temp |= (opint[0] & 0xff000000) << 24; 6991 temp |= (opint[0] & 0xff000000) << 24;
7075 6992
7076 return build_int_cst (long_integer_type_node, temp); 6993 return build_int_cst (alpha_dimode_u, temp);
7077 } 6994 }
7078 6995
7079 static tree 6996 static tree
7080 alpha_fold_builtin_cttz (unsigned HOST_WIDE_INT opint[], long op_const) 6997 alpha_fold_builtin_cttz (unsigned HOST_WIDE_INT opint[], long op_const)
7081 { 6998 {
7087 if (opint[0] == 0) 7004 if (opint[0] == 0)
7088 temp = 64; 7005 temp = 64;
7089 else 7006 else
7090 temp = exact_log2 (opint[0] & -opint[0]); 7007 temp = exact_log2 (opint[0] & -opint[0]);
7091 7008
7092 return build_int_cst (long_integer_type_node, temp); 7009 return build_int_cst (alpha_dimode_u, temp);
7093 } 7010 }
7094 7011
7095 static tree 7012 static tree
7096 alpha_fold_builtin_ctlz (unsigned HOST_WIDE_INT opint[], long op_const) 7013 alpha_fold_builtin_ctlz (unsigned HOST_WIDE_INT opint[], long op_const)
7097 { 7014 {
7103 if (opint[0] == 0) 7020 if (opint[0] == 0)
7104 temp = 64; 7021 temp = 64;
7105 else 7022 else
7106 temp = 64 - floor_log2 (opint[0]) - 1; 7023 temp = 64 - floor_log2 (opint[0]) - 1;
7107 7024
7108 return build_int_cst (long_integer_type_node, temp); 7025 return build_int_cst (alpha_dimode_u, temp);
7109 } 7026 }
7110 7027
7111 static tree 7028 static tree
7112 alpha_fold_builtin_ctpop (unsigned HOST_WIDE_INT opint[], long op_const) 7029 alpha_fold_builtin_ctpop (unsigned HOST_WIDE_INT opint[], long op_const)
7113 { 7030 {
7119 op = opint[0]; 7036 op = opint[0];
7120 temp = 0; 7037 temp = 0;
7121 while (op) 7038 while (op)
7122 temp++, op &= op - 1; 7039 temp++, op &= op - 1;
7123 7040
7124 return build_int_cst (long_integer_type_node, temp); 7041 return build_int_cst (alpha_dimode_u, temp);
7125 } 7042 }
7126 7043
7127 /* Fold one of our builtin functions. */ 7044 /* Fold one of our builtin functions. */
7128 7045
7129 static tree 7046 static tree
7132 { 7049 {
7133 unsigned HOST_WIDE_INT opint[MAX_ARGS]; 7050 unsigned HOST_WIDE_INT opint[MAX_ARGS];
7134 long op_const = 0; 7051 long op_const = 0;
7135 int i; 7052 int i;
7136 7053
7137 if (n_args >= MAX_ARGS) 7054 if (n_args > MAX_ARGS)
7138 return NULL; 7055 return NULL;
7139 7056
7140 for (i = 0; i < n_args; i++) 7057 for (i = 0; i < n_args; i++)
7141 { 7058 {
7142 tree arg = op[i]; 7059 tree arg = op[i];
7198 return alpha_fold_builtin_mskxx (op, opint, op_const, 0x03, true); 7115 return alpha_fold_builtin_mskxx (op, opint, op_const, 0x03, true);
7199 case ALPHA_BUILTIN_MSKLH: 7116 case ALPHA_BUILTIN_MSKLH:
7200 return alpha_fold_builtin_mskxx (op, opint, op_const, 0x0f, true); 7117 return alpha_fold_builtin_mskxx (op, opint, op_const, 0x0f, true);
7201 case ALPHA_BUILTIN_MSKQH: 7118 case ALPHA_BUILTIN_MSKQH:
7202 return alpha_fold_builtin_mskxx (op, opint, op_const, 0xff, true); 7119 return alpha_fold_builtin_mskxx (op, opint, op_const, 0xff, true);
7203
7204 case ALPHA_BUILTIN_UMULH:
7205 return alpha_fold_builtin_umulh (opint, op_const);
7206 7120
7207 case ALPHA_BUILTIN_ZAP: 7121 case ALPHA_BUILTIN_ZAP:
7208 opint[1] ^= 0xff; 7122 opint[1] ^= 0xff;
7209 /* FALLTHRU */ 7123 /* FALLTHRU */
7210 case ALPHA_BUILTIN_ZAPNOT: 7124 case ALPHA_BUILTIN_ZAPNOT:
7246 return alpha_fold_builtin_ctpop (opint, op_const); 7160 return alpha_fold_builtin_ctpop (opint, op_const);
7247 7161
7248 case ALPHA_BUILTIN_AMASK: 7162 case ALPHA_BUILTIN_AMASK:
7249 case ALPHA_BUILTIN_IMPLVER: 7163 case ALPHA_BUILTIN_IMPLVER:
7250 case ALPHA_BUILTIN_RPCC: 7164 case ALPHA_BUILTIN_RPCC:
7251 case ALPHA_BUILTIN_THREAD_POINTER:
7252 case ALPHA_BUILTIN_SET_THREAD_POINTER:
7253 /* None of these are foldable at compile-time. */ 7165 /* None of these are foldable at compile-time. */
7254 default: 7166 default:
7255 return NULL; 7167 return NULL;
7256 } 7168 }
7169 }
7170
7171 bool
7172 alpha_gimple_fold_builtin (gimple_stmt_iterator *gsi)
7173 {
7174 bool changed = false;
7175 gimple *stmt = gsi_stmt (*gsi);
7176 tree call = gimple_call_fn (stmt);
7177 gimple *new_stmt = NULL;
7178
7179 if (call)
7180 {
7181 tree fndecl = gimple_call_fndecl (stmt);
7182
7183 if (fndecl)
7184 {
7185 tree arg0, arg1;
7186
7187 switch (DECL_FUNCTION_CODE (fndecl))
7188 {
7189 case ALPHA_BUILTIN_UMULH:
7190 arg0 = gimple_call_arg (stmt, 0);
7191 arg1 = gimple_call_arg (stmt, 1);
7192
7193 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
7194 MULT_HIGHPART_EXPR, arg0, arg1);
7195 break;
7196 default:
7197 break;
7198 }
7199 }
7200 }
7201
7202 if (new_stmt)
7203 {
7204 gsi_replace (gsi, new_stmt, true);
7205 changed = true;
7206 }
7207
7208 return changed;
7257 } 7209 }
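With this gimple-level fold in place, a UMULH call lowers to the generic high-part multiply; a usage sketch (function name invented):

/* Sketch: the fold above turns this call into MULT_HIGHPART_EXPR.  */
unsigned long
high_product (unsigned long a, unsigned long b)
{
  return __builtin_alpha_umulh (a, b);  /* upper 64 bits of a * b */
}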
7258 7210
7259 /* This page contains routines that are used to determine what the function 7211 /* This page contains routines that are used to determine what the function
7260 prologue and epilogue code will do and write them out. */ 7212 prologue and epilogue code will do and write them out. */
7261 7213
7304 imask |= (1UL << HARD_FRAME_POINTER_REGNUM); 7256 imask |= (1UL << HARD_FRAME_POINTER_REGNUM);
7305 7257
7306 /* One for every register we have to save. */ 7258 /* One for every register we have to save. */
7307 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 7259 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
7308 if (! fixed_regs[i] && ! call_used_regs[i] 7260 if (! fixed_regs[i] && ! call_used_regs[i]
7309 && df_regs_ever_live_p (i) && i != REG_RA 7261 && df_regs_ever_live_p (i) && i != REG_RA)
7310 && (!TARGET_ABI_UNICOSMK || i != HARD_FRAME_POINTER_REGNUM))
7311 { 7262 {
7312 if (i < 32) 7263 if (i < 32)
7313 imask |= (1UL << i); 7264 imask |= (1UL << i);
7314 else 7265 else
7315 fmask |= (1UL << (i - 32)); 7266 fmask |= (1UL << (i - 32));
7344 int sa_size = 0; 7295 int sa_size = 0;
7345 int i, j; 7296 int i, j;
7346 7297
7347 alpha_sa_mask (&mask[0], &mask[1]); 7298 alpha_sa_mask (&mask[0], &mask[1]);
7348 7299
7349 if (TARGET_ABI_UNICOSMK) 7300 for (j = 0; j < 2; ++j)
7350 { 7301 for (i = 0; i < 32; ++i)
7351 if (mask[0] || mask[1]) 7302 if ((mask[j] >> i) & 1)
7352 sa_size = 14; 7303 sa_size++;
7353 } 7304
7354 else 7305 if (TARGET_ABI_OPEN_VMS)
7355 {
7356 for (j = 0; j < 2; ++j)
7357 for (i = 0; i < 32; ++i)
7358 if ((mask[j] >> i) & 1)
7359 sa_size++;
7360 }
7361
7362 if (TARGET_ABI_UNICOSMK)
7363 {
7364 /* We might not need to generate a frame if we don't make any calls
7365 (including calls to __T3E_MISMATCH if this is a vararg function),
7366 don't have any local variables which require stack slots, don't
7367 use alloca and have not determined that we need a frame for other
7368 reasons. */
7369
7370 alpha_procedure_type
7371 = (sa_size || get_frame_size() != 0
7372 || crtl->outgoing_args_size
7373 || cfun->stdarg || cfun->calls_alloca
7374 || frame_pointer_needed)
7375 ? PT_STACK : PT_REGISTER;
7376
7377 /* Always reserve space for saving callee-saved registers if we
7378 need a frame as required by the calling convention. */
7379 if (alpha_procedure_type == PT_STACK)
7380 sa_size = 14;
7381 }
7382 else if (TARGET_ABI_OPEN_VMS)
7383 { 7306 {
7384 /* Start with a stack procedure if we make any calls (REG_RA used), or 7307 /* Start with a stack procedure if we make any calls (REG_RA used), or
7385 need a frame pointer, with a register procedure if we otherwise need 7308 need a frame pointer, with a register procedure if we otherwise need
7386 at least a slot, and with a null procedure in other cases. */ 7309 at least a slot, and with a null procedure in other cases. */
7387 if ((mask[0] >> REG_RA) & 1 || frame_pointer_needed) 7310 if ((mask[0] >> REG_RA) & 1 || frame_pointer_needed)
7580 return NULL_TREE; 7503 return NULL_TREE;
7581 } 7504 }
7582 7505
7583 static const struct attribute_spec vms_attribute_table[] = 7506 static const struct attribute_spec vms_attribute_table[] =
7584 { 7507 {
7585 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */ 7508 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
7586 { COMMON_OBJECT, 0, 1, true, false, false, common_object_handler }, 7509 affects_type_identity } */
7587 { NULL, 0, 0, false, false, false, NULL } 7510 { COMMON_OBJECT, 0, 1, true, false, false, common_object_handler, false },
7511 { NULL, 0, 0, false, false, false, NULL, false }
7588 }; 7512 };
7589 7513
7590 void 7514 void
7591 vms_output_aligned_decl_common(FILE *file, tree decl, const char *name, 7515 vms_output_aligned_decl_common(FILE *file, tree decl, const char *name,
7592 unsigned HOST_WIDE_INT size, 7516 unsigned HOST_WIDE_INT size,
7610 7534
7611 #undef COMMON_OBJECT 7535 #undef COMMON_OBJECT
7612 7536
7613 #endif 7537 #endif
7614 7538
7615 static int 7539 bool
7616 find_lo_sum_using_gp (rtx *px, void *data ATTRIBUTE_UNUSED)
7617 {
7618 return GET_CODE (*px) == LO_SUM && XEXP (*px, 0) == pic_offset_table_rtx;
7619 }
7620
7621 int
7622 alpha_find_lo_sum_using_gp (rtx insn) 7540 alpha_find_lo_sum_using_gp (rtx insn)
7623 { 7541 {
7624 return for_each_rtx (&PATTERN (insn), find_lo_sum_using_gp, NULL) > 0; 7542 subrtx_iterator::array_type array;
7543 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
7544 {
7545 const_rtx x = *iter;
7546 if (GET_CODE (x) == LO_SUM && XEXP (x, 0) == pic_offset_table_rtx)
7547 return true;
7548 }
7549 return false;
7625 } 7550 }
7626 7551
7627 static int 7552 static int
7628 alpha_does_function_need_gp (void) 7553 alpha_does_function_need_gp (void)
7629 { 7554 {
7630 rtx insn; 7555 rtx_insn *insn;
7631 7556
7632 /* The GP being variable is an OSF abi thing. */ 7557 /* The GP being variable is an OSF abi thing. */
7633 if (! TARGET_ABI_OSF) 7558 if (! TARGET_ABI_OSF)
7634 return 0; 7559 return 0;
7635 7560
7656 insn = get_insns (); 7581 insn = get_insns ();
7657 pop_topmost_sequence (); 7582 pop_topmost_sequence ();
7658 7583
7659 for (; insn; insn = NEXT_INSN (insn)) 7584 for (; insn; insn = NEXT_INSN (insn))
7660 if (NONDEBUG_INSN_P (insn) 7585 if (NONDEBUG_INSN_P (insn)
7661 && ! JUMP_TABLE_DATA_P (insn)
7662 && GET_CODE (PATTERN (insn)) != USE 7586 && GET_CODE (PATTERN (insn)) != USE
7663 && GET_CODE (PATTERN (insn)) != CLOBBER 7587 && GET_CODE (PATTERN (insn)) != CLOBBER
7664 && get_attr_usegp (insn)) 7588 && get_attr_usegp (insn))
7665 return 1; 7589 return 1;
7666 7590
7669 7593
7670 7594
7671 /* Helper function to set RTX_FRAME_RELATED_P on instructions, including 7595 /* Helper function to set RTX_FRAME_RELATED_P on instructions, including
7672 sequences. */ 7596 sequences. */
7673 7597
7674 static rtx 7598 static rtx_insn *
7675 set_frame_related_p (void) 7599 set_frame_related_p (void)
7676 { 7600 {
7677 rtx seq = get_insns (); 7601 rtx_insn *seq = get_insns ();
7678 rtx insn; 7602 rtx_insn *insn;
7679 7603
7680 end_sequence (); 7604 end_sequence ();
7681 7605
7682 if (!seq) 7606 if (!seq)
7683 return NULL_RTX; 7607 return NULL;
7684 7608
7685 if (INSN_P (seq)) 7609 if (INSN_P (seq))
7686 { 7610 {
7687 insn = seq; 7611 insn = seq;
7688 while (insn != NULL_RTX) 7612 while (insn != NULL_RTX)
7710 7634
7711 static void 7635 static void
7712 emit_frame_store_1 (rtx value, rtx base_reg, HOST_WIDE_INT frame_bias, 7636 emit_frame_store_1 (rtx value, rtx base_reg, HOST_WIDE_INT frame_bias,
7713 HOST_WIDE_INT base_ofs, rtx frame_reg) 7637 HOST_WIDE_INT base_ofs, rtx frame_reg)
7714 { 7638 {
7715 rtx addr, mem, insn; 7639 rtx addr, mem;
7716 7640 rtx_insn *insn;
7717 addr = plus_constant (base_reg, base_ofs); 7641
7718 mem = gen_rtx_MEM (DImode, addr); 7642 addr = plus_constant (Pmode, base_reg, base_ofs);
7719 set_mem_alias_set (mem, alpha_sr_alias_set); 7643 mem = gen_frame_mem (DImode, addr);
7720 7644
7721 insn = emit_move_insn (mem, value); 7645 insn = emit_move_insn (mem, value);
7722 RTX_FRAME_RELATED_P (insn) = 1; 7646 RTX_FRAME_RELATED_P (insn) = 1;
7723 7647
7724 if (frame_bias || value != frame_reg) 7648 if (frame_bias || value != frame_reg)
7725 { 7649 {
7726 if (frame_bias) 7650 if (frame_bias)
7727 { 7651 {
7728 addr = plus_constant (stack_pointer_rtx, frame_bias + base_ofs); 7652 addr = plus_constant (Pmode, stack_pointer_rtx,
7653 frame_bias + base_ofs);
7729 mem = gen_rtx_MEM (DImode, addr); 7654 mem = gen_rtx_MEM (DImode, addr);
7730 } 7655 }
7731 7656
7732 add_reg_note (insn, REG_FRAME_RELATED_EXPR, 7657 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
7733 gen_rtx_SET (VOIDmode, mem, frame_reg)); 7658 gen_rtx_SET (mem, frame_reg));
7734 } 7659 }
7735 } 7660 }
7736 7661
7737 static void 7662 static void
7738 emit_frame_store (unsigned int regno, rtx base_reg, 7663 emit_frame_store (unsigned int regno, rtx base_reg,
7751 if (TARGET_ABI_OPEN_VMS) 7676 if (TARGET_ABI_OPEN_VMS)
7752 return ALPHA_ROUND (sa_size 7677 return ALPHA_ROUND (sa_size
7753 + (alpha_procedure_type == PT_STACK ? 8 : 0) 7678 + (alpha_procedure_type == PT_STACK ? 8 : 0)
7754 + size 7679 + size
7755 + crtl->args.pretend_args_size); 7680 + crtl->args.pretend_args_size);
7756 else if (TARGET_ABI_UNICOSMK)
7757 /* We have to allocate space for the DSIB if we generate a frame. */
7758 return ALPHA_ROUND (sa_size
7759 + (alpha_procedure_type == PT_STACK ? 48 : 0))
7760 + ALPHA_ROUND (size
7761 + crtl->outgoing_args_size);
7762 else 7681 else
7763 return ALPHA_ROUND (crtl->outgoing_args_size) 7682 return ALPHA_ROUND (crtl->outgoing_args_size)
7764 + sa_size 7683 + sa_size
7765 + ALPHA_ROUND (size 7684 + ALPHA_ROUND (size
7766 + crtl->args.pretend_args_size); 7685 + crtl->args.pretend_args_size);
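A small arithmetic check of the non-VMS branch, with invented inputs and assuming ALPHA_ROUND rounds up to a 16-byte multiple:

/* Sketch: size == 40, sa_size == 16, outgoing_args_size == 0,
   pretend_args_size == 0:
     ALPHA_ROUND (0) + 16 + ALPHA_ROUND (40 + 0)
   = 0 + 16 + 48 = 64 bytes of frame.  */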
7788 { 7707 {
7789 /* Registers to save. */ 7708 /* Registers to save. */
7790 unsigned long imask = 0; 7709 unsigned long imask = 0;
7791 unsigned long fmask = 0; 7710 unsigned long fmask = 0;
7792 /* Stack space needed for pushing registers clobbered by us. */ 7711 /* Stack space needed for pushing registers clobbered by us. */
7793 HOST_WIDE_INT sa_size; 7712 HOST_WIDE_INT sa_size, sa_bias;
7794 /* Complete stack size needed. */ 7713 /* Complete stack size needed. */
7795 HOST_WIDE_INT frame_size; 7714 HOST_WIDE_INT frame_size;
7796 /* Probed stack size; it additionally includes the size of 7715 /* Probed stack size; it additionally includes the size of
7797 the "reserve region" if any. */ 7716 the "reserve region" if any. */
7798 HOST_WIDE_INT probed_size; 7717 HOST_WIDE_INT probed_size;
7802 int i; 7721 int i;
7803 7722
7804 sa_size = alpha_sa_size (); 7723 sa_size = alpha_sa_size ();
7805 frame_size = compute_frame_size (get_frame_size (), sa_size); 7724 frame_size = compute_frame_size (get_frame_size (), sa_size);
7806 7725
7807 if (flag_stack_usage) 7726 if (flag_stack_usage_info)
7808 current_function_static_stack_size = frame_size; 7727 current_function_static_stack_size = frame_size;
7809 7728
7810 if (TARGET_ABI_OPEN_VMS) 7729 if (TARGET_ABI_OPEN_VMS)
7811 reg_offset = 8 + 8 * cfun->machine->uses_condition_handler; 7730 reg_offset = 8 + 8 * cfun->machine->uses_condition_handler;
7812 else 7731 else
7827 magically in response to -pg. Since _mcount has special linkage, 7746 magically in response to -pg. Since _mcount has special linkage,
7828 don't represent the call as a call. */ 7747 don't represent the call as a call. */
7829 if (TARGET_PROFILING_NEEDS_GP && crtl->profile) 7748 if (TARGET_PROFILING_NEEDS_GP && crtl->profile)
7830 emit_insn (gen_prologue_mcount ()); 7749 emit_insn (gen_prologue_mcount ());
7831 7750
7832 if (TARGET_ABI_UNICOSMK)
7833 unicosmk_gen_dsib (&imask);
7834
7835 /* Adjust the stack by the frame size. If the frame size is > 4096 7751 /* Adjust the stack by the frame size. If the frame size is > 4096
7836 bytes, we need to be sure we probe somewhere in the first and last 7752 bytes, we need to be sure we probe somewhere in the first and last
7837 4096 bytes (we can probably get away without the latter test) and 7753 4096 bytes (we can probably get away without the latter test) and
7838 every 8192 bytes in between. If the frame size is > 32768, we 7754 every 8192 bytes in between. If the frame size is > 32768, we
7839 do this in a loop. Otherwise, we generate the explicit probe 7755 do this in a loop. Otherwise, we generate the explicit probe
7840 instructions. 7756 instructions.
7841 7757
7842 Note that we are only allowed to adjust sp once in the prologue. */ 7758 Note that we are only allowed to adjust sp once in the prologue. */
7843 7759
7844 probed_size = frame_size; 7760 probed_size = frame_size;
7845 if (flag_stack_check) 7761 if (flag_stack_check || flag_stack_clash_protection)
7846 probed_size += STACK_CHECK_PROTECT; 7762 probed_size += get_stack_check_protect ();
7847 7763
7848 if (probed_size <= 32768) 7764 if (probed_size <= 32768)
7849 { 7765 {
7850 if (probed_size > 4096) 7766 if (probed_size > 4096)
7851 { 7767 {
7852 int probed; 7768 int probed;
7853 7769
7854 for (probed = 4096; probed < probed_size; probed += 8192) 7770 for (probed = 4096; probed < probed_size; probed += 8192)
7855 emit_insn (gen_probe_stack (GEN_INT (TARGET_ABI_UNICOSMK 7771 emit_insn (gen_probe_stack (GEN_INT (-probed)));
7856 ? -probed + 64
7857 : -probed)));
7858 7772
7859 /* We only have to do this probe if we aren't saving registers or 7773 /* We only have to do this probe if we aren't saving registers or
7860 if we are probing beyond the frame because of -fstack-check. */ 7774 if we are probing beyond the frame because of -fstack-check. */
7861 if ((sa_size == 0 && probed_size > probed - 4096) 7775 if ((sa_size == 0 && probed_size > probed - 4096)
7862 || flag_stack_check) 7776 || flag_stack_check || flag_stack_clash_protection)
7863 emit_insn (gen_probe_stack (GEN_INT (-probed_size))); 7777 emit_insn (gen_probe_stack (GEN_INT (-probed_size)));
7864 } 7778 }
7865 7779
7866 if (frame_size != 0) 7780 if (frame_size != 0)
7867 FRP (emit_insn (gen_adddi3 (stack_pointer_rtx, stack_pointer_rtx, 7781 FRP (emit_insn (gen_adddi3 (stack_pointer_rtx, stack_pointer_rtx,
7868 GEN_INT (TARGET_ABI_UNICOSMK 7782 GEN_INT (-frame_size))));
7869 ? -frame_size + 64
7870 : -frame_size))));
7871 } 7783 }
7872 else 7784 else
7873 { 7785 {
7874 /* Here we generate code to set R22 to SP + 4096 and set R23 to the 7786 /* Here we generate code to set R22 to SP + 4096 and set R23 to the
7875 number of 8192 byte blocks to probe. We then probe each block 7787 number of 8192 byte blocks to probe. We then probe each block
7883 rtx ptr = gen_rtx_REG (DImode, 22); 7795 rtx ptr = gen_rtx_REG (DImode, 22);
7884 rtx count = gen_rtx_REG (DImode, 23); 7796 rtx count = gen_rtx_REG (DImode, 23);
7885 rtx seq; 7797 rtx seq;
7886 7798
7887 emit_move_insn (count, GEN_INT (blocks)); 7799 emit_move_insn (count, GEN_INT (blocks));
7888 emit_insn (gen_adddi3 (ptr, stack_pointer_rtx, 7800 emit_insn (gen_adddi3 (ptr, stack_pointer_rtx, GEN_INT (4096)));
7889 GEN_INT (TARGET_ABI_UNICOSMK ? 4096 - 64 : 4096)));
7890 7801
7891 /* Because of the difficulty in emitting a new basic block this 7802 /* Because of the difficulty in emitting a new basic block this
7892 late in the compilation, generate the loop as a single insn. */ 7803 late in the compilation, generate the loop as a single insn. */
7893 emit_insn (gen_prologue_stack_probe_loop (count, ptr)); 7804 emit_insn (gen_prologue_stack_probe_loop (count, ptr));
7894 7805
7895 if ((leftover > 4096 && sa_size == 0) || flag_stack_check) 7806 if ((leftover > 4096 && sa_size == 0)
7896 { 7807 || flag_stack_check || flag_stack_clash_protection)
7897 rtx last = gen_rtx_MEM (DImode, plus_constant (ptr, -leftover)); 7808 {
7809 rtx last = gen_rtx_MEM (DImode,
7810 plus_constant (Pmode, ptr, -leftover));
7898 MEM_VOLATILE_P (last) = 1; 7811 MEM_VOLATILE_P (last) = 1;
7899 emit_move_insn (last, const0_rtx); 7812 emit_move_insn (last, const0_rtx);
7900 } 7813 }
7901 7814
7902 if (TARGET_ABI_WINDOWS_NT || flag_stack_check) 7815 if (flag_stack_check || flag_stack_clash_protection)
7903 { 7816 {
7904 /* For NT stack unwind (done by 'reverse execution'), it's 7817 /* If -fstack-check is specified we have to load the entire
7905 not OK to take the result of a loop, even though the value 7818 constant into a register and subtract from the sp in one go,
7906 is already in ptr, so we reload it via a single operation 7819 because the probed stack size is not equal to the frame size. */
7907 and subtract it to sp.
7908
7909 Same if -fstack-check is specified, because the probed stack
7910 size is not equal to the frame size.
7911
7912 Yes, that's correct -- we have to reload the whole constant
7913 into a temporary via ldah+lda then subtract from sp. */
7914
7915 HOST_WIDE_INT lo, hi; 7820 HOST_WIDE_INT lo, hi;
7916 lo = ((frame_size & 0xffff) ^ 0x8000) - 0x8000; 7821 lo = ((frame_size & 0xffff) ^ 0x8000) - 0x8000;
7917 hi = frame_size - lo; 7822 hi = frame_size - lo;
7918 7823
7919 emit_move_insn (ptr, GEN_INT (hi)); 7824 emit_move_insn (ptr, GEN_INT (hi));
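The lo/hi split above is the standard Alpha idiom for building a constant with ldah (adds a sign-extended 16-bit value shifted left 16) plus lda (adds a sign-extended 16-bit value): lo is the low 16 bits of frame_size read as a signed quantity, and hi is the remainder, a multiple of 0x10000. A worked example:

  #include <assert.h>

  int
  main (void)
  {
    /* For frame_size = 0x1234abcd the low half 0xabcd is negative as a
       signed 16-bit value, so lo borrows from hi:
         lo = ((0xabcd ^ 0x8000) - 0x8000) = -0x5433
         hi = 0x1234abcd - lo = 0x12350000.  */
    long frame_size = 0x1234abcdL;
    long lo = ((frame_size & 0xffff) ^ 0x8000) - 0x8000;
    long hi = frame_size - lo;

    assert (lo == -0x5433L && hi == 0x12350000L);
    assert (hi + lo == frame_size);
    assert ((hi & 0xffffL) == 0);                /* fits the ldah slot */
    assert (lo >= -0x8000L && lo <= 0x7fffL);    /* fits the lda slot */
    return 0;
  }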
7930 /* This alternative is special, because the DWARF code cannot 7835 /* This alternative is special, because the DWARF code cannot
7931 possibly intuit through the loop above. So we invent this 7836 possibly intuit through the loop above. So we invent this
7932 note it looks at instead. */ 7837 note it looks at instead. */
7933 RTX_FRAME_RELATED_P (seq) = 1; 7838 RTX_FRAME_RELATED_P (seq) = 1;
7934 add_reg_note (seq, REG_FRAME_RELATED_EXPR, 7839 add_reg_note (seq, REG_FRAME_RELATED_EXPR,
7935 gen_rtx_SET (VOIDmode, stack_pointer_rtx, 7840 gen_rtx_SET (stack_pointer_rtx,
7936 gen_rtx_PLUS (Pmode, stack_pointer_rtx, 7841 plus_constant (Pmode, stack_pointer_rtx,
7937 GEN_INT (TARGET_ABI_UNICOSMK 7842 -frame_size)));
7938 ? -frame_size + 64 7843 }
7939 : -frame_size)))); 7844
7940 } 7845 /* Cope with very large offsets to the register save area. */
7941 7846 sa_bias = 0;
7942 if (!TARGET_ABI_UNICOSMK) 7847 sa_reg = stack_pointer_rtx;
7943 { 7848 if (reg_offset + sa_size > 0x8000)
7944 HOST_WIDE_INT sa_bias = 0; 7849 {
7945 7850 int low = ((reg_offset & 0xffff) ^ 0x8000) - 0x8000;
7946 /* Cope with very large offsets to the register save area. */ 7851 rtx sa_bias_rtx;
7947 sa_reg = stack_pointer_rtx; 7852
7948 if (reg_offset + sa_size > 0x8000) 7853 if (low + sa_size <= 0x8000)
7949 { 7854 sa_bias = reg_offset - low, reg_offset = low;
7950 int low = ((reg_offset & 0xffff) ^ 0x8000) - 0x8000; 7855 else
7951 rtx sa_bias_rtx; 7856 sa_bias = reg_offset, reg_offset = 0;
7952 7857
7953 if (low + sa_size <= 0x8000) 7858 sa_reg = gen_rtx_REG (DImode, 24);
7954 sa_bias = reg_offset - low, reg_offset = low; 7859 sa_bias_rtx = GEN_INT (sa_bias);
7955 else 7860
7956 sa_bias = reg_offset, reg_offset = 0; 7861 if (add_operand (sa_bias_rtx, DImode))
7957 7862 emit_insn (gen_adddi3 (sa_reg, stack_pointer_rtx, sa_bias_rtx));
7958 sa_reg = gen_rtx_REG (DImode, 24); 7863 else
7959 sa_bias_rtx = GEN_INT (sa_bias); 7864 {
7960 7865 emit_move_insn (sa_reg, sa_bias_rtx);
7961 if (add_operand (sa_bias_rtx, DImode)) 7866 emit_insn (gen_adddi3 (sa_reg, stack_pointer_rtx, sa_reg));
7962 emit_insn (gen_adddi3 (sa_reg, stack_pointer_rtx, sa_bias_rtx)); 7867 }
7963 else 7868 }
7964 { 7869
7965 emit_move_insn (sa_reg, sa_bias_rtx); 7870 /* Save regs in stack order. Beginning with VMS PV. */
7966 emit_insn (gen_adddi3 (sa_reg, stack_pointer_rtx, sa_reg)); 7871 if (TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_STACK)
7967 } 7872 emit_frame_store (REG_PV, stack_pointer_rtx, 0, 0);
7968 } 7873
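Memory displacements on Alpha are signed 16-bit, so once reg_offset + sa_size would run past 0x8000 the save-area base is re-biased into r24. There are two cases: if re-biasing to the sign-extended low half keeps every save slot in range, only the bias moves into the register; otherwise the whole offset does and the per-save displacement restarts at zero. The decision, extracted as a sketch:

  /* Sketch of the re-biasing above; *reg_offset becomes the residual
     displacement used by each emit_frame_store, and the return value is
     the bias added into the new base register (r24).  */
  static long
  save_area_bias (long *reg_offset, long sa_size)
  {
    long bias = 0;
    if (*reg_offset + sa_size > 0x8000)
      {
        long low = ((*reg_offset & 0xffff) ^ 0x8000) - 0x8000;
        if (low + sa_size <= 0x8000)
          {
            bias = *reg_offset - low;
            *reg_offset = low;
          }
        else
          {
            bias = *reg_offset;
            *reg_offset = 0;
          }
      }
    return bias;
  }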
7969 7874 /* Save register RA next. */
7970 /* Save regs in stack order. Beginning with VMS PV. */ 7875 if (imask & (1UL << REG_RA))
7971 if (TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_STACK) 7876 {
7972 emit_frame_store (REG_PV, stack_pointer_rtx, 0, 0); 7877 emit_frame_store (REG_RA, sa_reg, sa_bias, reg_offset);
7973 7878 imask &= ~(1UL << REG_RA);
7974 /* Save register RA next. */ 7879 reg_offset += 8;
7975 if (imask & (1UL << REG_RA)) 7880 }
7976 { 7881
7977 emit_frame_store (REG_RA, sa_reg, sa_bias, reg_offset); 7882 /* Now save any other registers required to be saved. */
7978 imask &= ~(1UL << REG_RA); 7883 for (i = 0; i < 31; i++)
7979 reg_offset += 8; 7884 if (imask & (1UL << i))
7980 } 7885 {
7981 7886 emit_frame_store (i, sa_reg, sa_bias, reg_offset);
7982 /* Now save any other registers required to be saved. */ 7887 reg_offset += 8;
7983 for (i = 0; i < 31; i++) 7888 }
7984 if (imask & (1UL << i)) 7889
7985 { 7890 for (i = 0; i < 31; i++)
7986 emit_frame_store (i, sa_reg, sa_bias, reg_offset); 7891 if (fmask & (1UL << i))
7987 reg_offset += 8; 7892 {
7988 } 7893 emit_frame_store (i+32, sa_reg, sa_bias, reg_offset);
7989 7894 reg_offset += 8;
7990 for (i = 0; i < 31; i++) 7895 }
7991 if (fmask & (1UL << i))
7992 {
7993 emit_frame_store (i+32, sa_reg, sa_bias, reg_offset);
7994 reg_offset += 8;
7995 }
7996 }
7997 else if (TARGET_ABI_UNICOSMK && alpha_procedure_type == PT_STACK)
7998 {
7999 /* The standard frame on the T3E includes space for saving registers.
8000 We just have to use it. We don't have to save the return address and
8001 the old frame pointer here - they are saved in the DSIB. */
8002
8003 reg_offset = -56;
8004 for (i = 9; i < 15; i++)
8005 if (imask & (1UL << i))
8006 {
8007 emit_frame_store (i, hard_frame_pointer_rtx, 0, reg_offset);
8008 reg_offset -= 8;
8009 }
8010 for (i = 2; i < 10; i++)
8011 if (fmask & (1UL << i))
8012 {
8013 emit_frame_store (i+32, hard_frame_pointer_rtx, 0, reg_offset);
8014 reg_offset -= 8;
8015 }
8016 }
8017 7896
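The save loops are driven by two bit masks from alpha_sa_mask: bit i of imask selects integer register i, bit i of fmask selects floating register i (hard register i + 32), and every set bit consumes one 8-byte slot after the PV and RA specials. The scan is the usual one:

  /* Illustrative scan of one save mask, as in the loops above; the
     emit_frame_store call is stubbed out and only the offset
     bookkeeping is shown.  */
  static long
  scan_save_mask (unsigned long mask, int regno_base, long reg_offset)
  {
    int i;
    (void) regno_base;    /* used only by the real emit_frame_store */
    for (i = 0; i < 31; i++)
      if (mask & (1UL << i))
        {
          /* emit_frame_store (regno_base + i, sa_reg, sa_bias, reg_offset); */
          reg_offset += 8;
        }
    return reg_offset;
  }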
8018 if (TARGET_ABI_OPEN_VMS) 7897 if (TARGET_ABI_OPEN_VMS)
8019 { 7898 {
8020 /* Register frame procedures save the fp. */ 7899 /* Register frame procedures save the fp. */
8021 if (alpha_procedure_type == PT_REGISTER) 7900 if (alpha_procedure_type == PT_REGISTER)
8022 { 7901 {
8023 rtx insn = emit_move_insn (gen_rtx_REG (DImode, vms_save_fp_regno), 7902 rtx_insn *insn =
8024 hard_frame_pointer_rtx); 7903 emit_move_insn (gen_rtx_REG (DImode, vms_save_fp_regno),
7904 hard_frame_pointer_rtx);
8025 add_reg_note (insn, REG_CFA_REGISTER, NULL); 7905 add_reg_note (insn, REG_CFA_REGISTER, NULL);
8026 RTX_FRAME_RELATED_P (insn) = 1; 7906 RTX_FRAME_RELATED_P (insn) = 1;
8027 } 7907 }
8028 7908
8029 if (alpha_procedure_type != PT_NULL && vms_base_regno != REG_PV) 7909 if (alpha_procedure_type != PT_NULL && vms_base_regno != REG_PV)
8035 FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx)); 7915 FRP (emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx));
8036 7916
8037 /* If we have to allocate space for outgoing args, do it now. */ 7917 /* If we have to allocate space for outgoing args, do it now. */
8038 if (crtl->outgoing_args_size != 0) 7918 if (crtl->outgoing_args_size != 0)
8039 { 7919 {
8040 rtx seq 7920 rtx_insn *seq
8041 = emit_move_insn (stack_pointer_rtx, 7921 = emit_move_insn (stack_pointer_rtx,
8042 plus_constant 7922 plus_constant
8043 (hard_frame_pointer_rtx, 7923 (Pmode, hard_frame_pointer_rtx,
8044 - (ALPHA_ROUND 7924 - (ALPHA_ROUND
8045 (crtl->outgoing_args_size)))); 7925 (crtl->outgoing_args_size))));
8046 7926
8047 /* Only set FRAME_RELATED_P on the stack adjustment we just emitted 7927 /* Only set FRAME_RELATED_P on the stack adjustment we just emitted
8048 if ! frame_pointer_needed. Setting the bit will change the CFA 7928 if ! frame_pointer_needed. Setting the bit will change the CFA
8060 so when we are not setting the bit here, we are guaranteed to 7940 so when we are not setting the bit here, we are guaranteed to
8061 have emitted an FRP frame pointer update just before. */ 7941 have emitted an FRP frame pointer update just before. */
8062 RTX_FRAME_RELATED_P (seq) = ! frame_pointer_needed; 7942 RTX_FRAME_RELATED_P (seq) = ! frame_pointer_needed;
8063 } 7943 }
8064 } 7944 }
8065 else if (!TARGET_ABI_UNICOSMK) 7945 else
8066 { 7946 {
8067 /* If we need a frame pointer, set it from the stack pointer. */ 7947 /* If we need a frame pointer, set it from the stack pointer. */
8068 if (frame_pointer_needed) 7948 if (frame_pointer_needed)
8069 { 7949 {
8070 if (TARGET_CAN_FAULT_IN_PROLOGUE) 7950 if (TARGET_CAN_FAULT_IN_PROLOGUE)
8104 unsigned long fmask = 0; 7984 unsigned long fmask = 0;
8105 /* Stack space needed for pushing registers clobbered by us. */ 7985 /* Stack space needed for pushing registers clobbered by us. */
8106 HOST_WIDE_INT sa_size; 7986 HOST_WIDE_INT sa_size;
8107 /* Complete stack size needed. */ 7987 /* Complete stack size needed. */
8108 unsigned HOST_WIDE_INT frame_size; 7988 unsigned HOST_WIDE_INT frame_size;
8109 /* The maximum debuggable frame size (512 Kbytes using Tru64 as). */ 7989 /* The maximum debuggable frame size. */
8110 unsigned HOST_WIDE_INT max_frame_size = TARGET_ABI_OSF && !TARGET_GAS 7990 unsigned HOST_WIDE_INT max_frame_size = 1UL << 31;
8111 ? 524288
8112 : 1UL << 31;
8113 /* Offset from base reg to register save area. */ 7991 /* Offset from base reg to register save area. */
8114 HOST_WIDE_INT reg_offset; 7992 HOST_WIDE_INT reg_offset;
8115 char *entry_label = (char *) alloca (strlen (fnname) + 6); 7993 char *entry_label = (char *) alloca (strlen (fnname) + 6);
8116 char *tramp_label = (char *) alloca (strlen (fnname) + 6); 7994 char *tramp_label = (char *) alloca (strlen (fnname) + 6);
8117 int i; 7995 int i;
8118 7996
8119 /* Don't emit an extern directive for functions defined in the same file. */
8120 if (TARGET_ABI_UNICOSMK)
8121 {
8122 tree name_tree;
8123 name_tree = get_identifier (fnname);
8124 TREE_ASM_WRITTEN (name_tree) = 1;
8125 }
8126
8127 #if TARGET_ABI_OPEN_VMS 7997 #if TARGET_ABI_OPEN_VMS
8128 if (vms_debug_main 7998 vms_start_function (fnname);
8129 && strncmp (vms_debug_main, fnname, strlen (vms_debug_main)) == 0)
8130 {
8131 targetm.asm_out.globalize_label (asm_out_file, VMS_DEBUG_MAIN_POINTER);
8132 ASM_OUTPUT_DEF (asm_out_file, VMS_DEBUG_MAIN_POINTER, fnname);
8133 switch_to_section (text_section);
8134 vms_debug_main = NULL;
8135 }
8136 #endif 7999 #endif
8137 8000
8138 alpha_fnname = fnname; 8001 alpha_fnname = fnname;
8139 sa_size = alpha_sa_size (); 8002 sa_size = alpha_sa_size ();
8140 frame_size = compute_frame_size (get_frame_size (), sa_size); 8003 frame_size = compute_frame_size (get_frame_size (), sa_size);
8144 else 8007 else
8145 reg_offset = ALPHA_ROUND (crtl->outgoing_args_size); 8008 reg_offset = ALPHA_ROUND (crtl->outgoing_args_size);
8146 8009
8147 alpha_sa_mask (&imask, &fmask); 8010 alpha_sa_mask (&imask, &fmask);
8148 8011
8149 /* Ecoff can handle multiple .file directives, so put out file and lineno.
8150 We have to do that before the .ent directive as we cannot switch
8151 files within procedures with native ecoff because line numbers are
8152 linked to procedure descriptors.
8153 Outputting the lineno helps debugging of one line functions as they
8154 would otherwise get no line number at all. Please note that we would
8155 like to put out last_linenum from final.c, but it is not accessible. */
8156
8157 if (write_symbols == SDB_DEBUG)
8158 {
8159 #ifdef ASM_OUTPUT_SOURCE_FILENAME
8160 ASM_OUTPUT_SOURCE_FILENAME (file,
8161 DECL_SOURCE_FILE (current_function_decl));
8162 #endif
8163 #ifdef SDB_OUTPUT_SOURCE_LINE
8164 if (debug_info_level != DINFO_LEVEL_TERSE)
8165 SDB_OUTPUT_SOURCE_LINE (file,
8166 DECL_SOURCE_LINE (current_function_decl));
8167 #endif
8168 }
8169
8170 /* Issue function start and label. */ 8012 /* Issue function start and label. */
8171 if (TARGET_ABI_OPEN_VMS 8013 if (TARGET_ABI_OPEN_VMS || !flag_inhibit_size_directive)
8172 || (!TARGET_ABI_UNICOSMK && !flag_inhibit_size_directive))
8173 { 8014 {
8174 fputs ("\t.ent ", file); 8015 fputs ("\t.ent ", file);
8175 assemble_name (file, fnname); 8016 assemble_name (file, fnname);
8176 putc ('\n', file); 8017 putc ('\n', file);
8177 8018
8190 get a special transfer entry point that loads the called functions 8031 get a special transfer entry point that loads the called functions
8191 procedure descriptor and static chain. */ 8032 procedure descriptor and static chain. */
8192 if (TARGET_ABI_OPEN_VMS 8033 if (TARGET_ABI_OPEN_VMS
8193 && !TREE_PUBLIC (decl) 8034 && !TREE_PUBLIC (decl)
8194 && DECL_CONTEXT (decl) 8035 && DECL_CONTEXT (decl)
8195 && !TYPE_P (DECL_CONTEXT (decl))) 8036 && !TYPE_P (DECL_CONTEXT (decl))
8037 && TREE_CODE (DECL_CONTEXT (decl)) != TRANSLATION_UNIT_DECL)
8196 { 8038 {
8197 strcpy (tramp_label, fnname); 8039 strcpy (tramp_label, fnname);
8198 strcat (tramp_label, "..tr"); 8040 strcat (tramp_label, "..tr");
8199 ASM_OUTPUT_LABEL (file, tramp_label); 8041 ASM_OUTPUT_LABEL (file, tramp_label);
8200 fprintf (file, "\tldq $1,24($27)\n"); 8042 fprintf (file, "\tldq $1,24($27)\n");
8203 8045
8204 strcpy (entry_label, fnname); 8046 strcpy (entry_label, fnname);
8205 if (TARGET_ABI_OPEN_VMS) 8047 if (TARGET_ABI_OPEN_VMS)
8206 strcat (entry_label, "..en"); 8048 strcat (entry_label, "..en");
8207 8049
8208 /* For public functions, the label must be globalized by appending an
8209 additional colon. */
8210 if (TARGET_ABI_UNICOSMK && TREE_PUBLIC (decl))
8211 strcat (entry_label, ":");
8212
8213 ASM_OUTPUT_LABEL (file, entry_label); 8050 ASM_OUTPUT_LABEL (file, entry_label);
8214 inside_function = TRUE; 8051 inside_function = TRUE;
8215 8052
8216 if (TARGET_ABI_OPEN_VMS) 8053 if (TARGET_ABI_OPEN_VMS)
8217 fprintf (file, "\t.base $%d\n", vms_base_regno); 8054 fprintf (file, "\t.base $%d\n", vms_base_regno);
8218 8055
8219 if (!TARGET_ABI_OPEN_VMS && !TARGET_ABI_UNICOSMK && TARGET_IEEE_CONFORMANT 8056 if (TARGET_ABI_OSF
8057 && TARGET_IEEE_CONFORMANT
8220 && !flag_inhibit_size_directive) 8058 && !flag_inhibit_size_directive)
8221 { 8059 {
8222 /* Set flags in procedure descriptor to request IEEE-conformant 8060 /* Set flags in procedure descriptor to request IEEE-conformant
8223 math-library routines. The value we set it to is PDSC_EXC_IEEE 8061 math-library routines. The value we set it to is PDSC_EXC_IEEE
8224 (/usr/include/pdsc.h). */ 8062 (/usr/include/pdsc.h). */
8230 alpha_arg_offset = -frame_size + 48; 8068 alpha_arg_offset = -frame_size + 48;
8231 8069
8232 /* Describe our frame. If the frame size is larger than an integer, 8070 /* Describe our frame. If the frame size is larger than an integer,
8233 print it as zero to avoid an assembler error. We won't be 8071 print it as zero to avoid an assembler error. We won't be
8234 properly describing such a frame, but that's the best we can do. */ 8072 properly describing such a frame, but that's the best we can do. */
8235 if (TARGET_ABI_UNICOSMK) 8073 if (TARGET_ABI_OPEN_VMS)
8236 ;
8237 else if (TARGET_ABI_OPEN_VMS)
8238 fprintf (file, "\t.frame $%d," HOST_WIDE_INT_PRINT_DEC ",$26," 8074 fprintf (file, "\t.frame $%d," HOST_WIDE_INT_PRINT_DEC ",$26,"
8239 HOST_WIDE_INT_PRINT_DEC "\n", 8075 HOST_WIDE_INT_PRINT_DEC "\n",
8240 vms_unwind_regno, 8076 vms_unwind_regno,
8241 frame_size >= (1UL << 31) ? 0 : frame_size, 8077 frame_size >= (1UL << 31) ? 0 : frame_size,
8242 reg_offset); 8078 reg_offset);
8246 ? HARD_FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM), 8082 ? HARD_FRAME_POINTER_REGNUM : STACK_POINTER_REGNUM),
8247 frame_size >= max_frame_size ? 0 : frame_size, 8083 frame_size >= max_frame_size ? 0 : frame_size,
8248 crtl->args.pretend_args_size); 8084 crtl->args.pretend_args_size);
8249 8085
8250 /* Describe which registers were spilled. */ 8086 /* Describe which registers were spilled. */
8251 if (TARGET_ABI_UNICOSMK) 8087 if (TARGET_ABI_OPEN_VMS)
8252 ;
8253 else if (TARGET_ABI_OPEN_VMS)
8254 { 8088 {
8255 if (imask) 8089 if (imask)
8256 /* ??? Does VMS care if mask contains ra? The old code didn't 8090 /* ??? Does VMS care if mask contains ra? The old code didn't
8257 set it, so I don't here. */ 8091 set it, so I don't here. */
8258 fprintf (file, "\t.mask 0x%lx,0\n", imask & ~(1UL << REG_RA)); 8092 fprintf (file, "\t.mask 0x%lx,0\n", imask & ~(1UL << REG_RA));
8287 { 8121 {
8288 fprintf (file, "\t.handler __gcc_shell_handler\n"); 8122 fprintf (file, "\t.handler __gcc_shell_handler\n");
8289 fprintf (file, "\t.handler_data %d\n", VMS_COND_HANDLER_FP_OFFSET); 8123 fprintf (file, "\t.handler_data %d\n", VMS_COND_HANDLER_FP_OFFSET);
8290 } 8124 }
8291 8125
8292 /* Ifdef'ed cause link_section are only available then. */ 8126 #ifdef TARGET_VMS_CRASH_DEBUG
8127 /* Support of minimal traceback info. */
8293 switch_to_section (readonly_data_section); 8128 switch_to_section (readonly_data_section);
8294 fprintf (file, "\t.align 3\n"); 8129 fprintf (file, "\t.align 3\n");
8295 assemble_name (file, fnname); fputs ("..na:\n", file); 8130 assemble_name (file, fnname); fputs ("..na:\n", file);
8296 fputs ("\t.ascii \"", file); 8131 fputs ("\t.ascii \"", file);
8297 assemble_name (file, fnname); 8132 assemble_name (file, fnname);
8298 fputs ("\\0\"\n", file); 8133 fputs ("\\0\"\n", file);
8299 alpha_need_linkage (fnname, 1);
8300 switch_to_section (text_section); 8134 switch_to_section (text_section);
8301 #endif 8135 #endif
8136 #endif /* TARGET_ABI_OPEN_VMS */
8302 } 8137 }
8303 8138
8304 /* Emit the .prologue note at the scheduled end of the prologue. */ 8139 /* Emit the .prologue note at the scheduled end of the prologue. */
8305 8140
8306 static void 8141 static void
8307 alpha_output_function_end_prologue (FILE *file) 8142 alpha_output_function_end_prologue (FILE *file)
8308 { 8143 {
8309 if (TARGET_ABI_UNICOSMK) 8144 if (TARGET_ABI_OPEN_VMS)
8310 ;
8311 else if (TARGET_ABI_OPEN_VMS)
8312 fputs ("\t.prologue\n", file); 8145 fputs ("\t.prologue\n", file);
8313 else if (TARGET_ABI_WINDOWS_NT)
8314 fputs ("\t.prologue 0\n", file);
8315 else if (!flag_inhibit_size_directive) 8146 else if (!flag_inhibit_size_directive)
8316 fprintf (file, "\t.prologue %d\n", 8147 fprintf (file, "\t.prologue %d\n",
8317 alpha_function_needs_gp || cfun->is_thunk); 8148 alpha_function_needs_gp || cfun->is_thunk);
8318 } 8149 }
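The integer argument to the OSF .prologue directive tells the assembler whether the function re-establishes the GP, so it can decide whether $gp is valid after the entry sequence; VMS takes the bare directive. Illustrative output only, not verbatim compiler output:

  /* Possible directives emitted by the hook above:
       OpenVMS:                            .prologue
       OSF, function needs GP or a thunk:  .prologue 1
       OSF, otherwise:                     .prologue 0   */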
8319 8150
8352 reg_offset = ALPHA_ROUND (crtl->outgoing_args_size); 8183 reg_offset = ALPHA_ROUND (crtl->outgoing_args_size);
8353 8184
8354 alpha_sa_mask (&imask, &fmask); 8185 alpha_sa_mask (&imask, &fmask);
8355 8186
8356 fp_is_frame_pointer 8187 fp_is_frame_pointer
8357 = ((TARGET_ABI_OPEN_VMS && alpha_procedure_type == PT_STACK) 8188 = (TARGET_ABI_OPEN_VMS
8358 || (!TARGET_ABI_OPEN_VMS && frame_pointer_needed)); 8189 ? alpha_procedure_type == PT_STACK
8190 : frame_pointer_needed);
8359 fp_offset = 0; 8191 fp_offset = 0;
8360 sa_reg = stack_pointer_rtx; 8192 sa_reg = stack_pointer_rtx;
8361 8193
8362 if (crtl->calls_eh_return) 8194 if (crtl->calls_eh_return)
8363 eh_ofs = EH_RETURN_STACKADJ_RTX; 8195 eh_ofs = EH_RETURN_STACKADJ_RTX;
8364 else 8196 else
8365 eh_ofs = NULL_RTX; 8197 eh_ofs = NULL_RTX;
8366 8198
8367 if (!TARGET_ABI_UNICOSMK && sa_size) 8199 if (sa_size)
8368 { 8200 {
8369 /* If we have a frame pointer, restore SP from it. */ 8201 /* If we have a frame pointer, restore SP from it. */
8370 if ((TARGET_ABI_OPEN_VMS 8202 if (TARGET_ABI_OPEN_VMS
8371 && vms_unwind_regno == HARD_FRAME_POINTER_REGNUM) 8203 ? vms_unwind_regno == HARD_FRAME_POINTER_REGNUM
8372 || (!TARGET_ABI_OPEN_VMS && frame_pointer_needed)) 8204 : frame_pointer_needed)
8373 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx); 8205 emit_move_insn (stack_pointer_rtx, hard_frame_pointer_rtx);
8374 8206
8375 /* Cope with very large offsets to the register save area. */ 8207 /* Cope with very large offsets to the register save area. */
8376 if (reg_offset + sa_size > 0x8000) 8208 if (reg_offset + sa_size > 0x8000)
8377 { 8209 {
8382 bias = reg_offset - low, reg_offset = low; 8214 bias = reg_offset - low, reg_offset = low;
8383 else 8215 else
8384 bias = reg_offset, reg_offset = 0; 8216 bias = reg_offset, reg_offset = 0;
8385 8217
8386 sa_reg = gen_rtx_REG (DImode, 22); 8218 sa_reg = gen_rtx_REG (DImode, 22);
8387 sa_reg_exp = plus_constant (stack_pointer_rtx, bias); 8219 sa_reg_exp = plus_constant (Pmode, stack_pointer_rtx, bias);
8388 8220
8389 emit_move_insn (sa_reg, sa_reg_exp); 8221 emit_move_insn (sa_reg, sa_reg_exp);
8390 } 8222 }
8391 8223
8392 /* Restore registers in order, excepting a true frame pointer. */ 8224 /* Restore registers in order, excepting a true frame pointer. */
8393 8225
8394 mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, reg_offset)); 8226 mem = gen_frame_mem (DImode, plus_constant (Pmode, sa_reg, reg_offset));
8395 if (! eh_ofs)
8396 set_mem_alias_set (mem, alpha_sr_alias_set);
8397 reg = gen_rtx_REG (DImode, REG_RA); 8227 reg = gen_rtx_REG (DImode, REG_RA);
8398 emit_move_insn (reg, mem); 8228 emit_move_insn (reg, mem);
8399 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, cfa_restores); 8229 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, cfa_restores);
8400 8230
8401 reg_offset += 8; 8231 reg_offset += 8;
8406 { 8236 {
8407 if (i == HARD_FRAME_POINTER_REGNUM && fp_is_frame_pointer) 8237 if (i == HARD_FRAME_POINTER_REGNUM && fp_is_frame_pointer)
8408 fp_offset = reg_offset; 8238 fp_offset = reg_offset;
8409 else 8239 else
8410 { 8240 {
8411 mem = gen_rtx_MEM (DImode, plus_constant(sa_reg, reg_offset)); 8241 mem = gen_frame_mem (DImode,
8412 set_mem_alias_set (mem, alpha_sr_alias_set); 8242 plus_constant (Pmode, sa_reg,
8243 reg_offset));
8413 reg = gen_rtx_REG (DImode, i); 8244 reg = gen_rtx_REG (DImode, i);
8414 emit_move_insn (reg, mem); 8245 emit_move_insn (reg, mem);
8415 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, 8246 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg,
8416 cfa_restores); 8247 cfa_restores);
8417 } 8248 }
8419 } 8250 }
8420 8251
8421 for (i = 0; i < 31; ++i) 8252 for (i = 0; i < 31; ++i)
8422 if (fmask & (1UL << i)) 8253 if (fmask & (1UL << i))
8423 { 8254 {
8424 mem = gen_rtx_MEM (DFmode, plus_constant(sa_reg, reg_offset)); 8255 mem = gen_frame_mem (DFmode, plus_constant (Pmode, sa_reg,
8425 set_mem_alias_set (mem, alpha_sr_alias_set); 8256 reg_offset));
8426 reg = gen_rtx_REG (DFmode, i+32); 8257 reg = gen_rtx_REG (DFmode, i+32);
8427 emit_move_insn (reg, mem); 8258 emit_move_insn (reg, mem);
8428 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, cfa_restores); 8259 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, cfa_restores);
8429 reg_offset += 8; 8260 reg_offset += 8;
8430 } 8261 }
8431 } 8262 }
8432 else if (TARGET_ABI_UNICOSMK && alpha_procedure_type == PT_STACK)
8433 {
8434 /* Restore callee-saved general-purpose registers. */
8435
8436 reg_offset = -56;
8437
8438 for (i = 9; i < 15; i++)
8439 if (imask & (1UL << i))
8440 {
8441 mem = gen_rtx_MEM (DImode, plus_constant(hard_frame_pointer_rtx,
8442 reg_offset));
8443 set_mem_alias_set (mem, alpha_sr_alias_set);
8444 reg = gen_rtx_REG (DImode, i);
8445 emit_move_insn (reg, mem);
8446 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, cfa_restores);
8447 reg_offset -= 8;
8448 }
8449
8450 for (i = 2; i < 10; i++)
8451 if (fmask & (1UL << i))
8452 {
8453 mem = gen_rtx_MEM (DFmode, plus_constant(hard_frame_pointer_rtx,
8454 reg_offset));
8455 set_mem_alias_set (mem, alpha_sr_alias_set);
8456 reg = gen_rtx_REG (DFmode, i+32);
8457 emit_move_insn (reg, mem);
8458 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, cfa_restores);
8459 reg_offset -= 8;
8460 }
8461
8462 /* Restore the return address from the DSIB. */
8463 mem = gen_rtx_MEM (DImode, plus_constant (hard_frame_pointer_rtx, -8));
8464 set_mem_alias_set (mem, alpha_sr_alias_set);
8465 reg = gen_rtx_REG (DImode, REG_RA);
8466 emit_move_insn (reg, mem);
8467 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, reg, cfa_restores);
8468 }
8469 8263
8470 if (frame_size || eh_ofs) 8264 if (frame_size || eh_ofs)
8471 { 8265 {
8472 sp_adj1 = stack_pointer_rtx; 8266 sp_adj1 = stack_pointer_rtx;
8473 8267
8479 } 8273 }
8480 8274
8481 /* If the stack size is large, begin computation into a temporary 8275 /* If the stack size is large, begin computation into a temporary
8482 register so as not to interfere with a potential fp restore, 8276 register so as not to interfere with a potential fp restore,
8483 which must be consecutive with an SP restore. */ 8277 which must be consecutive with an SP restore. */
8484 if (frame_size < 32768 8278 if (frame_size < 32768 && !cfun->calls_alloca)
8485 && ! (TARGET_ABI_UNICOSMK && cfun->calls_alloca))
8486 sp_adj2 = GEN_INT (frame_size); 8279 sp_adj2 = GEN_INT (frame_size);
8487 else if (TARGET_ABI_UNICOSMK)
8488 {
8489 sp_adj1 = gen_rtx_REG (DImode, 23);
8490 emit_move_insn (sp_adj1, hard_frame_pointer_rtx);
8491 sp_adj2 = const0_rtx;
8492 }
8493 else if (frame_size < 0x40007fffL) 8280 else if (frame_size < 0x40007fffL)
8494 { 8281 {
8495 int low = ((frame_size & 0xffff) ^ 0x8000) - 0x8000; 8282 int low = ((frame_size & 0xffff) ^ 0x8000) - 0x8000;
8496 8283
8497 sp_adj2 = plus_constant (sp_adj1, frame_size - low); 8284 sp_adj2 = plus_constant (Pmode, sp_adj1, frame_size - low);
8498 if (sa_reg_exp && rtx_equal_p (sa_reg_exp, sp_adj2)) 8285 if (sa_reg_exp && rtx_equal_p (sa_reg_exp, sp_adj2))
8499 sp_adj1 = sa_reg; 8286 sp_adj1 = sa_reg;
8500 else 8287 else
8501 { 8288 {
8502 sp_adj1 = gen_rtx_REG (DImode, 23); 8289 sp_adj1 = gen_rtx_REG (DImode, 23);
8510 sp_adj2 = alpha_emit_set_const (tmp, DImode, frame_size, 3, false); 8297 sp_adj2 = alpha_emit_set_const (tmp, DImode, frame_size, 3, false);
8511 if (!sp_adj2) 8298 if (!sp_adj2)
8512 { 8299 {
8513 /* We can't drop new things to memory this late, afaik, 8300 /* We can't drop new things to memory this late, afaik,
8514 so build it up by pieces. */ 8301 so build it up by pieces. */
8515 sp_adj2 = alpha_emit_set_long_const (tmp, frame_size, 8302 sp_adj2 = alpha_emit_set_long_const (tmp, frame_size);
8516 -(frame_size < 0));
8517 gcc_assert (sp_adj2); 8303 gcc_assert (sp_adj2);
8518 } 8304 }
8519 } 8305 }
8520 8306
8521 /* From now on, things must be in order. So emit blockages. */ 8307 /* From now on, things must be in order. So emit blockages. */
8522 8308
8523 /* Restore the frame pointer. */ 8309 /* Restore the frame pointer. */
8524 if (TARGET_ABI_UNICOSMK) 8310 if (fp_is_frame_pointer)
8525 { 8311 {
8526 emit_insn (gen_blockage ()); 8312 emit_insn (gen_blockage ());
8527 mem = gen_rtx_MEM (DImode, 8313 mem = gen_frame_mem (DImode, plus_constant (Pmode, sa_reg,
8528 plus_constant (hard_frame_pointer_rtx, -16)); 8314 fp_offset));
8529 set_mem_alias_set (mem, alpha_sr_alias_set);
8530 emit_move_insn (hard_frame_pointer_rtx, mem);
8531 cfa_restores = alloc_reg_note (REG_CFA_RESTORE,
8532 hard_frame_pointer_rtx, cfa_restores);
8533 }
8534 else if (fp_is_frame_pointer)
8535 {
8536 emit_insn (gen_blockage ());
8537 mem = gen_rtx_MEM (DImode, plus_constant (sa_reg, fp_offset));
8538 set_mem_alias_set (mem, alpha_sr_alias_set);
8539 emit_move_insn (hard_frame_pointer_rtx, mem); 8315 emit_move_insn (hard_frame_pointer_rtx, mem);
8540 cfa_restores = alloc_reg_note (REG_CFA_RESTORE, 8316 cfa_restores = alloc_reg_note (REG_CFA_RESTORE,
8541 hard_frame_pointer_rtx, cfa_restores); 8317 hard_frame_pointer_rtx, cfa_restores);
8542 } 8318 }
8543 else if (TARGET_ABI_OPEN_VMS) 8319 else if (TARGET_ABI_OPEN_VMS)
8570 insn = emit_move_insn (hard_frame_pointer_rtx, 8346 insn = emit_move_insn (hard_frame_pointer_rtx,
8571 gen_rtx_REG (DImode, vms_save_fp_regno)); 8347 gen_rtx_REG (DImode, vms_save_fp_regno));
8572 add_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx); 8348 add_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx);
8573 RTX_FRAME_RELATED_P (insn) = 1; 8349 RTX_FRAME_RELATED_P (insn) = 1;
8574 } 8350 }
8575 else if (TARGET_ABI_UNICOSMK && alpha_procedure_type != PT_STACK)
8576 {
8577 /* Decrement the frame pointer if the function does not have a
8578 frame. */
8579 emit_insn (gen_blockage ());
8580 emit_insn (gen_adddi3 (hard_frame_pointer_rtx,
8581 hard_frame_pointer_rtx, constm1_rtx));
8582 }
8583 } 8351 }
8584 } 8352 }
8585 8353
8586 /* Output the rest of the textual info surrounding the epilogue. */ 8354 /* Output the rest of the textual info surrounding the epilogue. */
8587 8355
8588 void 8356 void
8589 alpha_end_function (FILE *file, const char *fnname, tree decl ATTRIBUTE_UNUSED) 8357 alpha_end_function (FILE *file, const char *fnname, tree decl ATTRIBUTE_UNUSED)
8590 { 8358 {
8591 rtx insn; 8359 rtx_insn *insn;
8592 8360
8593 /* We output a nop after noreturn calls at the very end of the function to 8361 /* We output a nop after noreturn calls at the very end of the function to
8594 ensure that the return address always remains in the caller's code range, 8362 ensure that the return address always remains in the caller's code range,
8595 as not doing so might confuse unwinding engines. */ 8363 as not doing so might confuse unwinding engines. */
8596 insn = get_last_insn (); 8364 insn = get_last_insn ();
8598 insn = prev_active_insn (insn); 8366 insn = prev_active_insn (insn);
8599 if (insn && CALL_P (insn)) 8367 if (insn && CALL_P (insn))
8600 output_asm_insn (get_insn_template (CODE_FOR_nop, NULL), NULL); 8368 output_asm_insn (get_insn_template (CODE_FOR_nop, NULL), NULL);
8601 8369
8602 #if TARGET_ABI_OPEN_VMS 8370 #if TARGET_ABI_OPEN_VMS
8603 alpha_write_linkage (file, fnname, decl); 8371 /* Write the linkage entries. */
8372 alpha_write_linkage (file, fnname);
8604 #endif 8373 #endif
8605 8374
8606 /* End the function. */ 8375 /* End the function. */
8607 if (!TARGET_ABI_UNICOSMK && !flag_inhibit_size_directive) 8376 if (TARGET_ABI_OPEN_VMS
8377 || !flag_inhibit_size_directive)
8608 { 8378 {
8609 fputs ("\t.end ", file); 8379 fputs ("\t.end ", file);
8610 assemble_name (file, fnname); 8380 assemble_name (file, fnname);
8611 putc ('\n', file); 8381 putc ('\n', file);
8612 } 8382 }
8613 inside_function = FALSE; 8383 inside_function = FALSE;
8614 8384 }
8615 /* Output jump tables and the static subroutine information block. */
8616 if (TARGET_ABI_UNICOSMK)
8617 {
8618 unicosmk_output_ssib (file, fnname);
8619 unicosmk_output_deferred_case_vectors (file);
8620 }
8621 }
8622
8623 #if TARGET_ABI_OPEN_VMS
8624 void avms_asm_output_external (FILE *file, tree decl ATTRIBUTE_UNUSED, const char *name)
8625 {
8626 #ifdef DO_CRTL_NAMES
8627 DO_CRTL_NAMES;
8628 #endif
8629 }
8630 #endif
8631 8385
8632 #if TARGET_ABI_OSF 8386 #if TARGET_ABI_OSF
8633 /* Emit a tail call to FUNCTION after adjusting THIS by DELTA. 8387 /* Emit a tail call to FUNCTION after adjusting THIS by DELTA.
8634 8388
8635 In order to avoid the hordes of differences between generated code 8389 In order to avoid the hordes of differences between generated code
8643 alpha_output_mi_thunk_osf (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED, 8397 alpha_output_mi_thunk_osf (FILE *file, tree thunk_fndecl ATTRIBUTE_UNUSED,
8644 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset, 8398 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
8645 tree function) 8399 tree function)
8646 { 8400 {
8647 HOST_WIDE_INT hi, lo; 8401 HOST_WIDE_INT hi, lo;
8648 rtx this_rtx, insn, funexp; 8402 rtx this_rtx, funexp;
8403 rtx_insn *insn;
8649 8404
8650 /* We always require a valid GP. */ 8405 /* We always require a valid GP. */
8651 emit_insn (gen_prologue_ldgp ()); 8406 emit_insn (gen_prologue_ldgp ());
8652 emit_note (NOTE_INSN_PROLOGUE_END); 8407 emit_note (NOTE_INSN_PROLOGUE_END);
8653 8408
8669 if (lo) 8424 if (lo)
8670 emit_insn (gen_adddi3 (this_rtx, this_rtx, GEN_INT (lo))); 8425 emit_insn (gen_adddi3 (this_rtx, this_rtx, GEN_INT (lo)));
8671 } 8426 }
8672 else 8427 else
8673 { 8428 {
8674 rtx tmp = alpha_emit_set_long_const (gen_rtx_REG (Pmode, 0), 8429 rtx tmp = alpha_emit_set_long_const (gen_rtx_REG (Pmode, 0), delta);
8675 delta, -(delta < 0));
8676 emit_insn (gen_adddi3 (this_rtx, this_rtx, tmp)); 8430 emit_insn (gen_adddi3 (this_rtx, this_rtx, tmp));
8677 } 8431 }
8678 8432
8679 /* Add a delta stored in the vtable at VCALL_OFFSET. */ 8433 /* Add a delta stored in the vtable at VCALL_OFFSET. */
8680 if (vcall_offset) 8434 if (vcall_offset)
8692 emit_insn (gen_adddi3 (tmp, tmp, GEN_INT (hi))); 8446 emit_insn (gen_adddi3 (tmp, tmp, GEN_INT (hi)));
8693 } 8447 }
8694 else 8448 else
8695 { 8449 {
8696 tmp2 = alpha_emit_set_long_const (gen_rtx_REG (Pmode, 1), 8450 tmp2 = alpha_emit_set_long_const (gen_rtx_REG (Pmode, 1),
8697 vcall_offset, -(vcall_offset < 0)); 8451 vcall_offset);
8698 emit_insn (gen_adddi3 (tmp, tmp, tmp2)); 8452 emit_insn (gen_adddi3 (tmp, tmp, tmp2));
8699 lo = 0; 8453 lo = 0;
8700 } 8454 }
8701 if (lo) 8455 if (lo)
8702 tmp2 = gen_rtx_PLUS (Pmode, tmp, GEN_INT (lo)); 8456 tmp2 = gen_rtx_PLUS (Pmode, tmp, GEN_INT (lo));
8721 /* Run just enough of rest_of_compilation to get the insns emitted. 8475 /* Run just enough of rest_of_compilation to get the insns emitted.
8722 There's not really enough bulk here to make other passes such as 8476 There's not really enough bulk here to make other passes such as
8723 instruction scheduling worth while. Note that use_thunk calls 8477 instruction scheduling worth while. Note that use_thunk calls
8724 assemble_start_function and assemble_end_function. */ 8478 assemble_start_function and assemble_end_function. */
8725 insn = get_insns (); 8479 insn = get_insns ();
8726 insn_locators_alloc ();
8727 shorten_branches (insn); 8480 shorten_branches (insn);
8728 final_start_function (insn, file, 1); 8481 final_start_function (insn, file, 1);
8729 final (insn, file, 1); 8482 final (insn, file, 1);
8730 final_end_function (); 8483 final_end_function ();
8731 } 8484 }
8732 #endif /* TARGET_ABI_OSF */ 8485 #endif /* TARGET_ABI_OSF */
8733 8486
8734 /* Debugging support. */ 8487 /* Debugging support. */
8735 8488
8736 #include "gstab.h" 8489 #include "gstab.h"
8737
8738 /* Count the number of sdb related labels are generated (to find block
8739 start and end boundaries). */
8740
8741 int sdb_label_count = 0;
8742 8490
8743 /* Name of the file containing the current function. */ 8491 /* Name of the file containing the current function. */
8744 8492
8745 static const char *current_function_file = ""; 8493 static const char *current_function_file = "";
8746 8494
8762 ++num_source_filenames; 8510 ++num_source_filenames;
8763 current_function_file = name; 8511 current_function_file = name;
8764 fprintf (stream, "\t.file\t%d ", num_source_filenames); 8512 fprintf (stream, "\t.file\t%d ", num_source_filenames);
8765 output_quoted_string (stream, name); 8513 output_quoted_string (stream, name);
8766 fprintf (stream, "\n"); 8514 fprintf (stream, "\n");
8767 if (!TARGET_GAS && write_symbols == DBX_DEBUG) 8515 }
8768 fprintf (stream, "\t#@stabs\n");
8769 }
8770
8771 else if (write_symbols == DBX_DEBUG)
8772 /* dbxout.c will emit an appropriate .stabs directive. */
8773 return;
8774 8516
8775 else if (name != current_function_file 8517 else if (name != current_function_file
8776 && strcmp (name, current_function_file) != 0) 8518 && strcmp (name, current_function_file) != 0)
8777 { 8519 {
8778 if (inside_function && ! TARGET_GAS) 8520 ++num_source_filenames;
8779 fprintf (stream, "\t#.file\t%d ", num_source_filenames); 8521 current_function_file = name;
8780 else 8522 fprintf (stream, "\t.file\t%d ", num_source_filenames);
8781 {
8782 ++num_source_filenames;
8783 current_function_file = name;
8784 fprintf (stream, "\t.file\t%d ", num_source_filenames);
8785 }
8786 8523
8787 output_quoted_string (stream, name); 8524 output_quoted_string (stream, name);
8788 fprintf (stream, "\n"); 8525 fprintf (stream, "\n");
8789 } 8526 }
8790 } 8527 }
8877 8614
8878 /* Find the regs used in memory address computation: */ 8615 /* Find the regs used in memory address computation: */
8879 summarize_insn (XEXP (x, 0), sum, 0); 8616 summarize_insn (XEXP (x, 0), sum, 0);
8880 break; 8617 break;
8881 8618
8882 case CONST_INT: case CONST_DOUBLE: 8619 case CONST_INT: case CONST_WIDE_INT: case CONST_DOUBLE:
8883 case SYMBOL_REF: case LABEL_REF: case CONST: 8620 case SYMBOL_REF: case LABEL_REF: case CONST:
8884 case SCRATCH: case ASM_INPUT: 8621 case SCRATCH: case ASM_INPUT:
8885 break; 8622 break;
8886 8623
8887 /* Handle common unary and binary ops for efficiency. */ 8624 /* Handle common unary and binary ops for efficiency. */
8888 case COMPARE: case PLUS: case MINUS: case MULT: case DIV: 8625 case COMPARE: case PLUS: case MINUS: case MULT: case DIV:
8961 static void 8698 static void
8962 alpha_handle_trap_shadows (void) 8699 alpha_handle_trap_shadows (void)
8963 { 8700 {
8964 struct shadow_summary shadow; 8701 struct shadow_summary shadow;
8965 int trap_pending, exception_nesting; 8702 int trap_pending, exception_nesting;
8966 rtx i, n; 8703 rtx_insn *i, *n;
8967 8704
8968 trap_pending = 0; 8705 trap_pending = 0;
8969 exception_nesting = 0; 8706 exception_nesting = 0;
8970 shadow.used.i = 0; 8707 shadow.used.i = 0;
8971 shadow.used.fp = 0; 8708 shadow.used.fp = 0;
9051 8788
9052 goto close_shadow; 8789 goto close_shadow;
9053 } 8790 }
9054 break; 8791 break;
9055 8792
8793 case BARRIER:
8794 /* __builtin_unreachable can expand to no code at all,
8795 leaving (barrier) RTXes in the instruction stream. */
8796 goto close_shadow_notrapb;
8797
9056 case JUMP_INSN: 8798 case JUMP_INSN:
9057 case CALL_INSN: 8799 case CALL_INSN:
9058 case CODE_LABEL: 8800 case CODE_LABEL:
9059 goto close_shadow; 8801 goto close_shadow;
9060 8802
9066 { 8808 {
9067 close_shadow: 8809 close_shadow:
9068 n = emit_insn_before (gen_trapb (), i); 8810 n = emit_insn_before (gen_trapb (), i);
9069 PUT_MODE (n, TImode); 8811 PUT_MODE (n, TImode);
9070 PUT_MODE (i, TImode); 8812 PUT_MODE (i, TImode);
8813 close_shadow_notrapb:
9071 trap_pending = 0; 8814 trap_pending = 0;
9072 shadow.used.i = 0; 8815 shadow.used.i = 0;
9073 shadow.used.fp = 0; 8816 shadow.used.fp = 0;
9074 shadow.used.mem = 0; 8817 shadow.used.mem = 0;
9075 shadow.defd = shadow.used; 8818 shadow.defd = shadow.used;
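The BARRIER case is new: __builtin_unreachable can expand to no code at all and leave a bare (barrier) in the stream, so the shadow is closed through close_shadow_notrapb, skipping the trapb that would otherwise be emitted before a following insn. A source fragment that can end this way, as an assumed example:

  /* Hypothetical example: every path returns from the switch, and the
     trailing __builtin_unreachable emits no insns, leaving only a
     barrier at the end of the function body.  */
  int
  f (int x)
  {
    switch (x & 3)
      {
      case 0: return 10;
      case 1: return 20;
      case 2: return 30;
      case 3: return 40;
      }
    __builtin_unreachable ();
  }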
9115 EV5_FA = 32, 8858 EV5_FA = 32,
9116 EV5_FM = 64 8859 EV5_FM = 64
9117 }; 8860 };
9118 8861
9119 static enum alphaev4_pipe 8862 static enum alphaev4_pipe
9120 alphaev4_insn_pipe (rtx insn) 8863 alphaev4_insn_pipe (rtx_insn *insn)
9121 { 8864 {
9122 if (recog_memoized (insn) < 0) 8865 if (recog_memoized (insn) < 0)
9123 return EV4_STOP; 8866 return EV4_STOP;
9124 if (get_attr_length (insn) != 4) 8867 if (get_attr_length (insn) != 4)
9125 return EV4_STOP; 8868 return EV4_STOP;
9164 gcc_unreachable (); 8907 gcc_unreachable ();
9165 } 8908 }
9166 } 8909 }
9167 8910
9168 static enum alphaev5_pipe 8911 static enum alphaev5_pipe
9169 alphaev5_insn_pipe (rtx insn) 8912 alphaev5_insn_pipe (rtx_insn *insn)
9170 { 8913 {
9171 if (recog_memoized (insn) < 0) 8914 if (recog_memoized (insn) < 0)
9172 return EV5_STOP; 8915 return EV5_STOP;
9173 if (get_attr_length (insn) != 4) 8916 if (get_attr_length (insn) != 4)
9174 return EV5_STOP; 8917 return EV5_STOP;
9224 The mask bits come from alphaev4_pipe above. If EV4_IBX is set, then 8967 The mask bits come from alphaev4_pipe above. If EV4_IBX is set, then
9225 the insn in EV4_IB0 can be swapped by the hardware into EV4_IB1. 8968 the insn in EV4_IB0 can be swapped by the hardware into EV4_IB1.
9226 8969
9227 LEN is, of course, the length of the group in bytes. */ 8970 LEN is, of course, the length of the group in bytes. */
9228 8971
9229 static rtx 8972 static rtx_insn *
9230 alphaev4_next_group (rtx insn, int *pin_use, int *plen) 8973 alphaev4_next_group (rtx_insn *insn, int *pin_use, int *plen)
9231 { 8974 {
9232 int len, in_use; 8975 int len, in_use;
9233 8976
9234 len = in_use = 0; 8977 len = in_use = 0;
9235 8978
9322 The mask bits come from alphaev5_pipe above. If EV5_E01 is set, then 9065 The mask bits come from alphaev5_pipe above. If EV5_E01 is set, then
9323 the insn in EV5_E0 can be swapped by the hardware into EV5_E1. 9066 the insn in EV5_E0 can be swapped by the hardware into EV5_E1.
9324 9067
9325 LEN is, of course, the length of the group in bytes. */ 9068 LEN is, of course, the length of the group in bytes. */
9326 9069
9327 static rtx 9070 static rtx_insn *
9328 alphaev5_next_group (rtx insn, int *pin_use, int *plen) 9071 alphaev5_next_group (rtx_insn *insn, int *pin_use, int *plen)
9329 { 9072 {
9330 int len, in_use; 9073 int len, in_use;
9331 9074
9332 len = in_use = 0; 9075 len = in_use = 0;
9333 9076
9506 } 9249 }
9507 9250
9508 /* The instruction group alignment main loop. */ 9251 /* The instruction group alignment main loop. */
9509 9252
9510 static void 9253 static void
9511 alpha_align_insns (unsigned int max_align, 9254 alpha_align_insns_1 (unsigned int max_align,
9512 rtx (*next_group) (rtx, int *, int *), 9255 rtx_insn *(*next_group) (rtx_insn *, int *, int *),
9513 rtx (*next_nop) (int *)) 9256 rtx (*next_nop) (int *))
9514 { 9257 {
9515 /* ALIGN is the known alignment for the insn group. */ 9258 /* ALIGN is the known alignment for the insn group. */
9516 unsigned int align; 9259 unsigned int align;
9517 /* OFS is the offset of the current insn in the insn group. */ 9260 /* OFS is the offset of the current insn in the insn group. */
9518 int ofs; 9261 int ofs;
9519 int prev_in_use, in_use, len, ldgp; 9262 int prev_in_use, in_use, len, ldgp;
9520 rtx i, next; 9263 rtx_insn *i, *next;
9521 9264
9522 /* Let shorten branches care for assigning alignments to code labels. */ 9265 /* Let shorten branches care for assigning alignments to code labels. */
9523 shorten_branches (get_insns ()); 9266 shorten_branches (get_insns ());
9524 9267
9525 if (align_functions < 4) 9268 if (align_functions < 4)
9573 /* If the known alignment is smaller than the recognized insn group, 9316 /* If the known alignment is smaller than the recognized insn group,
9574 realign the output. */ 9317 realign the output. */
9575 else if ((int) align < len) 9318 else if ((int) align < len)
9576 { 9319 {
9577 unsigned int new_log_align = len > 8 ? 4 : 3; 9320 unsigned int new_log_align = len > 8 ? 4 : 3;
9578 rtx prev, where; 9321 rtx_insn *prev, *where;
9579 9322
9580 where = prev = prev_nonnote_insn (i); 9323 where = prev = prev_nonnote_insn (i);
9581 if (!where || !LABEL_P (where)) 9324 if (!where || !LABEL_P (where))
9582 where = i; 9325 where = i;
9583 9326
9602 were issued in the previous group to make sure that all of 9345 were issued in the previous group to make sure that all of
9603 the added nops are really free. */ 9346 the added nops are really free. */
9604 else if (ofs + len > (int) align) 9347 else if (ofs + len > (int) align)
9605 { 9348 {
9606 int nop_count = (align - ofs) / 4; 9349 int nop_count = (align - ofs) / 4;
9607 rtx where; 9350 rtx_insn *where;
9608 9351
9609 /* Insert nops before labels, branches, and calls to truly merge 9352 /* Insert nops before labels, branches, and calls to truly merge
9610 the execution of the nops with the previous instruction group. */ 9353 the execution of the nops with the previous instruction group. */
9611 where = prev_nonnote_insn (i); 9354 where = prev_nonnote_insn (i);
9612 if (where) 9355 if (where)
9613 { 9356 {
9614 if (LABEL_P (where)) 9357 if (LABEL_P (where))
9615 { 9358 {
9616 rtx where2 = prev_nonnote_insn (where); 9359 rtx_insn *where2 = prev_nonnote_insn (where);
9617 if (where2 && JUMP_P (where2)) 9360 if (where2 && JUMP_P (where2))
9618 where = where2; 9361 where = where2;
9619 } 9362 }
9620 else if (NONJUMP_INSN_P (where)) 9363 else if (NONJUMP_INSN_P (where))
9621 where = i; 9364 where = i;
9633 prev_in_use = in_use; 9376 prev_in_use = in_use;
9634 i = next; 9377 i = next;
9635 } 9378 }
9636 } 9379 }
9637 9380
9638 /* Insert an unop between a noreturn function call and GP load. */
9639
9640 static void 9381 static void
9641 alpha_pad_noreturn (void) 9382 alpha_align_insns (void)
9642 { 9383 {
9643 rtx insn, next; 9384 if (alpha_tune == PROCESSOR_EV4)
9385 alpha_align_insns_1 (8, alphaev4_next_group, alphaev4_next_nop);
9386 else if (alpha_tune == PROCESSOR_EV5)
9387 alpha_align_insns_1 (16, alphaev5_next_group, alphaev5_next_nop);
9388 else
9389 gcc_unreachable ();
9390 }
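The wrapper selects the issue-group geometry: 8-byte groups (two insns) for EV4 and 16-byte groups (four insns) for EV5, matching the max_align arguments. Inside alpha_align_insns_1, padding is counted in 4-byte instruction slots, so moving from offset ofs to the next align boundary costs (align - ofs) / 4 nops; a quick check of the arithmetic:

  #include <assert.h>

  int
  main (void)
  {
    /* EV5-style 16-byte groups: an insn group that would start at
       offset 8 needs two 4-byte nops to reach the next boundary.  */
    int align = 16, ofs = 8;
    assert ((align - ofs) / 4 == 2);
    return 0;
  }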
9391
9392 /* Insert an unop between sibcall or noreturn function call and GP load. */
9393
9394 static void
9395 alpha_pad_function_end (void)
9396 {
9397 rtx_insn *insn, *next;
9644 9398
9645 for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) 9399 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
9646 { 9400 {
9647 if (!CALL_P (insn) 9401 if (!CALL_P (insn)
9648 || !find_reg_note (insn, REG_NORETURN, NULL_RTX)) 9402 || !(SIBLING_CALL_P (insn)
9403 || find_reg_note (insn, REG_NORETURN, NULL_RTX)))
9649 continue; 9404 continue;
9650 9405
9406 /* Make sure we do not split a call and its corresponding
9407 CALL_ARG_LOCATION note. */
9408 next = NEXT_INSN (insn);
9409 if (next == NULL)
9410 continue;
9411 if (NOTE_P (next) && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
9412 insn = next;
9413
9651 next = next_active_insn (insn); 9414 next = next_active_insn (insn);
9652
9653 if (next) 9415 if (next)
9654 { 9416 {
9655 rtx pat = PATTERN (next); 9417 rtx pat = PATTERN (next);
9656 9418
9657 if (GET_CODE (pat) == SET 9419 if (GET_CODE (pat) == SET
9665 /* Machine dependent reorg pass. */ 9427 /* Machine dependent reorg pass. */
9666 9428
9667 static void 9429 static void
9668 alpha_reorg (void) 9430 alpha_reorg (void)
9669 { 9431 {
9670 /* Workaround for a linker error that triggers when an 9432 /* Workaround for a linker error that triggers when an exception
9671 exception handler immediately follows a noreturn function. 9433 handler immediately follows a sibcall or a noreturn function.
9434
9435 In the sibcall case:
9436
9437 The instruction stream from an object file:
9438
9439 1d8: 00 00 fb 6b jmp (t12)
9440 1dc: 00 00 ba 27 ldah gp,0(ra)
9441 1e0: 00 00 bd 23 lda gp,0(gp)
9442 1e4: 00 00 7d a7 ldq t12,0(gp)
9443 1e8: 00 40 5b 6b jsr ra,(t12),1ec <__funcZ+0x1ec>
9444
9445 was converted in the final link pass to:
9446
9447 12003aa88: 67 fa ff c3 br 120039428 <...>
9448 12003aa8c: 00 00 fe 2f unop
9449 12003aa90: 00 00 fe 2f unop
9450 12003aa94: 48 83 7d a7 ldq t12,-31928(gp)
9451 12003aa98: 00 40 5b 6b jsr ra,(t12),12003aa9c <__func+0x1ec>
9452
9453 And in the noreturn case:
9672 9454
9673 The instruction stream from an object file: 9455 The instruction stream from an object file:
9674 9456
9675 54: 00 40 5b 6b jsr ra,(t12),58 <__func+0x58> 9457 54: 00 40 5b 6b jsr ra,(t12),58 <__func+0x58>
9676 58: 00 00 ba 27 ldah gp,0(ra) 9458 58: 00 00 ba 27 ldah gp,0(ra)
9686 fdb30: 30 82 7d a7 ldq t12,-32208(gp) 9468 fdb30: 30 82 7d a7 ldq t12,-32208(gp)
9687 fdb34: 00 40 5b 6b jsr ra,(t12),fdb38 <__func+0x68> 9469 fdb34: 00 40 5b 6b jsr ra,(t12),fdb38 <__func+0x68>
9688 9470
9689 GP load instructions were wrongly cleared by the linker relaxation 9471 GP load instructions were wrongly cleared by the linker relaxation
9690 pass. This workaround prevents removal of GP loads by inserting 9472 pass. This workaround prevents removal of GP loads by inserting
9691 an unop instruction between a noreturn function call and 9473 an unop instruction between a sibcall or noreturn function call and
9692 exception handler prologue. */ 9474 exception handler prologue. */
9693 9475
9694 if (current_function_has_exception_handlers ()) 9476 if (current_function_has_exception_handlers ())
9695 alpha_pad_noreturn (); 9477 alpha_pad_function_end ();
9696 9478
9697 if (alpha_tp != ALPHA_TP_PROG || flag_exceptions) 9479 /* CALL_PAL that implements trap insn, updates program counter to point
9698 alpha_handle_trap_shadows (); 9480 after the insn. In case trap is the last insn in the function,
9699 9481 emit NOP to guarantee that PC remains inside function boundaries.
9700 /* Due to the number of extra trapb insns, don't bother fixing up 9482 This workaround is needed to get reliable backtraces. */
9701 alignment when trap precision is instruction. Moreover, we can 9483
9702 only do our job when sched2 is run. */ 9484 rtx_insn *insn = prev_active_insn (get_last_insn ());
9703 if (optimize && !optimize_size 9485
9704 && alpha_tp != ALPHA_TP_INSN 9486 if (insn && NONJUMP_INSN_P (insn))
9705 && flag_schedule_insns_after_reload) 9487 {
9706 { 9488 rtx pat = PATTERN (insn);
9707 if (alpha_tune == PROCESSOR_EV4) 9489 if (GET_CODE (pat) == PARALLEL)
9708 alpha_align_insns (8, alphaev4_next_group, alphaev4_next_nop); 9490 {
9709 else if (alpha_tune == PROCESSOR_EV5) 9491 rtx vec = XVECEXP (pat, 0, 0);
9710 alpha_align_insns (16, alphaev5_next_group, alphaev5_next_nop); 9492 if (GET_CODE (vec) == TRAP_IF
9493 && XEXP (vec, 0) == const1_rtx)
9494 emit_insn_after (gen_unop (), insn);
9495 }
9711 } 9496 }
9712 } 9497 }
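The pattern matched here is the Alpha trap insn, a PARALLEL whose first element is a TRAP_IF with a constant-true condition. CALL_PAL leaves the PC pointing past the trapping insn, so when the trap would otherwise end the function, the appended unop keeps that address inside the function and backtraces stay attributable. The case being patched is, for example:

  /* Hypothetical source hitting the workaround: the trap is the final
     insn of the function, so a no-op is appended after it.  */
  void
  die (void)
  {
    __builtin_trap ();
  }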
9713 9498
9714 #if !TARGET_ABI_UNICOSMK
9715
9716 #ifdef HAVE_STAMP_H
9717 #include <stamp.h>
9718 #endif
9719
9720 static void 9499 static void
9721 alpha_file_start (void) 9500 alpha_file_start (void)
9722 { 9501 {
9723 #ifdef OBJECT_FORMAT_ELF
9724 /* If emitting dwarf2 debug information, we cannot generate a .file
9725 directive to start the file, as it will conflict with dwarf2out
9726 file numbers. So it's only useful when emitting mdebug output. */
9727 targetm.asm_file_start_file_directive = (write_symbols == DBX_DEBUG);
9728 #endif
9729
9730 default_file_start (); 9502 default_file_start ();
9731 #ifdef MS_STAMP
9732 fprintf (asm_out_file, "\t.verstamp %d %d\n", MS_STAMP, LS_STAMP);
9733 #endif
9734 9503
9735 fputs ("\t.set noreorder\n", asm_out_file); 9504 fputs ("\t.set noreorder\n", asm_out_file);
9736 fputs ("\t.set volatile\n", asm_out_file); 9505 fputs ("\t.set volatile\n", asm_out_file);
9737 if (!TARGET_ABI_OPEN_VMS) 9506 if (TARGET_ABI_OSF)
9738 fputs ("\t.set noat\n", asm_out_file); 9507 fputs ("\t.set noat\n", asm_out_file);
9739 if (TARGET_EXPLICIT_RELOCS) 9508 if (TARGET_EXPLICIT_RELOCS)
9740 fputs ("\t.set nomacro\n", asm_out_file); 9509 fputs ("\t.set nomacro\n", asm_out_file);
9741 if (TARGET_SUPPORT_ARCH | TARGET_BWX | TARGET_MAX | TARGET_FIX | TARGET_CIX) 9510 if (TARGET_SUPPORT_ARCH | TARGET_BWX | TARGET_MAX | TARGET_FIX | TARGET_CIX)
9742 { 9511 {
9754 arch = "ev4"; 9523 arch = "ev4";
9755 9524
9756 fprintf (asm_out_file, "\t.arch %s\n", arch); 9525 fprintf (asm_out_file, "\t.arch %s\n", arch);
9757 } 9526 }
9758 } 9527 }
9759 #endif 9528
9760
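Taken together, the hook opens every assembly file with the mode directives the backend depends on. An illustrative preamble follows; which .set and .arch lines actually appear depends on the ABI, -mexplicit-relocs, and the selected sub-architecture:

  /* Possible output of alpha_file_start:
         .set noreorder
         .set volatile
         .set noat          (OSF only)
         .set nomacro       (with explicit relocations)
         .arch ev6          (when an extension set is enabled)   */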
9761 #ifdef OBJECT_FORMAT_ELF
9762 /* Since we don't have a .dynbss section, we should not allow global 9529 /* Since we don't have a .dynbss section, we should not allow global
9763 relocations in the .rodata section. */ 9530 relocations in the .rodata section. */
9764 9531
9765 static int 9532 static int
9766 alpha_elf_reloc_rw_mask (void) 9533 alpha_elf_reloc_rw_mask (void)
9770 9537
9771 /* Return a section for X. The only special thing we do here is to 9538 /* Return a section for X. The only special thing we do here is to
9772 honor small data. */ 9539 honor small data. */
9773 9540
9774 static section * 9541 static section *
9775 alpha_elf_select_rtx_section (enum machine_mode mode, rtx x, 9542 alpha_elf_select_rtx_section (machine_mode mode, rtx x,
9776 unsigned HOST_WIDE_INT align) 9543 unsigned HOST_WIDE_INT align)
9777 { 9544 {
9778 if (TARGET_SMALL_DATA && GET_MODE_SIZE (mode) <= g_switch_value) 9545 if (TARGET_SMALL_DATA && GET_MODE_SIZE (mode) <= g_switch_value)
9779 /* ??? Consider using mergeable sdata sections. */ 9546 /* ??? Consider using mergeable sdata sections. */
9780 return sdata_section; 9547 return sdata_section;
9796 flags = SECTION_SMALL; 9563 flags = SECTION_SMALL;
9797 9564
9798 flags |= default_section_type_flags (decl, name, reloc); 9565 flags |= default_section_type_flags (decl, name, reloc);
9799 return flags; 9566 return flags;
9800 } 9567 }
9801 #endif /* OBJECT_FORMAT_ELF */
9802 9568
9803 /* Structure to collect function names for final output in link section. */ 9569 /* Structure to collect function names for final output in link section. */
9804 /* Note that items marked with GTY can't be ifdef'ed out. */ 9570 /* Note that items marked with GTY can't be ifdef'ed out. */
9805 9571
9806 enum links_kind {KIND_UNUSED, KIND_LOCAL, KIND_EXTERN}; 9572 enum reloc_kind
9807 enum reloc_kind {KIND_LINKAGE, KIND_CODEADDR}; 9573 {
9574 KIND_LINKAGE,
9575 KIND_CODEADDR
9576 };
9808 9577
9809 struct GTY(()) alpha_links 9578 struct GTY(()) alpha_links
9810 { 9579 {
9811 int num; 9580 rtx func;
9812 const char *target;
9813 rtx linkage; 9581 rtx linkage;
9814 enum links_kind lkind;
9815 enum reloc_kind rkind; 9582 enum reloc_kind rkind;
9816 }; 9583 };
9817 9584
9818 struct GTY(()) alpha_funcs
9819 {
9820 int num;
9821 splay_tree GTY ((param1_is (char *), param2_is (struct alpha_links *)))
9822 links;
9823 };
9824
9825 static GTY ((param1_is (char *), param2_is (struct alpha_links *)))
9826 splay_tree alpha_links_tree;
9827 static GTY ((param1_is (tree), param2_is (struct alpha_funcs *)))
9828 splay_tree alpha_funcs_tree;
9829
9830 static GTY(()) int alpha_funcs_num;
9831
9832 #if TARGET_ABI_OPEN_VMS 9585 #if TARGET_ABI_OPEN_VMS
9833 9586
9834 /* Return the VMS argument type corresponding to MODE. */ 9587 /* Return the VMS argument type corresponding to MODE. */
9835 9588
9836 enum avms_arg_type 9589 enum avms_arg_type
9837 alpha_arg_type (enum machine_mode mode) 9590 alpha_arg_type (machine_mode mode)
9838 { 9591 {
9839 switch (mode) 9592 switch (mode)
9840 { 9593 {
9841 case SFmode: 9594 case E_SFmode:
9842 return TARGET_FLOAT_VAX ? FF : FS; 9595 return TARGET_FLOAT_VAX ? FF : FS;
9843 case DFmode: 9596 case E_DFmode:
9844 return TARGET_FLOAT_VAX ? FD : FT; 9597 return TARGET_FLOAT_VAX ? FD : FT;
9845 default: 9598 default:
9846 return I64; 9599 return I64;
9847 } 9600 }
9848 } 9601 }
9860 regval |= ((int) cum.atypes[i]) << (i * 3 + 8); 9613 regval |= ((int) cum.atypes[i]) << (i * 3 + 8);
9861 9614
9862 return GEN_INT (regval); 9615 return GEN_INT (regval);
9863 } 9616 }
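The VMS argument-information register is packed with one 3-bit type code per argument slot starting at bit 8, so slot i lands at bits 8 + 3i. A sketch with symbolic type codes (the real values come from enum avms_arg_type); the count-in-low-bits layout and the six-slot limit, mirroring the six argument registers, are assumptions here:

  /* Sketch of the AI-register packing above; layout details beyond the
     visible shift expression are assumed for illustration.  */
  static long
  pack_arg_info (int num_args, const int atypes[6])
  {
    long regval = num_args;
    int i;
    for (i = 0; i < 6 && i < num_args; i++)
      regval |= (long) atypes[i] << (i * 3 + 8);
    return regval;
  }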
9864 9617
9865 /* Register the need for a (fake) .linkage entry for calls to function NAME.
9866 IS_LOCAL is 1 if this is for a definition, 0 if this is for a real call.
9867 Return a SYMBOL_REF suited to the call instruction. */
9868
9869 rtx
9870 alpha_need_linkage (const char *name, int is_local)
9871 {
9872 splay_tree_node node;
9873 struct alpha_links *al;
9874 const char *target;
9875 tree id;
9876
9877 if (name[0] == '*')
9878 name++;
9879
9880 if (is_local)
9881 {
9882 struct alpha_funcs *cfaf;
9883
9884 if (!alpha_funcs_tree)
9885 alpha_funcs_tree = splay_tree_new_ggc
9886 (splay_tree_compare_pointers,
9887 ggc_alloc_splay_tree_tree_node_tree_node_splay_tree_s,
9888 ggc_alloc_splay_tree_tree_node_tree_node_splay_tree_node_s);
9889
9890
9891 cfaf = ggc_alloc_alpha_funcs ();
9892
9893 cfaf->links = 0;
9894 cfaf->num = ++alpha_funcs_num;
9895
9896 splay_tree_insert (alpha_funcs_tree,
9897 (splay_tree_key) current_function_decl,
9898 (splay_tree_value) cfaf);
9899 }
9900
9901 if (alpha_links_tree)
9902 {
9903 /* Is this name already defined? */
9904
9905 node = splay_tree_lookup (alpha_links_tree, (splay_tree_key) name);
9906 if (node)
9907 {
9908 al = (struct alpha_links *) node->value;
9909 if (is_local)
9910 {
9911 /* Defined here but external assumed. */
9912 if (al->lkind == KIND_EXTERN)
9913 al->lkind = KIND_LOCAL;
9914 }
9915 else
9916 {
9917 /* Used here but unused assumed. */
9918 if (al->lkind == KIND_UNUSED)
9919 al->lkind = KIND_LOCAL;
9920 }
9921 return al->linkage;
9922 }
9923 }
9924 else
9925 alpha_links_tree = splay_tree_new_ggc
9926 ((splay_tree_compare_fn) strcmp,
9927 ggc_alloc_splay_tree_str_alpha_links_splay_tree_s,
9928 ggc_alloc_splay_tree_str_alpha_links_splay_tree_node_s);
9929
9930 al = ggc_alloc_alpha_links ();
9931 name = ggc_strdup (name);
9932
9933 /* Assume external if no definition. */
9934 al->lkind = (is_local ? KIND_UNUSED : KIND_EXTERN);
9935
9936 /* Ensure we have an IDENTIFIER so assemble_name can mark it used
9937 and find the ultimate alias target like assemble_name. */
9938 id = get_identifier (name);
9939 target = NULL;
9940 while (IDENTIFIER_TRANSPARENT_ALIAS (id))
9941 {
9942 id = TREE_CHAIN (id);
9943 target = IDENTIFIER_POINTER (id);
9944 }
9945
9946 al->target = target ? target : name;
9947 al->linkage = gen_rtx_SYMBOL_REF (Pmode, name);
9948
9949 splay_tree_insert (alpha_links_tree, (splay_tree_key) name,
9950 (splay_tree_value) al);
9951
9952 return al->linkage;
9953 }
9954 9618
9955 /* Return a SYMBOL_REF representing the reference to the .linkage entry 9619 /* Return a SYMBOL_REF representing the reference to the .linkage entry
9956 of function FUNC built for calls made from CFUNDECL. LFLAG is 1 if 9620 of function FUNC built for calls made from CFUNDECL. LFLAG is 1 if
9957 this is the reference to the linkage pointer value, 0 if this is the 9621 this is the reference to the linkage pointer value, 0 if this is the
9958 reference to the function entry value. RFLAG is 1 if this a reduced 9622 reference to the function entry value. RFLAG is 1 if this a reduced
9959 reference (code address only), 0 if this is a full reference. */ 9623 reference (code address only), 0 if this is a full reference. */
9960 9624
9961 rtx 9625 rtx
9962 alpha_use_linkage (rtx func, tree cfundecl, int lflag, int rflag) 9626 alpha_use_linkage (rtx func, bool lflag, bool rflag)
9963 { 9627 {
9964 splay_tree_node cfunnode; 9628 struct alpha_links *al = NULL;
9965 struct alpha_funcs *cfaf;
9966 struct alpha_links *al;
9967 const char *name = XSTR (func, 0); 9629 const char *name = XSTR (func, 0);
9968 9630
9969 cfaf = (struct alpha_funcs *) 0; 9631 if (cfun->machine->links)
9970 al = (struct alpha_links *) 0; 9632 {
9971
9972 cfunnode = splay_tree_lookup (alpha_funcs_tree, (splay_tree_key) cfundecl);
9973 cfaf = (struct alpha_funcs *) cfunnode->value;
9974
9975 if (cfaf->links)
9976 {
9977 splay_tree_node lnode;
9978
9979 /* Is this name already defined? */ 9633 /* Is this name already defined? */
9980 9634 alpha_links **slot = cfun->machine->links->get (name);
9981 lnode = splay_tree_lookup (cfaf->links, (splay_tree_key) name); 9635 if (slot)
9982 if (lnode) 9636 al = *slot;
9983 al = (struct alpha_links *) lnode->value;
9984 } 9637 }
9985 else 9638 else
9986 cfaf->links = splay_tree_new_ggc 9639 cfun->machine->links
9987 ((splay_tree_compare_fn) strcmp, 9640 = hash_map<nofree_string_hash, alpha_links *>::create_ggc (64);
9988 ggc_alloc_splay_tree_str_alpha_links_splay_tree_s, 9641
9989 ggc_alloc_splay_tree_str_alpha_links_splay_tree_node_s); 9642 if (al == NULL)
9990 9643 {
9991 if (!al) 9644 size_t buf_len;
9992 {
9993 size_t name_len;
9994 size_t buflen;
9995 char *linksym; 9645 char *linksym;
9996 splay_tree_node node = 0; 9646 tree id;
9997 struct alpha_links *anl;
9998 9647
9999 if (name[0] == '*') 9648 if (name[0] == '*')
10000 name++; 9649 name++;
10001 9650
10002 name_len = strlen (name); 9651 /* Follow transparent alias, as this is used for CRTL translations. */
10003 linksym = (char *) alloca (name_len + 50); 9652 id = maybe_get_identifier (name);
10004 9653 if (id)
10005 al = ggc_alloc_alpha_links (); 9654 {
10006 al->num = cfaf->num; 9655 while (IDENTIFIER_TRANSPARENT_ALIAS (id))
10007 al->target = NULL; 9656 id = TREE_CHAIN (id);
10008 9657 name = IDENTIFIER_POINTER (id);
10009 node = splay_tree_lookup (alpha_links_tree, (splay_tree_key) name); 9658 }
10010 if (node) 9659
10011 { 9660 buf_len = strlen (name) + 8 + 9;
10012 anl = (struct alpha_links *) node->value; 9661 linksym = (char *) alloca (buf_len);
10013 al->lkind = anl->lkind; 9662 snprintf (linksym, buf_len, "$%d..%s..lk", cfun->funcdef_no, name);
10014 name = anl->target; 9663
10015 } 9664 al = ggc_alloc<alpha_links> ();
10016 9665 al->func = func;
10017 sprintf (linksym, "$%d..%s..lk", cfaf->num, name); 9666 al->linkage = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (linksym));
10018 buflen = strlen (linksym); 9667
10019 9668 cfun->machine->links->put (ggc_strdup (name), al);
10020 al->linkage = gen_rtx_SYMBOL_REF 9669 }
10021 (Pmode, ggc_alloc_string (linksym, buflen + 1)); 9670
10022 9671 al->rkind = rflag ? KIND_CODEADDR : KIND_LINKAGE;
10023 splay_tree_insert (cfaf->links, (splay_tree_key) name,
10024 (splay_tree_value) al);
10025 }
10026
10027 if (rflag)
10028 al->rkind = KIND_CODEADDR;
10029 else
10030 al->rkind = KIND_LINKAGE;
10031 9672
10032 if (lflag) 9673 if (lflag)
10033 return gen_rtx_MEM (Pmode, plus_constant (al->linkage, 8)); 9674 return gen_rtx_MEM (Pmode, plus_constant (Pmode, al->linkage, 8));
10034 else 9675 else
10035 return al->linkage; 9676 return al->linkage;
10036 } 9677 }
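
As an illustration of the interface (a sketch, not part of the patch; the function name "foo" is made up), a VMS call expander could ask for both views of a linkage entry like this:

  rtx sym = gen_rtx_SYMBOL_REF (Pmode, "foo");
  /* LFLAG set: the linkage pointer value, i.e. a MEM at offset 8
     into the $N..foo..lk pair.  */
  rtx lk = alpha_use_linkage (sym, true, false);
  /* LFLAG clear, RFLAG set: the bare $N..foo..lk symbol, marked
     KIND_CODEADDR so only a code address is requested.  */
  rtx en = alpha_use_linkage (sym, false, true);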
10037 9678
10038 static int 9679 static int
10039 alpha_write_one_linkage (splay_tree_node node, void *data) 9680 alpha_write_one_linkage (const char *name, alpha_links *link, FILE *stream)
10040 { 9681 {
10041 const char *const name = (const char *) node->key; 9682 ASM_OUTPUT_INTERNAL_LABEL (stream, XSTR (link->linkage, 0));
10042 struct alpha_links *link = (struct alpha_links *) node->value;
10043 FILE *stream = (FILE *) data;
10044
10045 fprintf (stream, "$%d..%s..lk:\n", link->num, name);
10046 if (link->rkind == KIND_CODEADDR) 9683 if (link->rkind == KIND_CODEADDR)
10047 { 9684 {
10048 if (link->lkind == KIND_LOCAL) 9685 /* External and used, request code address. */
10049 { 9686 fprintf (stream, "\t.code_address ");
10050 /* Local and used */ 9687 }
9688 else
9689 {
9690 if (!SYMBOL_REF_EXTERNAL_P (link->func)
9691 && SYMBOL_REF_LOCAL_P (link->func))
9692 {
9693 /* Locally defined, build linkage pair. */
10051 fprintf (stream, "\t.quad %s..en\n", name); 9694 fprintf (stream, "\t.quad %s..en\n", name);
9695 fprintf (stream, "\t.quad ");
10052 } 9696 }
10053 else 9697 else
10054 { 9698 {
10055 /* External and used, request code address. */ 9699 /* External, request linkage pair. */
10056 fprintf (stream, "\t.code_address %s\n", name); 9700 fprintf (stream, "\t.linkage ");
10057 } 9701 }
10058 } 9702 }
10059 else 9703 assemble_name (stream, name);
10060 { 9704 fputs ("\n", stream);
10061 if (link->lkind == KIND_LOCAL)
10062 {
10063 /* Local and used, build linkage pair. */
10064 fprintf (stream, "\t.quad %s..en\n", name);
10065 fprintf (stream, "\t.quad %s\n", name);
10066 }
10067 else
10068 {
10069 /* External and used, request linkage pair. */
10070 fprintf (stream, "\t.linkage %s\n", name);
10071 }
10072 }
10073 9705
10074 return 0; 9706 return 0;
10075 } 9707 }
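
For concreteness, a sketch of the directives this routine emits (the names and the funcdef number 1 are hypothetical, and the parenthesized notes are not part of the output):

  $1..foo..lk:          (foo locally defined: full linkage pair)
	.quad foo..en
	.quad foo
  $1..bar..lk:          (bar external: the linker fills in the pair)
	.linkage bar
  $1..baz..lk:          (baz external with KIND_CODEADDR)
	.code_address baz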
10076 9708
10077 static void 9709 static void
10078 alpha_write_linkage (FILE *stream, const char *funname, tree fundecl) 9710 alpha_write_linkage (FILE *stream, const char *funname)
10079 { 9711 {
10080 splay_tree_node node;
10081 struct alpha_funcs *func;
10082
10083 fprintf (stream, "\t.link\n"); 9712 fprintf (stream, "\t.link\n");
10084 fprintf (stream, "\t.align 3\n"); 9713 fprintf (stream, "\t.align 3\n");
10085 in_section = NULL; 9714 in_section = NULL;
10086 9715
10087 node = splay_tree_lookup (alpha_funcs_tree, (splay_tree_key) fundecl); 9716 #ifdef TARGET_VMS_CRASH_DEBUG
10088 func = (struct alpha_funcs *) node->value;
10089
10090 fputs ("\t.name ", stream); 9717 fputs ("\t.name ", stream);
10091 assemble_name (stream, funname); 9718 assemble_name (stream, funname);
10092 fputs ("..na\n", stream); 9719 fputs ("..na\n", stream);
9720 #endif
9721
10093 ASM_OUTPUT_LABEL (stream, funname); 9722 ASM_OUTPUT_LABEL (stream, funname);
10094 fprintf (stream, "\t.pdesc "); 9723 fprintf (stream, "\t.pdesc ");
10095 assemble_name (stream, funname); 9724 assemble_name (stream, funname);
10096 fprintf (stream, "..en,%s\n", 9725 fprintf (stream, "..en,%s\n",
10097 alpha_procedure_type == PT_STACK ? "stack" 9726 alpha_procedure_type == PT_STACK ? "stack"
10098 : alpha_procedure_type == PT_REGISTER ? "reg" : "null"); 9727 : alpha_procedure_type == PT_REGISTER ? "reg" : "null");
10099 9728
10100 if (func->links) 9729 if (cfun->machine->links)
10101 { 9730 {
10102 splay_tree_foreach (func->links, alpha_write_one_linkage, stream); 9731 hash_map<nofree_string_hash, alpha_links *>::iterator iter
10103 /* splay_tree_delete (func->links); */ 9732 = cfun->machine->links->begin ();
9733 for (; iter != cfun->machine->links->end (); ++iter)
9734 alpha_write_one_linkage ((*iter).first, (*iter).second, stream);
10104 } 9735 }
10105 } 9736 }
10106 9737
10107 /* Switch to an arbitrary section NAME with attributes as specified 9738 /* Switch to an arbitrary section NAME with attributes as specified
10108 by FLAGS. ALIGN specifies any known alignment requirements for 9739 by FLAGS. ALIGN specifies any known alignment requirements for
10143 switch_to_section (dtors_section); 9774 switch_to_section (dtors_section);
10144 assemble_align (BITS_PER_WORD); 9775 assemble_align (BITS_PER_WORD);
10145 assemble_integer (symbol, UNITS_PER_WORD, BITS_PER_WORD, 1); 9776 assemble_integer (symbol, UNITS_PER_WORD, BITS_PER_WORD, 1);
10146 } 9777 }
10147 #else 9778 #else
10148
10149 rtx
10150 alpha_need_linkage (const char *name ATTRIBUTE_UNUSED,
10151 int is_local ATTRIBUTE_UNUSED)
10152 {
10153 return NULL_RTX;
10154 }
10155
10156 rtx 9779 rtx
10157 alpha_use_linkage (rtx func ATTRIBUTE_UNUSED, 9780 alpha_use_linkage (rtx func ATTRIBUTE_UNUSED,
10158 tree cfundecl ATTRIBUTE_UNUSED, 9781 bool lflag ATTRIBUTE_UNUSED,
10159 int lflag ATTRIBUTE_UNUSED, 9782 bool rflag ATTRIBUTE_UNUSED)
10160 int rflag ATTRIBUTE_UNUSED)
10161 { 9783 {
10162 return NULL_RTX; 9784 return NULL_RTX;
10163 } 9785 }
10164 9786
10165 #endif /* TARGET_ABI_OPEN_VMS */ 9787 #endif /* TARGET_ABI_OPEN_VMS */
10166 9788
10167 #if TARGET_ABI_UNICOSMK
10168
10169 /* This evaluates to true if we do not know how to pass TYPE solely in
10170 registers. This is the case for all arguments that do not fit in two
10171 registers. */
10172
10173 static bool
10174 unicosmk_must_pass_in_stack (enum machine_mode mode, const_tree type)
10175 {
10176 if (type == NULL)
10177 return false;
10178
10179 if (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
10180 return true;
10181 if (TREE_ADDRESSABLE (type))
10182 return true;
10183
10184 return ALPHA_ARG_SIZE (mode, type, 0) > 2;
10185 }
10186
10187 /* Define the offset between two registers, one to be eliminated, and the
10188 other its replacement, at the start of a routine. */
10189
10190 int
10191 unicosmk_initial_elimination_offset (int from, int to)
10192 {
10193 int fixed_size;
10194
10195 fixed_size = alpha_sa_size();
10196 if (fixed_size != 0)
10197 fixed_size += 48;
10198
10199 if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
10200 return -fixed_size;
10201 else if (from == ARG_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
10202 return 0;
10203 else if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
10204 return (ALPHA_ROUND (crtl->outgoing_args_size)
10205 + ALPHA_ROUND (get_frame_size()));
10206 else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
10207 return (ALPHA_ROUND (fixed_size)
10208 + ALPHA_ROUND (get_frame_size()
10209 + crtl->outgoing_args_size));
10210 else
10211 gcc_unreachable ();
10212 }
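
A worked instance of these cases (hypothetical sizes; assuming ALPHA_ROUND rounds up to the 16-byte stack alignment): take alpha_sa_size () == 16, a 24-byte frame, and 8 bytes of outgoing arguments.

  /* fixed_size = 16 + 48 = 64.  Then:
       FRAME_POINTER -> HARD_FRAME_POINTER : -64
       ARG_POINTER   -> HARD_FRAME_POINTER :   0
       FRAME_POINTER -> STACK_POINTER      : ALPHA_ROUND (8) + ALPHA_ROUND (24)
                                           = 16 + 32 = 48
       ARG_POINTER   -> STACK_POINTER      : ALPHA_ROUND (64) + ALPHA_ROUND (24 + 8)
                                           = 64 + 32 = 96  */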
10213
10214 /* Output the module name for .ident and .end directives. We have to strip
10215 directories and make sure that the module name starts with a letter
10216 or '$'. */
10217
10218 static void
10219 unicosmk_output_module_name (FILE *file)
10220 {
10221 const char *name = lbasename (main_input_filename);
10222 unsigned len = strlen (name);
10223 char *clean_name = alloca (len + 2);
10224 char *ptr = clean_name;
10225
10226 /* CAM only accepts module names that start with a letter or '$'. We
10227 prefix the module name with a '$' if necessary. */
10228
10229 if (!ISALPHA (*name))
10230 *ptr++ = '$';
10231 memcpy (ptr, name, len + 1);
10232 clean_symbol_name (clean_name);
10233 fputs (clean_name, file);
10234 }
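
Two illustrative translations (assuming clean_symbol_name maps characters CAM does not accept to underscores):

  /* "src/kernel.c" -> "kernel_c"
     "src/9lives.c" -> "$9lives_c"  ('$' prefixed because the basename
                                     starts with a digit, not a letter)  */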
10235
10236 /* Output the definition of a common variable. */
10237
10238 void
10239 unicosmk_output_common (FILE *file, const char *name, int size, int align)
10240 {
10241 tree name_tree;
10242 printf ("T3E__: common %s\n", name);
10243
10244 in_section = NULL;
10245 fputs("\t.endp\n\n\t.psect ", file);
10246 assemble_name(file, name);
10247 fprintf(file, ",%d,common\n", floor_log2 (align / BITS_PER_UNIT));
10248 fprintf(file, "\t.byte\t0:%d\n", size);
10249
10250 /* Mark the symbol as defined in this module. */
10251 name_tree = get_identifier (name);
10252 TREE_ASM_WRITTEN (name_tree) = 1;
10253 }
10254
10255 #define SECTION_PUBLIC SECTION_MACH_DEP
10256 #define SECTION_MAIN (SECTION_PUBLIC << 1)
10257 static int current_section_align;
10258
10259 /* A get_unnamed_section callback for switching to the text section. */
10260
10261 static void
10262 unicosmk_output_text_section_asm_op (const void *data ATTRIBUTE_UNUSED)
10263 {
10264 static int count = 0;
10265 fprintf (asm_out_file, "\t.endp\n\n\t.psect\tgcc@text___%d,code\n", count++);
10266 }
10267
10268 /* A get_unnamed_section callback for switching to the data section. */
10269
10270 static void
10271 unicosmk_output_data_section_asm_op (const void *data ATTRIBUTE_UNUSED)
10272 {
10273 static int count = 1;
10274 fprintf (asm_out_file, "\t.endp\n\n\t.psect\tgcc@data___%d,data\n", count++);
10275 }
10276
10277 /* Implement TARGET_ASM_INIT_SECTIONS.
10278
10279 The Cray assembler is really weird with respect to sections. It has only
10280 named sections and you can't reopen a section once it has been closed.
10281 This means that we have to generate unique names whenever we want to
10282 reenter the text or the data section. */
10283
10284 static void
10285 unicosmk_init_sections (void)
10286 {
10287 text_section = get_unnamed_section (SECTION_CODE,
10288 unicosmk_output_text_section_asm_op,
10289 NULL);
10290 data_section = get_unnamed_section (SECTION_WRITE,
10291 unicosmk_output_data_section_asm_op,
10292 NULL);
10293 readonly_data_section = data_section;
10294 }
10295
10296 static unsigned int
10297 unicosmk_section_type_flags (tree decl, const char *name,
10298 int reloc ATTRIBUTE_UNUSED)
10299 {
10300 unsigned int flags = default_section_type_flags (decl, name, reloc);
10301
10302 if (!decl)
10303 return flags;
10304
10305 if (TREE_CODE (decl) == FUNCTION_DECL)
10306 {
10307 current_section_align = floor_log2 (FUNCTION_BOUNDARY / BITS_PER_UNIT);
10308 if (align_functions_log > current_section_align)
10309 current_section_align = align_functions_log;
10310
10311 if (! strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl)), "main"))
10312 flags |= SECTION_MAIN;
10313 }
10314 else
10315 current_section_align = floor_log2 (DECL_ALIGN (decl) / BITS_PER_UNIT);
10316
10317 if (TREE_PUBLIC (decl))
10318 flags |= SECTION_PUBLIC;
10319
10320 return flags;
10321 }
10322
10323 /* Generate a section name for decl and associate it with the
10324 declaration. */
10325
10326 static void
10327 unicosmk_unique_section (tree decl, int reloc ATTRIBUTE_UNUSED)
10328 {
10329 const char *name;
10330 int len;
10331
10332 gcc_assert (decl);
10333
10334 name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
10335 name = default_strip_name_encoding (name);
10336 len = strlen (name);
10337
10338 if (TREE_CODE (decl) == FUNCTION_DECL)
10339 {
10340 char *string;
10341
10342 /* It is essential that we prefix the section name here because
10343 otherwise the section names generated for constructors and
10344 destructors confuse collect2. */
10345
10346 string = alloca (len + 6);
10347 sprintf (string, "code@%s", name);
10348 DECL_SECTION_NAME (decl) = build_string (len + 5, string);
10349 }
10350 else if (TREE_PUBLIC (decl))
10351 DECL_SECTION_NAME (decl) = build_string (len, name);
10352 else
10353 {
10354 char *string;
10355
10356 string = alloca (len + 6);
10357 sprintf (string, "data@%s", name);
10358 DECL_SECTION_NAME (decl) = build_string (len + 5, string);
10359 }
10360 }
10361
10362 /* Switch to an arbitrary section NAME with attributes as specified
10363 by FLAGS. ALIGN specifies any known alignment requirements for
10364 the section; 0 if the default should be used. */
10365
10366 static void
10367 unicosmk_asm_named_section (const char *name, unsigned int flags,
10368 tree decl ATTRIBUTE_UNUSED)
10369 {
10370 const char *kind;
10371
10372 /* Close the previous section. */
10373
10374 fputs ("\t.endp\n\n", asm_out_file);
10375
10376 /* Find out what kind of section we are opening. */
10377
10378 if (flags & SECTION_MAIN)
10379 fputs ("\t.start\tmain\n", asm_out_file);
10380
10381 if (flags & SECTION_CODE)
10382 kind = "code";
10383 else if (flags & SECTION_PUBLIC)
10384 kind = "common";
10385 else
10386 kind = "data";
10387
10388 if (current_section_align != 0)
10389 fprintf (asm_out_file, "\t.psect\t%s,%d,%s\n", name,
10390 current_section_align, kind);
10391 else
10392 fprintf (asm_out_file, "\t.psect\t%s,%s\n", name, kind);
10393 }
10394
10395 static void
10396 unicosmk_insert_attributes (tree decl, tree *attr_ptr ATTRIBUTE_UNUSED)
10397 {
10398 if (DECL_P (decl)
10399 && (TREE_PUBLIC (decl) || TREE_CODE (decl) == FUNCTION_DECL))
10400 unicosmk_unique_section (decl, 0);
10401 }
10402
10403 /* Output an alignment directive. We have to use the macro 'gcc@code@align'
10404 in code sections because .align fills unused space with zeroes. */
10405
10406 void
10407 unicosmk_output_align (FILE *file, int align)
10408 {
10409 if (inside_function)
10410 fprintf (file, "\tgcc@code@align\t%d\n", align);
10411 else
10412 fprintf (file, "\t.align\t%d\n", align);
10413 }
10414
10415 /* Add a case vector to the current function's list of deferred case
10416 vectors. Case vectors have to be put into a separate section because CAM
10417 does not allow data definitions in code sections. */
10418
10419 void
10420 unicosmk_defer_case_vector (rtx lab, rtx vec)
10421 {
10422 struct machine_function *machine = cfun->machine;
10423
10424 vec = gen_rtx_EXPR_LIST (VOIDmode, lab, vec);
10425 machine->addr_list = gen_rtx_EXPR_LIST (VOIDmode, vec,
10426 machine->addr_list);
10427 }
10428
10429 /* Output a case vector. */
10430
10431 static void
10432 unicosmk_output_addr_vec (FILE *file, rtx vec)
10433 {
10434 rtx lab = XEXP (vec, 0);
10435 rtx body = XEXP (vec, 1);
10436 int vlen = XVECLEN (body, 0);
10437 int idx;
10438
10439 (*targetm.asm_out.internal_label) (file, "L", CODE_LABEL_NUMBER (lab));
10440
10441 for (idx = 0; idx < vlen; idx++)
10442 {
10443 ASM_OUTPUT_ADDR_VEC_ELT
10444 (file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
10445 }
10446 }
10447
10448 /* Output current function's deferred case vectors. */
10449
10450 static void
10451 unicosmk_output_deferred_case_vectors (FILE *file)
10452 {
10453 struct machine_function *machine = cfun->machine;
10454 rtx t;
10455
10456 if (machine->addr_list == NULL_RTX)
10457 return;
10458
10459 switch_to_section (data_section);
10460 for (t = machine->addr_list; t; t = XEXP (t, 1))
10461 unicosmk_output_addr_vec (file, XEXP (t, 0));
10462 }
10463
10464 /* Generate the name of the SSIB section for the current function. */
10465
10466 #define SSIB_PREFIX "__SSIB_"
10467 #define SSIB_PREFIX_LEN 7
10468
10469 static const char *
10470 unicosmk_ssib_name (void)
10471 {
10472 /* This is ok since CAM won't be able to deal with names longer than that
10473 anyway. */
10474
10475 static char name[256];
10476
10477 rtx x;
10478 const char *fnname;
10479 int len;
10480
10481 x = DECL_RTL (cfun->decl);
10482 gcc_assert (MEM_P (x));
10483 x = XEXP (x, 0);
10484 gcc_assert (GET_CODE (x) == SYMBOL_REF);
10485 fnname = XSTR (x, 0);
10486
10487 len = strlen (fnname);
10488 if (len + SSIB_PREFIX_LEN > 255)
10489 len = 255 - SSIB_PREFIX_LEN;
10490
10491 strcpy (name, SSIB_PREFIX);
10492 strncpy (name + SSIB_PREFIX_LEN, fnname, len);
10493 name[len + SSIB_PREFIX_LEN] = 0;
10494
10495 return name;
10496 }
10497
10498 /* Set up the dynamic subprogram information block (DSIB) and update the
10499 frame pointer register ($15) for subroutines which have a frame. If the
10500 subroutine doesn't have a frame, simply increment $15. */
10501
10502 static void
10503 unicosmk_gen_dsib (unsigned long *imaskP)
10504 {
10505 if (alpha_procedure_type == PT_STACK)
10506 {
10507 const char *ssib_name;
10508 rtx mem;
10509
10510 /* Allocate 64 bytes for the DSIB. */
10511
10512 FRP (emit_insn (gen_adddi3 (stack_pointer_rtx, stack_pointer_rtx,
10513 GEN_INT (-64))));
10514 emit_insn (gen_blockage ());
10515
10516 /* Save the return address. */
10517
10518 mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 56));
10519 set_mem_alias_set (mem, alpha_sr_alias_set);
10520 FRP (emit_move_insn (mem, gen_rtx_REG (DImode, REG_RA)));
10521 (*imaskP) &= ~(1UL << REG_RA);
10522
10523 /* Save the old frame pointer. */
10524
10525 mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 48));
10526 set_mem_alias_set (mem, alpha_sr_alias_set);
10527 FRP (emit_move_insn (mem, hard_frame_pointer_rtx));
10528 (*imaskP) &= ~(1UL << HARD_FRAME_POINTER_REGNUM);
10529
10530 emit_insn (gen_blockage ());
10531
10532 /* Store the SSIB pointer. */
10533
10534 ssib_name = ggc_strdup (unicosmk_ssib_name ());
10535 mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 32));
10536 set_mem_alias_set (mem, alpha_sr_alias_set);
10537
10538 FRP (emit_move_insn (gen_rtx_REG (DImode, 5),
10539 gen_rtx_SYMBOL_REF (Pmode, ssib_name)));
10540 FRP (emit_move_insn (mem, gen_rtx_REG (DImode, 5)));
10541
10542 /* Save the CIW index. */
10543
10544 mem = gen_rtx_MEM (DImode, plus_constant (stack_pointer_rtx, 24));
10545 set_mem_alias_set (mem, alpha_sr_alias_set);
10546 FRP (emit_move_insn (mem, gen_rtx_REG (DImode, 25)));
10547
10548 emit_insn (gen_blockage ());
10549
10550 /* Set the new frame pointer. */
10551 FRP (emit_insn (gen_adddi3 (hard_frame_pointer_rtx,
10552 stack_pointer_rtx, GEN_INT (64))));
10553 }
10554 else
10555 {
10556 /* Increment the frame pointer register to indicate that we do not
10557 have a frame. */
10558 emit_insn (gen_adddi3 (hard_frame_pointer_rtx,
10559 hard_frame_pointer_rtx, const1_rtx));
10560 }
10561 }
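
In summary, the stores above populate these slots of the 64-byte DSIB (offsets from $sp after the decrement; unlisted slots are left untouched here):

  /* sp+56  return address ($26)
     sp+48  caller's frame pointer ($15)
     sp+32  pointer to this function's SSIB
     sp+24  CIW index ($25)
     and finally $15 = $sp + 64, pointing just past the block.  */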
10562
10563 /* Output the static subroutine information block for the current
10564 function. */
10565
10566 static void
10567 unicosmk_output_ssib (FILE *file, const char *fnname)
10568 {
10569 int len;
10570 int i;
10571 rtx x;
10572 rtx ciw;
10573 struct machine_function *machine = cfun->machine;
10574
10575 in_section = NULL;
10576 fprintf (file, "\t.endp\n\n\t.psect\t%s%s,data\n", user_label_prefix,
10577 unicosmk_ssib_name ());
10578
10579 /* Some required stuff and the function name length. */
10580
10581 len = strlen (fnname);
10582 fprintf (file, "\t.quad\t^X20008%2.2X28\n", len);
10583
10584 /* Saved registers
10585 ??? We don't do that yet. */
10586
10587 fputs ("\t.quad\t0\n", file);
10588
10589 /* Function address. */
10590
10591 fputs ("\t.quad\t", file);
10592 assemble_name (file, fnname);
10593 putc ('\n', file);
10594
10595 fputs ("\t.quad\t0\n", file);
10596 fputs ("\t.quad\t0\n", file);
10597
10598 /* Function name.
10599 ??? We do it the same way Cray CC does it, but this could be
10600 simplified. */
10601
10602 for( i = 0; i < len; i++ )
10603 fprintf (file, "\t.byte\t%d\n", (int)(fnname[i]));
10604 if( (len % 8) == 0 )
10605 fputs ("\t.quad\t0\n", file);
10606 else
10607 fprintf (file, "\t.bits\t%d : 0\n", (8 - (len % 8))*8);
10608
10609 /* All call information words used in the function. */
10610
10611 for (x = machine->first_ciw; x; x = XEXP (x, 1))
10612 {
10613 ciw = XEXP (x, 0);
10614 #if HOST_BITS_PER_WIDE_INT == 32
10615 fprintf (file, "\t.quad\t" HOST_WIDE_INT_PRINT_DOUBLE_HEX "\n",
10616 CONST_DOUBLE_HIGH (ciw), CONST_DOUBLE_LOW (ciw));
10617 #else
10618 fprintf (file, "\t.quad\t" HOST_WIDE_INT_PRINT_HEX "\n", INTVAL (ciw));
10619 #endif
10620 }
10621 }
10622
10623 /* Add a call information word (CIW) to the list of the current function's
10624 CIWs and return its index.
10625
10626 X is a CONST_INT or CONST_DOUBLE representing the CIW. */
10627
10628 rtx
10629 unicosmk_add_call_info_word (rtx x)
10630 {
10631 rtx node;
10632 struct machine_function *machine = cfun->machine;
10633
10634 node = gen_rtx_EXPR_LIST (VOIDmode, x, NULL_RTX);
10635 if (machine->first_ciw == NULL_RTX)
10636 machine->first_ciw = node;
10637 else
10638 XEXP (machine->last_ciw, 1) = node;
10639
10640 machine->last_ciw = node;
10641 ++machine->ciw_count;
10642
10643 return GEN_INT (machine->ciw_count
10644 + strlen (current_function_name ())/8 + 5);
10645 }
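
A quick check of the returned index (hypothetical name): for a function called "main" receiving its first CIW, ciw_count is 1 and strlen ("main") / 8 is 0.

  /* ciw_count = 1, strlen ("main") / 8 = 0  =>  1 + 0 + 5 = 6:
     quads 0-4 hold the SSIB header, quad 5 the padded name, and
     quad 6 this first call information word.  */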
10646
10647 /* The Cray assembler doesn't accept extern declarations for symbols which
10648 are defined in the same file. We have to keep track of all global
10649 symbols which are referenced and/or defined in a source file and output
10650 extern declarations for those which are referenced but not defined at
10651 the end of the file. */
10652
10653 /* List of identifiers for which an extern declaration might have to be
10654 emitted. */
10655 /* FIXME: needs to use GC, so it can be saved and restored for PCH. */
10656
10657 struct unicosmk_extern_list
10658 {
10659 struct unicosmk_extern_list *next;
10660 const char *name;
10661 };
10662
10663 static struct unicosmk_extern_list *unicosmk_extern_head = 0;
10664
10665 /* Output extern declarations which are required for every asm file. */
10666
10667 static void
10668 unicosmk_output_default_externs (FILE *file)
10669 {
10670 static const char *const externs[] =
10671 { "__T3E_MISMATCH" };
10672
10673 int i;
10674 int n;
10675
10676 n = ARRAY_SIZE (externs);
10677
10678 for (i = 0; i < n; i++)
10679 fprintf (file, "\t.extern\t%s\n", externs[i]);
10680 }
10681
10682 /* Output extern declarations for global symbols which have been
10683 referenced but not defined. */
10684
10685 static void
10686 unicosmk_output_externs (FILE *file)
10687 {
10688 struct unicosmk_extern_list *p;
10689 const char *real_name;
10690 int len;
10691 tree name_tree;
10692
10693 len = strlen (user_label_prefix);
10694 for (p = unicosmk_extern_head; p != 0; p = p->next)
10695 {
10696 /* We have to strip the encoding and possibly remove user_label_prefix
10697 from the identifier in order to handle -fleading-underscore and
10698 explicit asm names correctly (cf. gcc.dg/asm-names-1.c). */
10699 real_name = default_strip_name_encoding (p->name);
10700 if (len && p->name[0] == '*'
10701 && !memcmp (real_name, user_label_prefix, len))
10702 real_name += len;
10703
10704 name_tree = get_identifier (real_name);
10705 if (! TREE_ASM_WRITTEN (name_tree))
10706 {
10707 TREE_ASM_WRITTEN (name_tree) = 1;
10708 fputs ("\t.extern\t", file);
10709 assemble_name (file, p->name);
10710 putc ('\n', file);
10711 }
10712 }
10713 }
10714
10715 /* Record an extern. */
10716
10717 void
10718 unicosmk_add_extern (const char *name)
10719 {
10720 struct unicosmk_extern_list *p;
10721
10722 p = (struct unicosmk_extern_list *)
10723 xmalloc (sizeof (struct unicosmk_extern_list));
10724 p->next = unicosmk_extern_head;
10725 p->name = name;
10726 unicosmk_extern_head = p;
10727 }
10728
10729 /* The Cray assembler generates incorrect code if identifiers which
10730 conflict with register names are used as instruction operands. We have
10731 to replace such identifiers with DEX expressions. */
10732
10733 /* Structure to collect identifiers which have been replaced by DEX
10734 expressions. */
10735 /* FIXME: needs to use GC, so it can be saved and restored for PCH. */
10736
10737 struct unicosmk_dex {
10738 struct unicosmk_dex *next;
10739 const char *name;
10740 };
10741
10742 /* List of identifiers which have been replaced by DEX expressions. The DEX
10743 number is determined by the position in the list. */
10744
10745 static struct unicosmk_dex *unicosmk_dex_list = NULL;
10746
10747 /* The number of elements in the DEX list. */
10748
10749 static int unicosmk_dex_count = 0;
10750
10751 /* Check if NAME must be replaced by a DEX expression. */
10752
10753 static int
10754 unicosmk_special_name (const char *name)
10755 {
10756 if (name[0] == '*')
10757 ++name;
10758
10759 if (name[0] == '$')
10760 ++name;
10761
10762 if (name[0] != 'r' && name[0] != 'f' && name[0] != 'R' && name[0] != 'F')
10763 return 0;
10764
10765 switch (name[1])
10766 {
10767 case '1': case '2':
10768 return (name[2] == '\0' || (ISDIGIT (name[2]) && name[3] == '\0'));
10769
10770 case '3':
10771 return (name[2] == '\0'
10772 || ((name[2] == '0' || name[2] == '1') && name[3] == '\0'));
10773
10774 default:
10775 return (ISDIGIT (name[1]) && name[2] == '\0');
10776 }
10777 }
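
Enumerated, the names the predicate accepts (after the optional '*' and '$' prefixes are skipped) are exactly the CAM register spellings:

  /* r0..r9, f0..f9        a single trailing digit
     r1x, r2x (x a digit)  covering r10..r29; bare r1 and r2 also match
     r3, r30, r31          '3' alone or followed by '0' or '1'
     plus the uppercase R/F variants of all of the above.  */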
10778
10779 /* Return the DEX number if X must be replaced by a DEX expression and 0
10780 otherwise. */
10781
10782 static int
10783 unicosmk_need_dex (rtx x)
10784 {
10785 struct unicosmk_dex *dex;
10786 const char *name;
10787 int i;
10788
10789 if (GET_CODE (x) != SYMBOL_REF)
10790 return 0;
10791
10792 name = XSTR (x,0);
10793 if (! unicosmk_special_name (name))
10794 return 0;
10795
10796 i = unicosmk_dex_count;
10797 for (dex = unicosmk_dex_list; dex; dex = dex->next)
10798 {
10799 if (! strcmp (name, dex->name))
10800 return i;
10801 --i;
10802 }
10803
10804 dex = (struct unicosmk_dex *) xmalloc (sizeof (struct unicosmk_dex));
10805 dex->name = name;
10806 dex->next = unicosmk_dex_list;
10807 unicosmk_dex_list = dex;
10808
10809 ++unicosmk_dex_count;
10810 return unicosmk_dex_count;
10811 }
10812
10813 /* Output the DEX definitions for this file. */
10814
10815 static void
10816 unicosmk_output_dex (FILE *file)
10817 {
10818 struct unicosmk_dex *dex;
10819 int i;
10820
10821 if (unicosmk_dex_list == NULL)
10822 return;
10823
10824 fprintf (file, "\t.dexstart\n");
10825
10826 i = unicosmk_dex_count;
10827 for (dex = unicosmk_dex_list; dex; dex = dex->next)
10828 {
10829 fprintf (file, "\tDEX (%d) = ", i);
10830 assemble_name (file, dex->name);
10831 putc ('\n', file);
10832 --i;
10833 }
10834
10835 fprintf (file, "\t.dexend\n");
10836 }
10837
10838 /* Output text to appear at the beginning of an assembler file. */
10839
10840 static void
10841 unicosmk_file_start (void)
10842 {
10843 int i;
10844
10845 fputs ("\t.ident\t", asm_out_file);
10846 unicosmk_output_module_name (asm_out_file);
10847 fputs ("\n\n", asm_out_file);
10848
10849 /* The Unicos/Mk assembler uses different register names. Instead of trying
10850 to support them, we simply use micro definitions. */
10851
10852 /* CAM has different register names: rN for the integer register N and fN
10853 for the floating-point register N. Instead of trying to use these in
10854 alpha.md, we define the symbols $N and $fN to refer to the appropriate
10855 register. */
10856
10857 for (i = 0; i < 32; ++i)
10858 fprintf (asm_out_file, "$%d <- r%d\n", i, i);
10859
10860 for (i = 0; i < 32; ++i)
10861 fprintf (asm_out_file, "$f%d <- f%d\n", i, i);
10862
10863 putc ('\n', asm_out_file);
10864
10865 /* The .align directive fills unused space with zeroes, which does not work
10866 in code sections. We define the macro 'gcc@code@align' which uses nops
10867 instead. Note that it assumes that code sections always have the
10868 biggest possible alignment since . refers to the current offset from
10869 the beginning of the section. */
10870
10871 fputs ("\t.macro gcc@code@align n\n", asm_out_file);
10872 fputs ("gcc@n@bytes = 1 << n\n", asm_out_file);
10873 fputs ("gcc@here = . % gcc@n@bytes\n", asm_out_file);
10874 fputs ("\t.if ne, gcc@here, 0\n", asm_out_file);
10875 fputs ("\t.repeat (gcc@n@bytes - gcc@here) / 4\n", asm_out_file);
10876 fputs ("\tbis r31,r31,r31\n", asm_out_file);
10877 fputs ("\t.endr\n", asm_out_file);
10878 fputs ("\t.endif\n", asm_out_file);
10879 fputs ("\t.endm gcc@code@align\n\n", asm_out_file);
10880
10881 /* Output extern declarations which should always be visible. */
10882 unicosmk_output_default_externs (asm_out_file);
10883
10884 /* Open a dummy section. We always need to be inside a section for the
10885 section-switching code to work correctly.
10886 ??? This should be a module id or something like that. I still have to
10887 figure out what the rules for those are. */
10888 fputs ("\n\t.psect\t$SG00000,data\n", asm_out_file);
10889 }
10890
10891 /* Output text to appear at the end of an assembler file. This includes all
10892 pending extern declarations and DEX expressions. */
10893
10894 static void
10895 unicosmk_file_end (void)
10896 {
10897 fputs ("\t.endp\n\n", asm_out_file);
10898
10899 /* Output all pending externs. */
10900
10901 unicosmk_output_externs (asm_out_file);
10902
10903 /* Output dex definitions used for functions whose names conflict with
10904 register names. */
10905
10906 unicosmk_output_dex (asm_out_file);
10907
10908 fputs ("\t.end\t", asm_out_file);
10909 unicosmk_output_module_name (asm_out_file);
10910 putc ('\n', asm_out_file);
10911 }
10912
10913 #else
10914
10915 static void
10916 unicosmk_output_deferred_case_vectors (FILE *file ATTRIBUTE_UNUSED)
10917 {}
10918
10919 static void
10920 unicosmk_gen_dsib (unsigned long *imaskP ATTRIBUTE_UNUSED)
10921 {}
10922
10923 static void
10924 unicosmk_output_ssib (FILE * file ATTRIBUTE_UNUSED,
10925 const char * fnname ATTRIBUTE_UNUSED)
10926 {}
10927
10928 rtx
10929 unicosmk_add_call_info_word (rtx x ATTRIBUTE_UNUSED)
10930 {
10931 return NULL_RTX;
10932 }
10933
10934 static int
10935 unicosmk_need_dex (rtx x ATTRIBUTE_UNUSED)
10936 {
10937 return 0;
10938 }
10939
10940 #endif /* TARGET_ABI_UNICOSMK */
10941
10942 static void 9789 static void
10943 alpha_init_libfuncs (void) 9790 alpha_init_libfuncs (void)
10944 { 9791 {
10945 if (TARGET_ABI_UNICOSMK) 9792 if (TARGET_ABI_OPEN_VMS)
10946 {
10947 /* Prevent gcc from generating calls to __divsi3. */
10948 set_optab_libfunc (sdiv_optab, SImode, 0);
10949 set_optab_libfunc (udiv_optab, SImode, 0);
10950
10951 /* Use the functions provided by the system library
10952 for DImode integer division. */
10953 set_optab_libfunc (sdiv_optab, DImode, "$sldiv");
10954 set_optab_libfunc (udiv_optab, DImode, "$uldiv");
10955 }
10956 else if (TARGET_ABI_OPEN_VMS)
10957 { 9793 {
10958 /* Use the VMS runtime library functions for division and 9794 /* Use the VMS runtime library functions for division and
10959 remainder. */ 9795 remainder. */
10960 set_optab_libfunc (sdiv_optab, SImode, "OTS$DIV_I"); 9796 set_optab_libfunc (sdiv_optab, SImode, "OTS$DIV_I");
10961 set_optab_libfunc (sdiv_optab, DImode, "OTS$DIV_L"); 9797 set_optab_libfunc (sdiv_optab, DImode, "OTS$DIV_L");
10963 set_optab_libfunc (udiv_optab, DImode, "OTS$DIV_UL"); 9799 set_optab_libfunc (udiv_optab, DImode, "OTS$DIV_UL");
10964 set_optab_libfunc (smod_optab, SImode, "OTS$REM_I"); 9800 set_optab_libfunc (smod_optab, SImode, "OTS$REM_I");
10965 set_optab_libfunc (smod_optab, DImode, "OTS$REM_L"); 9801 set_optab_libfunc (smod_optab, DImode, "OTS$REM_L");
10966 set_optab_libfunc (umod_optab, SImode, "OTS$REM_UI"); 9802 set_optab_libfunc (umod_optab, SImode, "OTS$REM_UI");
10967 set_optab_libfunc (umod_optab, DImode, "OTS$REM_UL"); 9803 set_optab_libfunc (umod_optab, DImode, "OTS$REM_UL");
10968 abort_libfunc = init_one_libfunc ("decc$abort");
10969 memcmp_libfunc = init_one_libfunc ("decc$memcmp");
10970 #ifdef MEM_LIBFUNCS_INIT 9804 #ifdef MEM_LIBFUNCS_INIT
10971 MEM_LIBFUNCS_INIT; 9805 MEM_LIBFUNCS_INIT;
10972 #endif 9806 #endif
10973 } 9807 }
10974 } 9808 }
10981 { 9815 {
10982 int i; 9816 int i;
10983 if (! TARGET_FPREGS) 9817 if (! TARGET_FPREGS)
10984 for (i = 32; i < 63; i++) 9818 for (i = 32; i < 63; i++)
10985 fixed_regs[i] = call_used_regs[i] = 1; 9819 fixed_regs[i] = call_used_regs[i] = 1;
9820 }
9821
9822 /* Canonicalize a comparison from one we don't have to one we do have. */
9823
9824 static void
9825 alpha_canonicalize_comparison (int *code, rtx *op0, rtx *op1,
9826 bool op0_preserve_value)
9827 {
9828 if (!op0_preserve_value
9829 && (*code == GE || *code == GT || *code == GEU || *code == GTU)
9830 && (REG_P (*op1) || *op1 == const0_rtx))
9831 {
9832 std::swap (*op0, *op1);
9833 *code = (int)swap_condition ((enum rtx_code)*code);
9834 }
9835
9836 if ((*code == LT || *code == LTU)
9837 && CONST_INT_P (*op1) && INTVAL (*op1) == 256)
9838 {
9839 *code = *code == LT ? LE : LEU;
9840 *op1 = GEN_INT (255);
9841 }
9842 }
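
Two illustrative rewrites (sketched in RTL-style notation):

  /* (gt a b)    ->  (lt b a)    when a's value need not be preserved,
                                 so only LT/LE-style tests remain.
     (ltu a 256) ->  (leu a 255) 255 fits the Alpha's 8-bit instruction
                                 literal field, 256 does not.  */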
9843
9844 /* Implement TARGET_ATOMIC_ASSIGN_EXPAND_FENV. */
9845
9846 static void
9847 alpha_atomic_assign_expand_fenv (tree *hold, tree *clear, tree *update)
9848 {
9849 const unsigned HOST_WIDE_INT SWCR_STATUS_MASK = (0x3fUL << 17);
9850
9851 tree fenv_var, get_fpscr, set_fpscr, mask, ld_fenv, masked_fenv;
9852 tree new_fenv_var, reload_fenv, restore_fnenv;
9853 tree update_call, atomic_feraiseexcept, hold_fnclex;
9854
9855 /* Assume OSF/1 compatible interfaces. */
9856 if (!TARGET_ABI_OSF)
9857 return;
9858
9859 /* Generate the equivalent of :
9860 unsigned long fenv_var;
9861 fenv_var = __ieee_get_fp_control ();
9862
9863 unsigned long masked_fenv;
9864 masked_fenv = fenv_var & mask;
9865
9866 __ieee_set_fp_control (masked_fenv); */
9867
9868 fenv_var = create_tmp_var_raw (long_unsigned_type_node);
9869 get_fpscr
9870 = build_fn_decl ("__ieee_get_fp_control",
9871 build_function_type_list (long_unsigned_type_node, NULL));
9872 set_fpscr
9873 = build_fn_decl ("__ieee_set_fp_control",
9874 build_function_type_list (void_type_node, NULL));
9875 mask = build_int_cst (long_unsigned_type_node, ~SWCR_STATUS_MASK);
9876 ld_fenv = build2 (MODIFY_EXPR, long_unsigned_type_node,
9877 fenv_var, build_call_expr (get_fpscr, 0));
9878 masked_fenv = build2 (BIT_AND_EXPR, long_unsigned_type_node, fenv_var, mask);
9879 hold_fnclex = build_call_expr (set_fpscr, 1, masked_fenv);
9880 *hold = build2 (COMPOUND_EXPR, void_type_node,
9881 build2 (COMPOUND_EXPR, void_type_node, masked_fenv, ld_fenv),
9882 hold_fnclex);
9883
9884 /* Store the value of masked_fenv to clear the exceptions:
9885 __ieee_set_fp_control (masked_fenv); */
9886
9887 *clear = build_call_expr (set_fpscr, 1, masked_fenv);
9888
9889 /* Generate the equivalent of :
9890 unsigned long new_fenv_var;
9891 new_fenv_var = __ieee_get_fp_control ();
9892
9893 __ieee_set_fp_control (fenv_var);
9894
9895 __atomic_feraiseexcept (new_fenv_var); */
9896
9897 new_fenv_var = create_tmp_var_raw (long_unsigned_type_node);
9898 reload_fenv = build2 (MODIFY_EXPR, long_unsigned_type_node, new_fenv_var,
9899 build_call_expr (get_fpscr, 0));
9900 restore_fnenv = build_call_expr (set_fpscr, 1, fenv_var);
9901 atomic_feraiseexcept = builtin_decl_implicit (BUILT_IN_ATOMIC_FERAISEEXCEPT);
9902 update_call
9903 = build_call_expr (atomic_feraiseexcept, 1,
9904 fold_convert (integer_type_node, new_fenv_var));
9905 *update = build2 (COMPOUND_EXPR, void_type_node,
9906 build2 (COMPOUND_EXPR, void_type_node,
9907 reload_fenv, restore_fnenv), update_call);
9908 }
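
Pulling the three sequences together, the generated trees behave like this C sketch (OSF/1-compatible interface assumed, per the guard above):

  unsigned long fenv_var, new_fenv_var;
  /* *hold */
  fenv_var = __ieee_get_fp_control ();
  __ieee_set_fp_control (fenv_var & ~SWCR_STATUS_MASK);
  /* *clear */
  __ieee_set_fp_control (fenv_var & ~SWCR_STATUS_MASK);
  /* *update */
  new_fenv_var = __ieee_get_fp_control ();
  __ieee_set_fp_control (fenv_var);
  __atomic_feraiseexcept ((int) new_fenv_var);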
9909
9910 /* Implement TARGET_HARD_REGNO_MODE_OK. On Alpha, the integer registers
9911 can hold any mode. The floating-point registers can hold 64-bit
9912 integers as well, but not smaller values. */
9913
9914 static bool
9915 alpha_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
9916 {
9917 if (IN_RANGE (regno, 32, 62))
9918 return (mode == SFmode
9919 || mode == DFmode
9920 || mode == DImode
9921 || mode == SCmode
9922 || mode == DCmode);
9923 return true;
9924 }
9925
9926 /* Implement TARGET_MODES_TIEABLE_P. This asymmetric test is true when
9927 MODE1 could be put in an FP register but MODE2 could not. */
9928
9929 static bool
9930 alpha_modes_tieable_p (machine_mode mode1, machine_mode mode2)
9931 {
9932 return (alpha_hard_regno_mode_ok (32, mode1)
9933 ? alpha_hard_regno_mode_ok (32, mode2)
9934 : true);
9935 }
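
The asymmetry is visible with a small example (the values follow from alpha_hard_regno_mode_ok above):

  /* alpha_modes_tieable_p (SImode, DImode) == true
       SImode cannot occupy an FP register anyway, so nothing is lost.
     alpha_modes_tieable_p (DImode, SImode) == false
       DImode fits an FP register, but SImode does not.  */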
9936
9937 /* Implement TARGET_CAN_CHANGE_MODE_CLASS. */
9938
9939 static bool
9940 alpha_can_change_mode_class (machine_mode from, machine_mode to,
9941 reg_class_t rclass)
9942 {
9943 return (GET_MODE_SIZE (from) == GET_MODE_SIZE (to)
9944 || !reg_classes_intersect_p (FLOAT_REGS, rclass));
10986 } 9945 }
10987 9946
10988 /* Initialize the GCC target structure. */ 9947 /* Initialize the GCC target structure. */
10989 #if TARGET_ABI_OPEN_VMS 9948 #if TARGET_ABI_OPEN_VMS
10990 # undef TARGET_ATTRIBUTE_TABLE 9949 # undef TARGET_ATTRIBUTE_TABLE
10994 #endif 9953 #endif
10995 9954
10996 #undef TARGET_IN_SMALL_DATA_P 9955 #undef TARGET_IN_SMALL_DATA_P
10997 #define TARGET_IN_SMALL_DATA_P alpha_in_small_data_p 9956 #define TARGET_IN_SMALL_DATA_P alpha_in_small_data_p
10998 9957
10999 #if TARGET_ABI_UNICOSMK
11000 # undef TARGET_INSERT_ATTRIBUTES
11001 # define TARGET_INSERT_ATTRIBUTES unicosmk_insert_attributes
11002 # undef TARGET_SECTION_TYPE_FLAGS
11003 # define TARGET_SECTION_TYPE_FLAGS unicosmk_section_type_flags
11004 # undef TARGET_ASM_UNIQUE_SECTION
11005 # define TARGET_ASM_UNIQUE_SECTION unicosmk_unique_section
11006 #undef TARGET_ASM_FUNCTION_RODATA_SECTION
11007 #define TARGET_ASM_FUNCTION_RODATA_SECTION default_no_function_rodata_section
11008 # undef TARGET_ASM_GLOBALIZE_LABEL
11009 # define TARGET_ASM_GLOBALIZE_LABEL hook_void_FILEptr_constcharptr
11010 # undef TARGET_MUST_PASS_IN_STACK
11011 # define TARGET_MUST_PASS_IN_STACK unicosmk_must_pass_in_stack
11012 #endif
11013
11014 #undef TARGET_ASM_ALIGNED_HI_OP 9958 #undef TARGET_ASM_ALIGNED_HI_OP
11015 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t" 9959 #define TARGET_ASM_ALIGNED_HI_OP "\t.word\t"
11016 #undef TARGET_ASM_ALIGNED_DI_OP 9960 #undef TARGET_ASM_ALIGNED_DI_OP
11017 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t" 9961 #define TARGET_ASM_ALIGNED_DI_OP "\t.quad\t"
11018 9962
11019 /* Default unaligned ops are provided for ELF systems. To get unaligned 9963 /* Default unaligned ops are provided for ELF systems. To get unaligned
11020 data for non-ELF systems, we have to turn off auto alignment. */ 9964 data for non-ELF systems, we have to turn off auto alignment. */
11021 #if !defined (OBJECT_FORMAT_ELF) || TARGET_ABI_OPEN_VMS 9965 #if TARGET_ABI_OPEN_VMS
11022 #undef TARGET_ASM_UNALIGNED_HI_OP 9966 #undef TARGET_ASM_UNALIGNED_HI_OP
11023 #define TARGET_ASM_UNALIGNED_HI_OP "\t.align 0\n\t.word\t" 9967 #define TARGET_ASM_UNALIGNED_HI_OP "\t.align 0\n\t.word\t"
11024 #undef TARGET_ASM_UNALIGNED_SI_OP 9968 #undef TARGET_ASM_UNALIGNED_SI_OP
11025 #define TARGET_ASM_UNALIGNED_SI_OP "\t.align 0\n\t.long\t" 9969 #define TARGET_ASM_UNALIGNED_SI_OP "\t.align 0\n\t.long\t"
11026 #undef TARGET_ASM_UNALIGNED_DI_OP 9970 #undef TARGET_ASM_UNALIGNED_DI_OP
11027 #define TARGET_ASM_UNALIGNED_DI_OP "\t.align 0\n\t.quad\t" 9971 #define TARGET_ASM_UNALIGNED_DI_OP "\t.align 0\n\t.quad\t"
11028 #endif 9972 #endif
11029 9973
11030 #ifdef OBJECT_FORMAT_ELF
11031 #undef TARGET_ASM_RELOC_RW_MASK 9974 #undef TARGET_ASM_RELOC_RW_MASK
11032 #define TARGET_ASM_RELOC_RW_MASK alpha_elf_reloc_rw_mask 9975 #define TARGET_ASM_RELOC_RW_MASK alpha_elf_reloc_rw_mask
11033 #undef TARGET_ASM_SELECT_RTX_SECTION 9976 #undef TARGET_ASM_SELECT_RTX_SECTION
11034 #define TARGET_ASM_SELECT_RTX_SECTION alpha_elf_select_rtx_section 9977 #define TARGET_ASM_SELECT_RTX_SECTION alpha_elf_select_rtx_section
11035 #undef TARGET_SECTION_TYPE_FLAGS 9978 #undef TARGET_SECTION_TYPE_FLAGS
11036 #define TARGET_SECTION_TYPE_FLAGS alpha_elf_section_type_flags 9979 #define TARGET_SECTION_TYPE_FLAGS alpha_elf_section_type_flags
11037 #endif
11038 9980
11039 #undef TARGET_ASM_FUNCTION_END_PROLOGUE 9981 #undef TARGET_ASM_FUNCTION_END_PROLOGUE
11040 #define TARGET_ASM_FUNCTION_END_PROLOGUE alpha_output_function_end_prologue 9982 #define TARGET_ASM_FUNCTION_END_PROLOGUE alpha_output_function_end_prologue
11041 9983
11042 #undef TARGET_INIT_LIBFUNCS 9984 #undef TARGET_INIT_LIBFUNCS
11043 #define TARGET_INIT_LIBFUNCS alpha_init_libfuncs 9985 #define TARGET_INIT_LIBFUNCS alpha_init_libfuncs
11044 9986
11045 #undef TARGET_LEGITIMIZE_ADDRESS 9987 #undef TARGET_LEGITIMIZE_ADDRESS
11046 #define TARGET_LEGITIMIZE_ADDRESS alpha_legitimize_address 9988 #define TARGET_LEGITIMIZE_ADDRESS alpha_legitimize_address
11047 9989 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
11048 #if TARGET_ABI_UNICOSMK 9990 #define TARGET_MODE_DEPENDENT_ADDRESS_P alpha_mode_dependent_address_p
11049 #undef TARGET_ASM_FILE_START 9991
11050 #define TARGET_ASM_FILE_START unicosmk_file_start
11051 #undef TARGET_ASM_FILE_END
11052 #define TARGET_ASM_FILE_END unicosmk_file_end
11053 #else
11054 #undef TARGET_ASM_FILE_START 9992 #undef TARGET_ASM_FILE_START
11055 #define TARGET_ASM_FILE_START alpha_file_start 9993 #define TARGET_ASM_FILE_START alpha_file_start
11056 #undef TARGET_ASM_FILE_START_FILE_DIRECTIVE
11057 #define TARGET_ASM_FILE_START_FILE_DIRECTIVE true
11058 #endif
11059 9994
11060 #undef TARGET_SCHED_ADJUST_COST 9995 #undef TARGET_SCHED_ADJUST_COST
11061 #define TARGET_SCHED_ADJUST_COST alpha_adjust_cost 9996 #define TARGET_SCHED_ADJUST_COST alpha_adjust_cost
11062 #undef TARGET_SCHED_ISSUE_RATE 9997 #undef TARGET_SCHED_ISSUE_RATE
11063 #define TARGET_SCHED_ISSUE_RATE alpha_issue_rate 9998 #define TARGET_SCHED_ISSUE_RATE alpha_issue_rate
11074 #define TARGET_INIT_BUILTINS alpha_init_builtins 10009 #define TARGET_INIT_BUILTINS alpha_init_builtins
11075 #undef TARGET_EXPAND_BUILTIN 10010 #undef TARGET_EXPAND_BUILTIN
11076 #define TARGET_EXPAND_BUILTIN alpha_expand_builtin 10011 #define TARGET_EXPAND_BUILTIN alpha_expand_builtin
11077 #undef TARGET_FOLD_BUILTIN 10012 #undef TARGET_FOLD_BUILTIN
11078 #define TARGET_FOLD_BUILTIN alpha_fold_builtin 10013 #define TARGET_FOLD_BUILTIN alpha_fold_builtin
10014 #undef TARGET_GIMPLE_FOLD_BUILTIN
10015 #define TARGET_GIMPLE_FOLD_BUILTIN alpha_gimple_fold_builtin
11079 10016
11080 #undef TARGET_FUNCTION_OK_FOR_SIBCALL 10017 #undef TARGET_FUNCTION_OK_FOR_SIBCALL
11081 #define TARGET_FUNCTION_OK_FOR_SIBCALL alpha_function_ok_for_sibcall 10018 #define TARGET_FUNCTION_OK_FOR_SIBCALL alpha_function_ok_for_sibcall
11082 #undef TARGET_CANNOT_COPY_INSN_P 10019 #undef TARGET_CANNOT_COPY_INSN_P
11083 #define TARGET_CANNOT_COPY_INSN_P alpha_cannot_copy_insn_p 10020 #define TARGET_CANNOT_COPY_INSN_P alpha_cannot_copy_insn_p
10021 #undef TARGET_LEGITIMATE_CONSTANT_P
10022 #define TARGET_LEGITIMATE_CONSTANT_P alpha_legitimate_constant_p
11084 #undef TARGET_CANNOT_FORCE_CONST_MEM 10023 #undef TARGET_CANNOT_FORCE_CONST_MEM
11085 #define TARGET_CANNOT_FORCE_CONST_MEM alpha_cannot_force_const_mem 10024 #define TARGET_CANNOT_FORCE_CONST_MEM alpha_cannot_force_const_mem
11086 10025
11087 #if TARGET_ABI_OSF 10026 #if TARGET_ABI_OSF
11088 #undef TARGET_ASM_OUTPUT_MI_THUNK 10027 #undef TARGET_ASM_OUTPUT_MI_THUNK
11091 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true 10030 #define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
11092 #undef TARGET_STDARG_OPTIMIZE_HOOK 10031 #undef TARGET_STDARG_OPTIMIZE_HOOK
11093 #define TARGET_STDARG_OPTIMIZE_HOOK alpha_stdarg_optimize_hook 10032 #define TARGET_STDARG_OPTIMIZE_HOOK alpha_stdarg_optimize_hook
11094 #endif 10033 #endif
11095 10034
10035 #undef TARGET_PRINT_OPERAND
10036 #define TARGET_PRINT_OPERAND alpha_print_operand
10037 #undef TARGET_PRINT_OPERAND_ADDRESS
10038 #define TARGET_PRINT_OPERAND_ADDRESS alpha_print_operand_address
10039 #undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
10040 #define TARGET_PRINT_OPERAND_PUNCT_VALID_P alpha_print_operand_punct_valid_p
10041
10042 /* Use 16-bit section anchors. */
10043 #undef TARGET_MIN_ANCHOR_OFFSET
10044 #define TARGET_MIN_ANCHOR_OFFSET -0x7fff - 1
10045 #undef TARGET_MAX_ANCHOR_OFFSET
10046 #define TARGET_MAX_ANCHOR_OFFSET 0x7fff
10047 #undef TARGET_USE_BLOCKS_FOR_CONSTANT_P
10048 #define TARGET_USE_BLOCKS_FOR_CONSTANT_P hook_bool_mode_const_rtx_true
10049
10050 #undef TARGET_REGISTER_MOVE_COST
10051 #define TARGET_REGISTER_MOVE_COST alpha_register_move_cost
10052 #undef TARGET_MEMORY_MOVE_COST
10053 #define TARGET_MEMORY_MOVE_COST alpha_memory_move_cost
11096 #undef TARGET_RTX_COSTS 10054 #undef TARGET_RTX_COSTS
11097 #define TARGET_RTX_COSTS alpha_rtx_costs 10055 #define TARGET_RTX_COSTS alpha_rtx_costs
11098 #undef TARGET_ADDRESS_COST 10056 #undef TARGET_ADDRESS_COST
11099 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0 10057 #define TARGET_ADDRESS_COST hook_int_rtx_mode_as_bool_0
11100 10058
11101 #undef TARGET_MACHINE_DEPENDENT_REORG 10059 #undef TARGET_MACHINE_DEPENDENT_REORG
11102 #define TARGET_MACHINE_DEPENDENT_REORG alpha_reorg 10060 #define TARGET_MACHINE_DEPENDENT_REORG alpha_reorg
11103 10061
11104 #undef TARGET_PROMOTE_FUNCTION_MODE 10062 #undef TARGET_PROMOTE_FUNCTION_MODE
11105 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote 10063 #define TARGET_PROMOTE_FUNCTION_MODE default_promote_function_mode_always_promote
11106 #undef TARGET_PROMOTE_PROTOTYPES 10064 #undef TARGET_PROMOTE_PROTOTYPES
11107 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_false 10065 #define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_false
10066
10067 #undef TARGET_FUNCTION_VALUE
10068 #define TARGET_FUNCTION_VALUE alpha_function_value
10069 #undef TARGET_LIBCALL_VALUE
10070 #define TARGET_LIBCALL_VALUE alpha_libcall_value
10071 #undef TARGET_FUNCTION_VALUE_REGNO_P
10072 #define TARGET_FUNCTION_VALUE_REGNO_P alpha_function_value_regno_p
11108 #undef TARGET_RETURN_IN_MEMORY 10073 #undef TARGET_RETURN_IN_MEMORY
11109 #define TARGET_RETURN_IN_MEMORY alpha_return_in_memory 10074 #define TARGET_RETURN_IN_MEMORY alpha_return_in_memory
11110 #undef TARGET_PASS_BY_REFERENCE 10075 #undef TARGET_PASS_BY_REFERENCE
11111 #define TARGET_PASS_BY_REFERENCE alpha_pass_by_reference 10076 #define TARGET_PASS_BY_REFERENCE alpha_pass_by_reference
11112 #undef TARGET_SETUP_INCOMING_VARARGS 10077 #undef TARGET_SETUP_INCOMING_VARARGS
11126 #undef TARGET_FUNCTION_ARG_ADVANCE 10091 #undef TARGET_FUNCTION_ARG_ADVANCE
11127 #define TARGET_FUNCTION_ARG_ADVANCE alpha_function_arg_advance 10092 #define TARGET_FUNCTION_ARG_ADVANCE alpha_function_arg_advance
11128 #undef TARGET_TRAMPOLINE_INIT 10093 #undef TARGET_TRAMPOLINE_INIT
11129 #define TARGET_TRAMPOLINE_INIT alpha_trampoline_init 10094 #define TARGET_TRAMPOLINE_INIT alpha_trampoline_init
11130 10095
10096 #undef TARGET_INSTANTIATE_DECLS
10097 #define TARGET_INSTANTIATE_DECLS alpha_instantiate_decls
10098
11131 #undef TARGET_SECONDARY_RELOAD 10099 #undef TARGET_SECONDARY_RELOAD
11132 #define TARGET_SECONDARY_RELOAD alpha_secondary_reload 10100 #define TARGET_SECONDARY_RELOAD alpha_secondary_reload
10101 #undef TARGET_SECONDARY_MEMORY_NEEDED
10102 #define TARGET_SECONDARY_MEMORY_NEEDED alpha_secondary_memory_needed
10103 #undef TARGET_SECONDARY_MEMORY_NEEDED_MODE
10104 #define TARGET_SECONDARY_MEMORY_NEEDED_MODE alpha_secondary_memory_needed_mode
11133 10105
11134 #undef TARGET_SCALAR_MODE_SUPPORTED_P 10106 #undef TARGET_SCALAR_MODE_SUPPORTED_P
11135 #define TARGET_SCALAR_MODE_SUPPORTED_P alpha_scalar_mode_supported_p 10107 #define TARGET_SCALAR_MODE_SUPPORTED_P alpha_scalar_mode_supported_p
11136 #undef TARGET_VECTOR_MODE_SUPPORTED_P 10108 #undef TARGET_VECTOR_MODE_SUPPORTED_P
11137 #define TARGET_VECTOR_MODE_SUPPORTED_P alpha_vector_mode_supported_p 10109 #define TARGET_VECTOR_MODE_SUPPORTED_P alpha_vector_mode_supported_p
11140 #define TARGET_BUILD_BUILTIN_VA_LIST alpha_build_builtin_va_list 10112 #define TARGET_BUILD_BUILTIN_VA_LIST alpha_build_builtin_va_list
11141 10113
11142 #undef TARGET_EXPAND_BUILTIN_VA_START 10114 #undef TARGET_EXPAND_BUILTIN_VA_START
11143 #define TARGET_EXPAND_BUILTIN_VA_START alpha_va_start 10115 #define TARGET_EXPAND_BUILTIN_VA_START alpha_va_start
11144 10116
11145 /* The Alpha architecture does not require sequential consistency. See
11146 http://www.cs.umd.edu/~pugh/java/memoryModel/AlphaReordering.html
11147 for an example of how it can be violated in practice. */
11148 #undef TARGET_RELAXED_ORDERING
11149 #define TARGET_RELAXED_ORDERING true
11150
11151 #undef TARGET_DEFAULT_TARGET_FLAGS
11152 #define TARGET_DEFAULT_TARGET_FLAGS \
11153 (TARGET_DEFAULT | TARGET_CPU_DEFAULT | TARGET_DEFAULT_EXPLICIT_RELOCS)
11154 #undef TARGET_HANDLE_OPTION
11155 #define TARGET_HANDLE_OPTION alpha_handle_option
11156
11157 #undef TARGET_OPTION_OVERRIDE 10117 #undef TARGET_OPTION_OVERRIDE
11158 #define TARGET_OPTION_OVERRIDE alpha_option_override 10118 #define TARGET_OPTION_OVERRIDE alpha_option_override
11159 10119
11160 #undef TARGET_OPTION_OPTIMIZATION_TABLE 10120 #undef TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE
11161 #define TARGET_OPTION_OPTIMIZATION_TABLE alpha_option_optimization_table 10121 #define TARGET_OVERRIDE_OPTIONS_AFTER_CHANGE \
10122 alpha_override_options_after_change
11162 10123
11163 #ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING 10124 #ifdef TARGET_ALTERNATE_LONG_DOUBLE_MANGLING
11164 #undef TARGET_MANGLE_TYPE 10125 #undef TARGET_MANGLE_TYPE
11165 #define TARGET_MANGLE_TYPE alpha_mangle_type 10126 #define TARGET_MANGLE_TYPE alpha_mangle_type
11166 #endif 10127 #endif
11167 10128
10129 #undef TARGET_LRA_P
10130 #define TARGET_LRA_P hook_bool_void_false
10131
11168 #undef TARGET_LEGITIMATE_ADDRESS_P 10132 #undef TARGET_LEGITIMATE_ADDRESS_P
11169 #define TARGET_LEGITIMATE_ADDRESS_P alpha_legitimate_address_p 10133 #define TARGET_LEGITIMATE_ADDRESS_P alpha_legitimate_address_p
11170 10134
11171 #undef TARGET_CONDITIONAL_REGISTER_USAGE 10135 #undef TARGET_CONDITIONAL_REGISTER_USAGE
11172 #define TARGET_CONDITIONAL_REGISTER_USAGE alpha_conditional_register_usage 10136 #define TARGET_CONDITIONAL_REGISTER_USAGE alpha_conditional_register_usage
11173 10137
10138 #undef TARGET_CANONICALIZE_COMPARISON
10139 #define TARGET_CANONICALIZE_COMPARISON alpha_canonicalize_comparison
10140
10141 #undef TARGET_ATOMIC_ASSIGN_EXPAND_FENV
10142 #define TARGET_ATOMIC_ASSIGN_EXPAND_FENV alpha_atomic_assign_expand_fenv
10143
10144 #undef TARGET_HARD_REGNO_MODE_OK
10145 #define TARGET_HARD_REGNO_MODE_OK alpha_hard_regno_mode_ok
10146
10147 #undef TARGET_MODES_TIEABLE_P
10148 #define TARGET_MODES_TIEABLE_P alpha_modes_tieable_p
10149
10150 #undef TARGET_CAN_CHANGE_MODE_CLASS
10151 #define TARGET_CAN_CHANGE_MODE_CLASS alpha_can_change_mode_class
10152
11174 struct gcc_target targetm = TARGET_INITIALIZER; 10153 struct gcc_target targetm = TARGET_INITIALIZER;
11175 10154
11176 10155
11177 #include "gt-alpha.h" 10156 #include "gt-alpha.h"