comparison gcc/config/rs6000/rs6000.c @ 67:f6334be47118

update gcc from gcc-4.6-20100522 to gcc-4.6-20110318
author nobuyasu <dimolto@cr.ie.u-ryukyu.ac.jp>
date Tue, 22 Mar 2011 17:18:12 +0900
parents b7f97abdc517
children 04ced10e8804
comparison of 65:65488c3d617d with 67:f6334be47118
1 /* Subroutines used for code generation on IBM RS/6000. 1 /* Subroutines used for code generation on IBM RS/6000.
2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2 Copyright (C) 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
4 Free Software Foundation, Inc. 4 Free Software Foundation, Inc.
5 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu) 5 Contributed by Richard Kenner (kenner@vlsi1.ultra.nyu.edu)
6 6
7 This file is part of GCC. 7 This file is part of GCC.
8 8
39 #include "except.h" 39 #include "except.h"
40 #include "function.h" 40 #include "function.h"
41 #include "output.h" 41 #include "output.h"
42 #include "basic-block.h" 42 #include "basic-block.h"
43 #include "integrate.h" 43 #include "integrate.h"
44 #include "diagnostic-core.h"
44 #include "toplev.h" 45 #include "toplev.h"
45 #include "ggc.h" 46 #include "ggc.h"
46 #include "hashtab.h" 47 #include "hashtab.h"
47 #include "tm_p.h" 48 #include "tm_p.h"
48 #include "target.h" 49 #include "target.h"
49 #include "target-def.h" 50 #include "target-def.h"
50 #include "langhooks.h" 51 #include "langhooks.h"
51 #include "reload.h" 52 #include "reload.h"
52 #include "cfglayout.h" 53 #include "cfglayout.h"
54 #include "cfgloop.h"
53 #include "sched-int.h" 55 #include "sched-int.h"
54 #include "gimple.h" 56 #include "gimple.h"
55 #include "tree-flow.h" 57 #include "tree-flow.h"
56 #include "intl.h" 58 #include "intl.h"
57 #include "params.h" 59 #include "params.h"
70 #define min(A,B) ((A) < (B) ? (A) : (B)) 72 #define min(A,B) ((A) < (B) ? (A) : (B))
71 #define max(A,B) ((A) > (B) ? (A) : (B)) 73 #define max(A,B) ((A) > (B) ? (A) : (B))
72 74
73 /* Structure used to define the rs6000 stack */ 75 /* Structure used to define the rs6000 stack */
74 typedef struct rs6000_stack { 76 typedef struct rs6000_stack {
77 int reload_completed; /* stack info won't change from here on */
75 int first_gp_reg_save; /* first callee saved GP register used */ 78 int first_gp_reg_save; /* first callee saved GP register used */
76 int first_fp_reg_save; /* first callee saved FP register used */ 79 int first_fp_reg_save; /* first callee saved FP register used */
77 int first_altivec_reg_save; /* first callee saved AltiVec register used */ 80 int first_altivec_reg_save; /* first callee saved AltiVec register used */
78 int lr_save_p; /* true if the link reg needs to be saved */ 81 int lr_save_p; /* true if the link reg needs to be saved */
79 int cr_save_p; /* true if the CR reg needs to be saved */ 82 int cr_save_p; /* true if the CR reg needs to be saved */
106 not in save_size */ 109 not in save_size */
107 int spe_gp_size; /* size of 64-bit GPR save size for SPE */ 110 int spe_gp_size; /* size of 64-bit GPR save size for SPE */
108 int spe_padding_size; 111 int spe_padding_size;
109 HOST_WIDE_INT total_size; /* total bytes allocated for stack */ 112 HOST_WIDE_INT total_size; /* total bytes allocated for stack */
110 int spe_64bit_regs_used; 113 int spe_64bit_regs_used;
114 int savres_strategy;
111 } rs6000_stack_t; 115 } rs6000_stack_t;
112 116
113 /* A C structure for machine-specific, per-function data. 117 /* A C structure for machine-specific, per-function data.
114 This is added to the cfun structure. */ 118 This is added to the cfun structure. */
115 typedef struct GTY(()) machine_function 119 typedef struct GTY(()) machine_function
133 rtx sdmode_stack_slot; 137 rtx sdmode_stack_slot;
134 } machine_function; 138 } machine_function;
135 139
136 /* Target cpu type */ 140 /* Target cpu type */
137 141
138 enum processor_type rs6000_cpu;
139 struct rs6000_cpu_select rs6000_select[3] = 142 struct rs6000_cpu_select rs6000_select[3] =
140 { 143 {
141 /* switch name, tune arch */ 144 /* switch name, tune arch */
142 { (const char *)0, "--with-cpu=", 1, 1 }, 145 { (const char *)0, "--with-cpu=", 1, 1 },
143 { (const char *)0, "-mcpu=", 1, 1 }, 146 { (const char *)0, "-mcpu=", 1, 1 },
144 { (const char *)0, "-mtune=", 1, 0 }, 147 { (const char *)0, "-mtune=", 1, 0 },
145 }; 148 };
146 149
147 /* Always emit branch hint bits. */ 150 /* String variables to hold the various options. */
148 static GTY(()) bool rs6000_always_hint; 151 static const char *rs6000_sched_insert_nops_str;
149 152 static const char *rs6000_sched_costly_dep_str;
150 /* Schedule instructions for group formation. */ 153 static const char *rs6000_recip_name;
151 static GTY(()) bool rs6000_sched_groups; 154
152 155 #ifdef USING_ELFOS_H
153 /* Align branch targets. */ 156 static const char *rs6000_abi_name;
154 static GTY(()) bool rs6000_align_branch_targets; 157 static const char *rs6000_sdata_name;
155 158 #endif
156 /* Support for -msched-costly-dep option. */
157 const char *rs6000_sched_costly_dep_str;
158 enum rs6000_dependence_cost rs6000_sched_costly_dep;
159
160 /* Support for -minsert-sched-nops option. */
161 const char *rs6000_sched_insert_nops_str;
162 enum rs6000_nop_insertion rs6000_sched_insert_nops;
163 159
164 /* Support targetm.vectorize.builtin_mask_for_load. */ 160 /* Support targetm.vectorize.builtin_mask_for_load. */
165 static GTY(()) tree altivec_builtin_mask_for_load; 161 static GTY(()) tree altivec_builtin_mask_for_load;
166 162
167 /* Size of long double. */
168 int rs6000_long_double_type_size;
169
170 /* IEEE quad extended precision long double. */
171 int rs6000_ieeequad;
172
173 /* Nonzero to use AltiVec ABI. */
174 int rs6000_altivec_abi;
175
176 /* Nonzero if we want SPE SIMD instructions. */
177 int rs6000_spe;
178
179 /* Nonzero if we want SPE ABI extensions. */
180 int rs6000_spe_abi;
181
182 /* Nonzero if floating point operations are done in the GPRs. */
183 int rs6000_float_gprs = 0;
184
185 /* Nonzero if we want Darwin's struct-by-value-in-regs ABI. */
186 int rs6000_darwin64_abi;
187
188 /* Set to nonzero once AIX common-mode calls have been defined. */ 163 /* Set to nonzero once AIX common-mode calls have been defined. */
189 static GTY(()) int common_mode_defined; 164 static GTY(()) int common_mode_defined;
190 165
191 /* Label number of label created for -mrelocatable, to call to so we can 166 /* Label number of label created for -mrelocatable, to call to so we can
192 get the address of the GOT section */ 167 get the address of the GOT section */
193 int rs6000_pic_labelno; 168 static int rs6000_pic_labelno;
194 169
195 #ifdef USING_ELFOS_H 170 #ifdef USING_ELFOS_H
196 /* Which abi to adhere to */
197 const char *rs6000_abi_name;
198
199 /* Semantics of the small data area */
200 enum rs6000_sdata_type rs6000_sdata = SDATA_DATA;
201
202 /* Which small data model to use */
203 const char *rs6000_sdata_name = (char *)0;
204
205 /* Counter for labels which are to be placed in .fixup. */ 171 /* Counter for labels which are to be placed in .fixup. */
206 int fixuplabelno = 0; 172 int fixuplabelno = 0;
207 #endif 173 #endif
208 174
209 /* Bit size of immediate TLS offsets and string from which it is decoded. */
210 int rs6000_tls_size = 32;
211 const char *rs6000_tls_size_string;
212
213 /* ABI enumeration available for subtarget to use. */
214 enum rs6000_abi rs6000_current_abi;
215
216 /* Whether to use variant of AIX ABI for PowerPC64 Linux. */ 175 /* Whether to use variant of AIX ABI for PowerPC64 Linux. */
217 int dot_symbols; 176 int dot_symbols;
218
219 /* Debug flags */
220 const char *rs6000_debug_name;
221 int rs6000_debug_stack; /* debug stack applications */
222 int rs6000_debug_arg; /* debug argument handling */
223 int rs6000_debug_reg; /* debug register classes */
224 int rs6000_debug_addr; /* debug memory addressing */
225 int rs6000_debug_cost; /* debug rtx_costs */
226 177
227 /* Specify the machine mode that pointers have. After generation of rtl, the 178 /* Specify the machine mode that pointers have. After generation of rtl, the
228 compiler makes no further distinction between pointers and any other objects 179 compiler makes no further distinction between pointers and any other objects
229 of this machine mode. The type is unsigned since not all things that 180 of this machine mode. The type is unsigned since not all things that
230 include rs6000.h also include machmode.h. */ 181 include rs6000.h also include machmode.h. */
231 unsigned rs6000_pmode; 182 unsigned rs6000_pmode;
232 183
233 /* Width in bits of a pointer. */ 184 /* Width in bits of a pointer. */
234 unsigned rs6000_pointer_size; 185 unsigned rs6000_pointer_size;
235 186
187 #ifdef HAVE_AS_GNU_ATTRIBUTE
188 /* Flag whether floating point values have been passed/returned. */
189 static bool rs6000_passes_float;
190 /* Flag whether vector values have been passed/returned. */
191 static bool rs6000_passes_vector;
192 /* Flag whether small (<= 8 byte) structures have been returned. */
193 static bool rs6000_returns_struct;
194 #endif
236 195
237 /* Value is TRUE if register/mode pair is acceptable. */ 196 /* Value is TRUE if register/mode pair is acceptable. */
238 bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER]; 197 bool rs6000_hard_regno_mode_ok_p[NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
239 198
240 /* Maximum number of registers needed for a given register class and mode. */ 199 /* Maximum number of registers needed for a given register class and mode. */
250 static enum insn_code rs6000_vector_reload[NUM_MACHINE_MODES][2]; 209 static enum insn_code rs6000_vector_reload[NUM_MACHINE_MODES][2];
251 210
252 /* Built in types. */ 211 /* Built in types. */
253 tree rs6000_builtin_types[RS6000_BTI_MAX]; 212 tree rs6000_builtin_types[RS6000_BTI_MAX];
254 tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT]; 213 tree rs6000_builtin_decls[RS6000_BUILTIN_COUNT];
255
256 const char *rs6000_traceback_name;
257 static enum {
258 traceback_default = 0,
259 traceback_none,
260 traceback_part,
261 traceback_full
262 } rs6000_traceback;
263 214
264 /* Flag to say the TOC is initialized */ 215 /* Flag to say the TOC is initialized */
265 int toc_initialized; 216 int toc_initialized;
266 char toc_label_name[10]; 217 char toc_label_name[10];
267 218
272 static GTY(()) section *read_only_data_section; 223 static GTY(()) section *read_only_data_section;
273 static GTY(()) section *private_data_section; 224 static GTY(()) section *private_data_section;
274 static GTY(()) section *read_only_private_data_section; 225 static GTY(()) section *read_only_private_data_section;
275 static GTY(()) section *sdata2_section; 226 static GTY(()) section *sdata2_section;
276 static GTY(()) section *toc_section; 227 static GTY(()) section *toc_section;
277
278 /* Control alignment for fields within structures. */
279 /* String from -malign-XXXXX. */
280 int rs6000_alignment_flags;
281 228
282 /* True for any options that were explicitly set. */ 229 /* True for any options that were explicitly set. */
283 static struct { 230 static struct {
284 bool aix_struct_ret; /* True if -maix-struct-ret was used. */ 231 bool aix_struct_ret; /* True if -maix-struct-ret was used. */
285 bool alignment; /* True if -malign- was used. */ 232 bool alignment; /* True if -malign- was used. */
288 bool spe; /* True if -mspe= was used. */ 235 bool spe; /* True if -mspe= was used. */
289 bool float_gprs; /* True if -mfloat-gprs= was used. */ 236 bool float_gprs; /* True if -mfloat-gprs= was used. */
290 bool long_double; /* True if -mlong-double- was used. */ 237 bool long_double; /* True if -mlong-double- was used. */
291 bool ieee; /* True if -mabi=ieee/ibmlongdouble used. */ 238 bool ieee; /* True if -mabi=ieee/ibmlongdouble used. */
292 bool vrsave; /* True if -mvrsave was used. */ 239 bool vrsave; /* True if -mvrsave was used. */
240 bool cmodel; /* True if -mcmodel was used. */
293 } rs6000_explicit_options; 241 } rs6000_explicit_options;
294 242
295 struct builtin_description 243 struct builtin_description
296 { 244 {
297 /* mask is not const because we're going to alter it below. This 245 /* mask is not const because we're going to alter it below. This
314 /* Describe the alignment of a vector. */ 262 /* Describe the alignment of a vector. */
315 int rs6000_vector_align[NUM_MACHINE_MODES]; 263 int rs6000_vector_align[NUM_MACHINE_MODES];
316 264
317 /* Map selected modes to types for builtins. */ 265 /* Map selected modes to types for builtins. */
318 static GTY(()) tree builtin_mode_to_type[MAX_MACHINE_MODE][2]; 266 static GTY(()) tree builtin_mode_to_type[MAX_MACHINE_MODE][2];
267
268 /* What modes to automatically generate reciprocal divide estimate (fre) and
269 reciprocal sqrt (frsqrte) for. */
270 unsigned char rs6000_recip_bits[MAX_MACHINE_MODE];
271
272 /* Masks to determine which reciprocal estimate instructions to generate
273 automatically. */
274 enum rs6000_recip_mask {
275 RECIP_SF_DIV = 0x001, /* Use divide estimate */
276 RECIP_DF_DIV = 0x002,
277 RECIP_V4SF_DIV = 0x004,
278 RECIP_V2DF_DIV = 0x008,
279
280 RECIP_SF_RSQRT = 0x010, /* Use reciprocal sqrt estimate. */
281 RECIP_DF_RSQRT = 0x020,
282 RECIP_V4SF_RSQRT = 0x040,
283 RECIP_V2DF_RSQRT = 0x080,
284
285 /* Various combinations of flags for -mrecip=xxx. */
286 RECIP_NONE = 0,
287 RECIP_ALL = (RECIP_SF_DIV | RECIP_DF_DIV | RECIP_V4SF_DIV
288 | RECIP_V2DF_DIV | RECIP_SF_RSQRT | RECIP_DF_RSQRT
289 | RECIP_V4SF_RSQRT | RECIP_V2DF_RSQRT),
290
291 RECIP_HIGH_PRECISION = RECIP_ALL,
292
293 /* On low precision machines like the power5, don't enable double precision
294 reciprocal square root estimate, since it isn't accurate enough. */
295 RECIP_LOW_PRECISION = (RECIP_ALL & ~(RECIP_DF_RSQRT | RECIP_V2DF_RSQRT))
296 };
297
298 /* -mrecip options. */
299 static struct
300 {
301 const char *string; /* option name */
302 unsigned int mask; /* mask bits to set */
303 } recip_options[] = {
304 { "all", RECIP_ALL },
305 { "none", RECIP_NONE },
306 { "div", (RECIP_SF_DIV | RECIP_DF_DIV | RECIP_V4SF_DIV
307 | RECIP_V2DF_DIV) },
308 { "divf", (RECIP_SF_DIV | RECIP_V4SF_DIV) },
309 { "divd", (RECIP_DF_DIV | RECIP_V2DF_DIV) },
310 { "rsqrt", (RECIP_SF_RSQRT | RECIP_DF_RSQRT | RECIP_V4SF_RSQRT
311 | RECIP_V2DF_RSQRT) },
312 { "rsqrtf", (RECIP_SF_RSQRT | RECIP_V4SF_RSQRT) },
313 { "rsqrtd", (RECIP_DF_RSQRT | RECIP_V2DF_RSQRT) },
314 };
315
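Editor's note (illustration only, not part of this changeset): the recip_options table above maps each -mrecip suboption name to the RECIP_* bits it enables, so for example -mrecip=rsqrtd selects RECIP_DF_RSQRT | RECIP_V2DF_RSQRT. A minimal sketch of a lookup over that table follows; the helper name is hypothetical.

/* Editor's sketch (hypothetical helper): return the RECIP_* mask bits for
   one -mrecip suboption name, or 0 if the name is not in recip_options.  */
static unsigned int
recip_mask_for_name (const char *name)
{
  size_t i;

  for (i = 0; i < ARRAY_SIZE (recip_options); i++)
    if (strcmp (name, recip_options[i].string) == 0)
      return recip_options[i].mask;

  return 0;
}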
316 /* 2 argument gen function typedef. */
317 typedef rtx (*gen_2arg_fn_t) (rtx, rtx, rtx);
318
319 319
320 /* Target cpu costs. */ 320 /* Target cpu costs. */
321 321
322 struct processor_costs { 322 struct processor_costs {
323 const int mulsi; /* cost of SImode multiplication. */ 323 const int mulsi; /* cost of SImode multiplication. */
777 32, /* l1 cache */ 777 32, /* l1 cache */
778 128, /* l2 cache */ 778 128, /* l2 cache */
779 1, /* prefetch streams */ 779 1, /* prefetch streams */
780 }; 780 };
781 781
782 /* Instruction costs on AppliedMicro Titan processors. */
783 static const
784 struct processor_costs titan_cost = {
785 COSTS_N_INSNS (5), /* mulsi */
786 COSTS_N_INSNS (5), /* mulsi_const */
787 COSTS_N_INSNS (5), /* mulsi_const9 */
788 COSTS_N_INSNS (5), /* muldi */
789 COSTS_N_INSNS (18), /* divsi */
790 COSTS_N_INSNS (18), /* divdi */
791 COSTS_N_INSNS (10), /* fp */
792 COSTS_N_INSNS (10), /* dmul */
793 COSTS_N_INSNS (46), /* sdiv */
794 COSTS_N_INSNS (72), /* ddiv */
795 32, /* cache line size */
796 32, /* l1 cache */
797 512, /* l2 cache */
798 1, /* prefetch streams */
799 };
800
782 /* Instruction costs on POWER4 and POWER5 processors. */ 801 /* Instruction costs on POWER4 and POWER5 processors. */
783 static const 802 static const
784 struct processor_costs power4_cost = { 803 struct processor_costs power4_cost = {
785 COSTS_N_INSNS (3), /* mulsi */ 804 COSTS_N_INSNS (3), /* mulsi */
786 COSTS_N_INSNS (2), /* mulsi_const */ 805 COSTS_N_INSNS (2), /* mulsi_const */
867 #include "rs6000-builtin.def" 886 #include "rs6000-builtin.def"
868 }; 887 };
869 888
870 #undef RS6000_BUILTIN 889 #undef RS6000_BUILTIN
871 #undef RS6000_BUILTIN_EQUATE 890 #undef RS6000_BUILTIN_EQUATE
891
892 /* Support for -mveclibabi=<xxx> to control which vector library to use. */
893 static tree (*rs6000_veclib_handler) (tree, tree, tree);
872 894
873 895
874 static bool rs6000_function_ok_for_sibcall (tree, tree); 896 static bool rs6000_function_ok_for_sibcall (tree, tree);
875 static const char *rs6000_invalid_within_doloop (const_rtx); 897 static const char *rs6000_invalid_within_doloop (const_rtx);
876 static bool rs6000_legitimate_address_p (enum machine_mode, rtx, bool); 898 static bool rs6000_legitimate_address_p (enum machine_mode, rtx, bool);
908 static rtx rs6000_savres_routine_sym (rs6000_stack_t *, bool, bool, bool); 930 static rtx rs6000_savres_routine_sym (rs6000_stack_t *, bool, bool, bool);
909 static rtx rs6000_emit_stack_reset (rs6000_stack_t *, rtx, rtx, int, bool); 931 static rtx rs6000_emit_stack_reset (rs6000_stack_t *, rtx, rtx, int, bool);
910 static rtx rs6000_make_savres_rtx (rs6000_stack_t *, rtx, int, 932 static rtx rs6000_make_savres_rtx (rs6000_stack_t *, rtx, int,
911 enum machine_mode, bool, bool, bool); 933 enum machine_mode, bool, bool, bool);
912 static bool rs6000_reg_live_or_pic_offset_p (int); 934 static bool rs6000_reg_live_or_pic_offset_p (int);
935 static tree rs6000_builtin_vectorized_libmass (tree, tree, tree);
913 static tree rs6000_builtin_vectorized_function (tree, tree, tree); 936 static tree rs6000_builtin_vectorized_function (tree, tree, tree);
914 static int rs6000_savres_strategy (rs6000_stack_t *, bool, int, int);
915 static void rs6000_restore_saved_cr (rtx, int); 937 static void rs6000_restore_saved_cr (rtx, int);
938 static bool rs6000_output_addr_const_extra (FILE *, rtx);
916 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT); 939 static void rs6000_output_function_prologue (FILE *, HOST_WIDE_INT);
917 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT); 940 static void rs6000_output_function_epilogue (FILE *, HOST_WIDE_INT);
918 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT, 941 static void rs6000_output_mi_thunk (FILE *, tree, HOST_WIDE_INT, HOST_WIDE_INT,
919 tree); 942 tree);
920 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT); 943 static rtx rs6000_emit_set_long_const (rtx, HOST_WIDE_INT, HOST_WIDE_INT);
921 static bool rs6000_return_in_memory (const_tree, const_tree); 944 static bool rs6000_return_in_memory (const_tree, const_tree);
922 static rtx rs6000_function_value (const_tree, const_tree, bool); 945 static rtx rs6000_function_value (const_tree, const_tree, bool);
923 static void rs6000_file_start (void); 946 static void rs6000_file_start (void);
924 #if TARGET_ELF 947 #if TARGET_ELF
925 static int rs6000_elf_reloc_rw_mask (void); 948 static int rs6000_elf_reloc_rw_mask (void);
926 static void rs6000_elf_asm_out_constructor (rtx, int); 949 static void rs6000_elf_asm_out_constructor (rtx, int) ATTRIBUTE_UNUSED;
927 static void rs6000_elf_asm_out_destructor (rtx, int); 950 static void rs6000_elf_asm_out_destructor (rtx, int) ATTRIBUTE_UNUSED;
928 static void rs6000_elf_end_indicate_exec_stack (void) ATTRIBUTE_UNUSED; 951 static void rs6000_elf_file_end (void) ATTRIBUTE_UNUSED;
929 static void rs6000_elf_asm_init_sections (void); 952 static void rs6000_elf_asm_init_sections (void);
930 static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx, 953 static section *rs6000_elf_select_rtx_section (enum machine_mode, rtx,
931 unsigned HOST_WIDE_INT); 954 unsigned HOST_WIDE_INT);
932 static void rs6000_elf_encode_section_info (tree, rtx, int) 955 static void rs6000_elf_encode_section_info (tree, rtx, int)
933 ATTRIBUTE_UNUSED; 956 ATTRIBUTE_UNUSED;
950 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int); 973 static unsigned int rs6000_xcoff_section_type_flags (tree, const char *, int);
951 static void rs6000_xcoff_file_start (void); 974 static void rs6000_xcoff_file_start (void);
952 static void rs6000_xcoff_file_end (void); 975 static void rs6000_xcoff_file_end (void);
953 #endif 976 #endif
954 static int rs6000_variable_issue (FILE *, int, rtx, int); 977 static int rs6000_variable_issue (FILE *, int, rtx, int);
978 static int rs6000_register_move_cost (enum machine_mode,
979 reg_class_t, reg_class_t);
980 static int rs6000_memory_move_cost (enum machine_mode, reg_class_t, bool);
955 static bool rs6000_rtx_costs (rtx, int, int, int *, bool); 981 static bool rs6000_rtx_costs (rtx, int, int, int *, bool);
956 static bool rs6000_debug_rtx_costs (rtx, int, int, int *, bool); 982 static bool rs6000_debug_rtx_costs (rtx, int, int, int *, bool);
957 static int rs6000_debug_address_cost (rtx, bool); 983 static int rs6000_debug_address_cost (rtx, bool);
958 static int rs6000_adjust_cost (rtx, rtx, rtx, int); 984 static int rs6000_adjust_cost (rtx, rtx, rtx, int);
959 static int rs6000_debug_adjust_cost (rtx, rtx, rtx, int); 985 static int rs6000_debug_adjust_cost (rtx, rtx, rtx, int);
995 static tree rs6000_builtin_vec_perm (tree, tree *); 1021 static tree rs6000_builtin_vec_perm (tree, tree *);
996 static bool rs6000_builtin_support_vector_misalignment (enum 1022 static bool rs6000_builtin_support_vector_misalignment (enum
997 machine_mode, 1023 machine_mode,
998 const_tree, 1024 const_tree,
999 int, bool); 1025 int, bool);
1026 static int rs6000_builtin_vectorization_cost (enum vect_cost_for_stmt,
1027 tree, int);
1028 static enum machine_mode rs6000_preferred_simd_mode (enum machine_mode);
1000 1029
1001 static void def_builtin (int, const char *, tree, int); 1030 static void def_builtin (int, const char *, tree, int);
1002 static bool rs6000_vector_alignment_reachable (const_tree, bool); 1031 static bool rs6000_vector_alignment_reachable (const_tree, bool);
1003 static void rs6000_init_builtins (void); 1032 static void rs6000_init_builtins (void);
1004 static tree rs6000_builtin_decl (unsigned, bool); 1033 static tree rs6000_builtin_decl (unsigned, bool);
1043 static rtx altivec_expand_stv_builtin (enum insn_code, tree); 1072 static rtx altivec_expand_stv_builtin (enum insn_code, tree);
1044 static rtx altivec_expand_vec_init_builtin (tree, tree, rtx); 1073 static rtx altivec_expand_vec_init_builtin (tree, tree, rtx);
1045 static rtx altivec_expand_vec_set_builtin (tree); 1074 static rtx altivec_expand_vec_set_builtin (tree);
1046 static rtx altivec_expand_vec_ext_builtin (tree, rtx); 1075 static rtx altivec_expand_vec_ext_builtin (tree, rtx);
1047 static int get_element_number (tree, tree); 1076 static int get_element_number (tree, tree);
1077 static void rs6000_option_override (void);
1078 static void rs6000_option_init_struct (struct gcc_options *);
1079 static void rs6000_option_default_params (void);
1048 static bool rs6000_handle_option (size_t, const char *, int); 1080 static bool rs6000_handle_option (size_t, const char *, int);
1049 static void rs6000_parse_tls_size_option (void); 1081 static int rs6000_loop_align_max_skip (rtx);
1050 static void rs6000_parse_yes_no_option (const char *, const char *, int *); 1082 static void rs6000_parse_yes_no_option (const char *, const char *, int *);
1051 static int first_altivec_reg_to_save (void); 1083 static int first_altivec_reg_to_save (void);
1052 static unsigned int compute_vrsave_mask (void); 1084 static unsigned int compute_vrsave_mask (void);
1053 static void compute_save_world_info (rs6000_stack_t *info_ptr); 1085 static void compute_save_world_info (rs6000_stack_t *info_ptr);
1054 static void is_altivec_return_reg (rtx, void *); 1086 static void is_altivec_return_reg (rtx, void *);
1065 static rtx rs6000_got_sym (void); 1097 static rtx rs6000_got_sym (void);
1066 static int rs6000_tls_symbol_ref_1 (rtx *, void *); 1098 static int rs6000_tls_symbol_ref_1 (rtx *, void *);
1067 static const char *rs6000_get_some_local_dynamic_name (void); 1099 static const char *rs6000_get_some_local_dynamic_name (void);
1068 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *); 1100 static int rs6000_get_some_local_dynamic_name_1 (rtx *, void *);
1069 static rtx rs6000_complex_function_value (enum machine_mode); 1101 static rtx rs6000_complex_function_value (enum machine_mode);
1070 static rtx rs6000_spe_function_arg (CUMULATIVE_ARGS *, 1102 static rtx rs6000_spe_function_arg (const CUMULATIVE_ARGS *,
1071 enum machine_mode, tree); 1103 enum machine_mode, const_tree);
1072 static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *, 1104 static void rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *,
1073 HOST_WIDE_INT); 1105 HOST_WIDE_INT, int);
1074 static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *, 1106 static void rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *,
1075 tree, HOST_WIDE_INT); 1107 const_tree,
1108 HOST_WIDE_INT);
1076 static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *, 1109 static void rs6000_darwin64_record_arg_flush (CUMULATIVE_ARGS *,
1077 HOST_WIDE_INT, 1110 HOST_WIDE_INT,
1078 rtx[], int *); 1111 rtx[], int *);
1079 static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *, 1112 static void rs6000_darwin64_record_arg_recurse (CUMULATIVE_ARGS *,
1080 const_tree, HOST_WIDE_INT, 1113 const_tree, HOST_WIDE_INT,
1081 rtx[], int *); 1114 rtx[], int *);
1082 static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree, int, bool); 1115 static rtx rs6000_darwin64_record_arg (CUMULATIVE_ARGS *, const_tree, bool, bool);
1083 static rtx rs6000_mixed_function_arg (enum machine_mode, tree, int); 1116 static rtx rs6000_mixed_function_arg (enum machine_mode, const_tree, int);
1117 static void rs6000_function_arg_advance (CUMULATIVE_ARGS *, enum machine_mode,
1118 const_tree, bool);
1119 static rtx rs6000_function_arg (CUMULATIVE_ARGS *, enum machine_mode,
1120 const_tree, bool);
1121 static unsigned int rs6000_function_arg_boundary (enum machine_mode,
1122 const_tree);
1084 static void rs6000_move_block_from_reg (int regno, rtx x, int nregs); 1123 static void rs6000_move_block_from_reg (int regno, rtx x, int nregs);
1085 static void setup_incoming_varargs (CUMULATIVE_ARGS *, 1124 static void setup_incoming_varargs (CUMULATIVE_ARGS *,
1086 enum machine_mode, tree, 1125 enum machine_mode, tree,
1087 int *, int); 1126 int *, int);
1088 static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode, 1127 static bool rs6000_pass_by_reference (CUMULATIVE_ARGS *, enum machine_mode,
1116 1155
1117 rtx (*rs6000_legitimize_reload_address_ptr) (rtx, enum machine_mode, int, int, 1156 rtx (*rs6000_legitimize_reload_address_ptr) (rtx, enum machine_mode, int, int,
1118 int, int *) 1157 int, int *)
1119 = rs6000_legitimize_reload_address; 1158 = rs6000_legitimize_reload_address;
1120 1159
1160 static bool rs6000_mode_dependent_address_p (const_rtx);
1121 static bool rs6000_mode_dependent_address (const_rtx); 1161 static bool rs6000_mode_dependent_address (const_rtx);
1122 static bool rs6000_debug_mode_dependent_address (const_rtx); 1162 static bool rs6000_debug_mode_dependent_address (const_rtx);
1123 bool (*rs6000_mode_dependent_address_ptr) (const_rtx) 1163 static bool (*rs6000_mode_dependent_address_ptr) (const_rtx)
1124 = rs6000_mode_dependent_address; 1164 = rs6000_mode_dependent_address;
1125 1165
1126 static enum reg_class rs6000_secondary_reload_class (enum reg_class, 1166 static enum reg_class rs6000_secondary_reload_class (enum reg_class,
1127 enum machine_mode, rtx); 1167 enum machine_mode, rtx);
1128 static enum reg_class rs6000_debug_secondary_reload_class (enum reg_class, 1168 static enum reg_class rs6000_debug_secondary_reload_class (enum reg_class,
1159 bool (*rs6000_cannot_change_mode_class_ptr) (enum machine_mode, 1199 bool (*rs6000_cannot_change_mode_class_ptr) (enum machine_mode,
1160 enum machine_mode, 1200 enum machine_mode,
1161 enum reg_class) 1201 enum reg_class)
1162 = rs6000_cannot_change_mode_class; 1202 = rs6000_cannot_change_mode_class;
1163 1203
1164 static enum reg_class rs6000_secondary_reload (bool, rtx, enum reg_class, 1204 static reg_class_t rs6000_secondary_reload (bool, rtx, reg_class_t,
1165 enum machine_mode, 1205 enum machine_mode,
1166 struct secondary_reload_info *); 1206 struct secondary_reload_info *);
1167 1207
1168 static const enum reg_class *rs6000_ira_cover_classes (void); 1208 static const reg_class_t *rs6000_ira_cover_classes (void);
1169 1209
1170 const int INSN_NOT_AVAILABLE = -1; 1210 const int INSN_NOT_AVAILABLE = -1;
1171 static enum machine_mode rs6000_eh_return_filter_mode (void); 1211 static enum machine_mode rs6000_eh_return_filter_mode (void);
1172 static bool rs6000_can_eliminate (const int, const int); 1212 static bool rs6000_can_eliminate (const int, const int);
1213 static void rs6000_conditional_register_usage (void);
1173 static void rs6000_trampoline_init (rtx, tree, rtx); 1214 static void rs6000_trampoline_init (rtx, tree, rtx);
1174 1215
1175 /* Hash table stuff for keeping track of TOC entries. */ 1216 /* Hash table stuff for keeping track of TOC entries. */
1176 1217
1177 struct GTY(()) toc_hash_struct 1218 struct GTY(()) toc_hash_struct
1193 enum machine_mode mode[4]; /* return value + 3 arguments. */ 1234 enum machine_mode mode[4]; /* return value + 3 arguments. */
1194 unsigned char uns_p[4]; /* and whether the types are unsigned. */ 1235 unsigned char uns_p[4]; /* and whether the types are unsigned. */
1195 }; 1236 };
1196 1237
1197 static GTY ((param_is (struct builtin_hash_struct))) htab_t builtin_hash_table; 1238 static GTY ((param_is (struct builtin_hash_struct))) htab_t builtin_hash_table;
1239
1240 static bool rs6000_valid_attribute_p (tree, tree, tree, int);
1241 static void rs6000_function_specific_save (struct cl_target_option *);
1242 static void rs6000_function_specific_restore (struct cl_target_option *);
1243 static void rs6000_function_specific_print (FILE *, int,
1244 struct cl_target_option *);
1245 static bool rs6000_can_inline_p (tree, tree);
1246 static void rs6000_set_current_function (tree);
1247
1198 1248
1199 /* Default register names. */ 1249 /* Default register names. */
1200 char rs6000_reg_names[][8] = 1250 char rs6000_reg_names[][8] =
1201 { 1251 {
1202 "0", "1", "2", "3", "4", "5", "6", "7", 1252 "0", "1", "2", "3", "4", "5", "6", "7",
1207 "8", "9", "10", "11", "12", "13", "14", "15", 1257 "8", "9", "10", "11", "12", "13", "14", "15",
1208 "16", "17", "18", "19", "20", "21", "22", "23", 1258 "16", "17", "18", "19", "20", "21", "22", "23",
1209 "24", "25", "26", "27", "28", "29", "30", "31", 1259 "24", "25", "26", "27", "28", "29", "30", "31",
1210 "mq", "lr", "ctr","ap", 1260 "mq", "lr", "ctr","ap",
1211 "0", "1", "2", "3", "4", "5", "6", "7", 1261 "0", "1", "2", "3", "4", "5", "6", "7",
1212 "xer", 1262 "ca",
1213 /* AltiVec registers. */ 1263 /* AltiVec registers. */
1214 "0", "1", "2", "3", "4", "5", "6", "7", 1264 "0", "1", "2", "3", "4", "5", "6", "7",
1215 "8", "9", "10", "11", "12", "13", "14", "15", 1265 "8", "9", "10", "11", "12", "13", "14", "15",
1216 "16", "17", "18", "19", "20", "21", "22", "23", 1266 "16", "17", "18", "19", "20", "21", "22", "23",
1217 "24", "25", "26", "27", "28", "29", "30", "31", 1267 "24", "25", "26", "27", "28", "29", "30", "31",
1233 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15", 1283 "%f8", "%f9", "%f10", "%f11", "%f12", "%f13", "%f14", "%f15",
1234 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23", 1284 "%f16", "%f17", "%f18", "%f19", "%f20", "%f21", "%f22", "%f23",
1235 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31", 1285 "%f24", "%f25", "%f26", "%f27", "%f28", "%f29", "%f30", "%f31",
1236 "mq", "lr", "ctr", "ap", 1286 "mq", "lr", "ctr", "ap",
1237 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7", 1287 "%cr0", "%cr1", "%cr2", "%cr3", "%cr4", "%cr5", "%cr6", "%cr7",
1238 "xer", 1288 "ca",
1239 /* AltiVec registers. */ 1289 /* AltiVec registers. */
1240 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7", 1290 "%v0", "%v1", "%v2", "%v3", "%v4", "%v5", "%v6", "%v7",
1241 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15", 1291 "%v8", "%v9", "%v10", "%v11", "%v12", "%v13", "%v14", "%v15",
1242 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23", 1292 "%v16", "%v17", "%v18", "%v19", "%v20", "%v21", "%v22", "%v23",
1243 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31", 1293 "%v24", "%v25", "%v26", "%v27", "%v28", "%v29", "%v30", "%v31",
1262 #ifdef SUBTARGET_ATTRIBUTE_TABLE 1312 #ifdef SUBTARGET_ATTRIBUTE_TABLE
1263 SUBTARGET_ATTRIBUTE_TABLE, 1313 SUBTARGET_ATTRIBUTE_TABLE,
1264 #endif 1314 #endif
1265 { NULL, 0, 0, false, false, false, NULL } 1315 { NULL, 0, 0, false, false, false, NULL }
1266 }; 1316 };
1317
1318 /* Implement TARGET_OPTION_OPTIMIZATION_TABLE. */
1319 static const struct default_options rs6000_option_optimization_table[] =
1320 {
1321 { OPT_LEVELS_1_PLUS, OPT_fomit_frame_pointer, NULL, 1 },
1322 { OPT_LEVELS_NONE, 0, NULL, 0 }
1323 };
1267 1324
1268 #ifndef MASK_STRICT_ALIGN 1325 #ifndef MASK_STRICT_ALIGN
1269 #define MASK_STRICT_ALIGN 0 1326 #define MASK_STRICT_ALIGN 0
1270 #endif 1327 #endif
1271 #ifndef TARGET_PROFILE_KERNEL 1328 #ifndef TARGET_PROFILE_KERNEL
1333 #undef TARGET_ASM_FUNCTION_PROLOGUE 1390 #undef TARGET_ASM_FUNCTION_PROLOGUE
1334 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue 1391 #define TARGET_ASM_FUNCTION_PROLOGUE rs6000_output_function_prologue
1335 #undef TARGET_ASM_FUNCTION_EPILOGUE 1392 #undef TARGET_ASM_FUNCTION_EPILOGUE
1336 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue 1393 #define TARGET_ASM_FUNCTION_EPILOGUE rs6000_output_function_epilogue
1337 1394
1395 #undef TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA
1396 #define TARGET_ASM_OUTPUT_ADDR_CONST_EXTRA rs6000_output_addr_const_extra
1397
1338 #undef TARGET_LEGITIMIZE_ADDRESS 1398 #undef TARGET_LEGITIMIZE_ADDRESS
1339 #define TARGET_LEGITIMIZE_ADDRESS rs6000_legitimize_address 1399 #define TARGET_LEGITIMIZE_ADDRESS rs6000_legitimize_address
1340 1400
1341 #undef TARGET_SCHED_VARIABLE_ISSUE 1401 #undef TARGET_SCHED_VARIABLE_ISSUE
1342 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue 1402 #define TARGET_SCHED_VARIABLE_ISSUE rs6000_variable_issue
1381 #define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd 1441 #define TARGET_VECTORIZE_BUILTIN_MUL_WIDEN_ODD rs6000_builtin_mul_widen_odd
1382 #undef TARGET_VECTORIZE_BUILTIN_CONVERSION 1442 #undef TARGET_VECTORIZE_BUILTIN_CONVERSION
1383 #define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion 1443 #define TARGET_VECTORIZE_BUILTIN_CONVERSION rs6000_builtin_conversion
1384 #undef TARGET_VECTORIZE_BUILTIN_VEC_PERM 1444 #undef TARGET_VECTORIZE_BUILTIN_VEC_PERM
1385 #define TARGET_VECTORIZE_BUILTIN_VEC_PERM rs6000_builtin_vec_perm 1445 #define TARGET_VECTORIZE_BUILTIN_VEC_PERM rs6000_builtin_vec_perm
1386 #undef TARGET_SUPPORT_VECTOR_MISALIGNMENT 1446 #undef TARGET_VECTORIZE_SUPPORT_VECTOR_MISALIGNMENT
1387 #define TARGET_SUPPORT_VECTOR_MISALIGNMENT \ 1447 #define TARGET_VECTORIZE_SUPPORT_VECTOR_MISALIGNMENT \
1388 rs6000_builtin_support_vector_misalignment 1448 rs6000_builtin_support_vector_misalignment
1389 #undef TARGET_VECTOR_ALIGNMENT_REACHABLE 1449 #undef TARGET_VECTORIZE_VECTOR_ALIGNMENT_REACHABLE
1390 #define TARGET_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable 1450 #define TARGET_VECTORIZE_VECTOR_ALIGNMENT_REACHABLE rs6000_vector_alignment_reachable
1451 #undef TARGET_VECTORIZE_BUILTIN_VECTORIZATION_COST
1452 #define TARGET_VECTORIZE_BUILTIN_VECTORIZATION_COST \
1453 rs6000_builtin_vectorization_cost
1454 #undef TARGET_VECTORIZE_PREFERRED_SIMD_MODE
1455 #define TARGET_VECTORIZE_PREFERRED_SIMD_MODE \
1456 rs6000_preferred_simd_mode
1391 1457
1392 #undef TARGET_INIT_BUILTINS 1458 #undef TARGET_INIT_BUILTINS
1393 #define TARGET_INIT_BUILTINS rs6000_init_builtins 1459 #define TARGET_INIT_BUILTINS rs6000_init_builtins
1394 #undef TARGET_BUILTIN_DECL 1460 #undef TARGET_BUILTIN_DECL
1395 #define TARGET_BUILTIN_DECL rs6000_builtin_decl 1461 #define TARGET_BUILTIN_DECL rs6000_builtin_decl
1421 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall 1487 #define TARGET_FUNCTION_OK_FOR_SIBCALL rs6000_function_ok_for_sibcall
1422 1488
1423 #undef TARGET_INVALID_WITHIN_DOLOOP 1489 #undef TARGET_INVALID_WITHIN_DOLOOP
1424 #define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop 1490 #define TARGET_INVALID_WITHIN_DOLOOP rs6000_invalid_within_doloop
1425 1491
1492 #undef TARGET_REGISTER_MOVE_COST
1493 #define TARGET_REGISTER_MOVE_COST rs6000_register_move_cost
1494 #undef TARGET_MEMORY_MOVE_COST
1495 #define TARGET_MEMORY_MOVE_COST rs6000_memory_move_cost
1426 #undef TARGET_RTX_COSTS 1496 #undef TARGET_RTX_COSTS
1427 #define TARGET_RTX_COSTS rs6000_rtx_costs 1497 #define TARGET_RTX_COSTS rs6000_rtx_costs
1428 #undef TARGET_ADDRESS_COST 1498 #undef TARGET_ADDRESS_COST
1429 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0 1499 #define TARGET_ADDRESS_COST hook_int_rtx_bool_0
1430 1500
1456 #define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack 1526 #define TARGET_MUST_PASS_IN_STACK rs6000_must_pass_in_stack
1457 #undef TARGET_PASS_BY_REFERENCE 1527 #undef TARGET_PASS_BY_REFERENCE
1458 #define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference 1528 #define TARGET_PASS_BY_REFERENCE rs6000_pass_by_reference
1459 #undef TARGET_ARG_PARTIAL_BYTES 1529 #undef TARGET_ARG_PARTIAL_BYTES
1460 #define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes 1530 #define TARGET_ARG_PARTIAL_BYTES rs6000_arg_partial_bytes
1531 #undef TARGET_FUNCTION_ARG_ADVANCE
1532 #define TARGET_FUNCTION_ARG_ADVANCE rs6000_function_arg_advance
1533 #undef TARGET_FUNCTION_ARG
1534 #define TARGET_FUNCTION_ARG rs6000_function_arg
1535 #undef TARGET_FUNCTION_ARG_BOUNDARY
1536 #define TARGET_FUNCTION_ARG_BOUNDARY rs6000_function_arg_boundary
1461 1537
1462 #undef TARGET_BUILD_BUILTIN_VA_LIST 1538 #undef TARGET_BUILD_BUILTIN_VA_LIST
1463 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list 1539 #define TARGET_BUILD_BUILTIN_VA_LIST rs6000_build_builtin_va_list
1464 1540
1465 #undef TARGET_EXPAND_BUILTIN_VA_START 1541 #undef TARGET_EXPAND_BUILTIN_VA_START
1480 #undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN 1556 #undef TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN
1481 #define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn 1557 #define TARGET_INVALID_ARG_FOR_UNPROTOTYPED_FN invalid_arg_for_unprototyped_fn
1482 1558
1483 #undef TARGET_HANDLE_OPTION 1559 #undef TARGET_HANDLE_OPTION
1484 #define TARGET_HANDLE_OPTION rs6000_handle_option 1560 #define TARGET_HANDLE_OPTION rs6000_handle_option
1561
1562 #undef TARGET_ASM_LOOP_ALIGN_MAX_SKIP
1563 #define TARGET_ASM_LOOP_ALIGN_MAX_SKIP rs6000_loop_align_max_skip
1564
1565 #undef TARGET_OPTION_OVERRIDE
1566 #define TARGET_OPTION_OVERRIDE rs6000_option_override
1567
1568 #undef TARGET_OPTION_INIT_STRUCT
1569 #define TARGET_OPTION_INIT_STRUCT rs6000_option_init_struct
1570
1571 #undef TARGET_OPTION_DEFAULT_PARAMS
1572 #define TARGET_OPTION_DEFAULT_PARAMS rs6000_option_default_params
1573
1574 #undef TARGET_OPTION_OPTIMIZATION_TABLE
1575 #define TARGET_OPTION_OPTIMIZATION_TABLE rs6000_option_optimization_table
1485 1576
1486 #undef TARGET_VECTORIZE_BUILTIN_VECTORIZED_FUNCTION 1577 #undef TARGET_VECTORIZE_BUILTIN_VECTORIZED_FUNCTION
1487 #define TARGET_VECTORIZE_BUILTIN_VECTORIZED_FUNCTION \ 1578 #define TARGET_VECTORIZE_BUILTIN_VECTORIZED_FUNCTION \
1488 rs6000_builtin_vectorized_function 1579 rs6000_builtin_vectorized_function
1489 1580
1540 #define TARGET_IRA_COVER_CLASSES rs6000_ira_cover_classes 1631 #define TARGET_IRA_COVER_CLASSES rs6000_ira_cover_classes
1541 1632
1542 #undef TARGET_LEGITIMATE_ADDRESS_P 1633 #undef TARGET_LEGITIMATE_ADDRESS_P
1543 #define TARGET_LEGITIMATE_ADDRESS_P rs6000_legitimate_address_p 1634 #define TARGET_LEGITIMATE_ADDRESS_P rs6000_legitimate_address_p
1544 1635
1636 #undef TARGET_MODE_DEPENDENT_ADDRESS_P
1637 #define TARGET_MODE_DEPENDENT_ADDRESS_P rs6000_mode_dependent_address_p
1638
1545 #undef TARGET_CAN_ELIMINATE 1639 #undef TARGET_CAN_ELIMINATE
1546 #define TARGET_CAN_ELIMINATE rs6000_can_eliminate 1640 #define TARGET_CAN_ELIMINATE rs6000_can_eliminate
1547 1641
1642 #undef TARGET_CONDITIONAL_REGISTER_USAGE
1643 #define TARGET_CONDITIONAL_REGISTER_USAGE rs6000_conditional_register_usage
1644
1548 #undef TARGET_TRAMPOLINE_INIT 1645 #undef TARGET_TRAMPOLINE_INIT
1549 #define TARGET_TRAMPOLINE_INIT rs6000_trampoline_init 1646 #define TARGET_TRAMPOLINE_INIT rs6000_trampoline_init
1550 1647
1551 #undef TARGET_FUNCTION_VALUE 1648 #undef TARGET_FUNCTION_VALUE
1552 #define TARGET_FUNCTION_VALUE rs6000_function_value 1649 #define TARGET_FUNCTION_VALUE rs6000_function_value
1553 1650
1651 #undef TARGET_OPTION_VALID_ATTRIBUTE_P
1652 #define TARGET_OPTION_VALID_ATTRIBUTE_P rs6000_valid_attribute_p
1653
1654 #undef TARGET_OPTION_SAVE
1655 #define TARGET_OPTION_SAVE rs6000_function_specific_save
1656
1657 #undef TARGET_OPTION_RESTORE
1658 #define TARGET_OPTION_RESTORE rs6000_function_specific_restore
1659
1660 #undef TARGET_OPTION_PRINT
1661 #define TARGET_OPTION_PRINT rs6000_function_specific_print
1662
1663 #undef TARGET_CAN_INLINE_P
1664 #define TARGET_CAN_INLINE_P rs6000_can_inline_p
1665
1666 #undef TARGET_SET_CURRENT_FUNCTION
1667 #define TARGET_SET_CURRENT_FUNCTION rs6000_set_current_function
1668
1554 struct gcc_target targetm = TARGET_INITIALIZER; 1669 struct gcc_target targetm = TARGET_INITIALIZER;
1670
1671
1672 /* Simplifications for entries below. */
1673
1674 enum {
1675 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS,
1676 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
1677 };
1678
1679 /* Some OSs don't support saving the high part of 64-bit registers on context
1680 switch. Other OSs don't support saving Altivec registers. On those OSs, we
1681 don't touch the MASK_POWERPC64 or MASK_ALTIVEC settings; if the user wants
1682 either, the user must explicitly specify them and we won't interfere with
1683 the user's specification. */
1684
1685 enum {
1686 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
1687 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
1688 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
1689 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
1690 | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP
1691 | MASK_POPCNTD | MASK_VSX | MASK_ISEL | MASK_NO_UPDATE
1692 | MASK_RECIP_PRECISION)
1693 };
1694
1695 /* Masks for instructions set at various powerpc ISAs. */
1696 enum {
1697 ISA_2_1_MASKS = MASK_MFCRF,
1698 ISA_2_2_MASKS = (ISA_2_1_MASKS | MASK_POPCNTB),
1699 ISA_2_4_MASKS = (ISA_2_2_MASKS | MASK_FPRND),
1700
1701 /* For ISA 2.05, do not add MFPGPR, since it isn't in ISA 2.06, and don't add
1702 ALTIVEC, since in general it isn't a win on power6. In ISA 2.04, fsel,
1703 fre, fsqrt, etc. were no longer documented as optional. Group masks by
1704 server and embedded. */
1705 ISA_2_5_MASKS_EMBEDDED = (ISA_2_2_MASKS | MASK_CMPB | MASK_RECIP_PRECISION
1706 | MASK_PPC_GFXOPT | MASK_PPC_GPOPT),
1707 ISA_2_5_MASKS_SERVER = (ISA_2_5_MASKS_EMBEDDED | MASK_DFP),
1708
1709 /* For ISA 2.06, don't add ISEL, since in general it isn't a win, but
1710 altivec is a win so enable it. */
1711 ISA_2_6_MASKS_EMBEDDED = (ISA_2_5_MASKS_EMBEDDED | MASK_POPCNTD),
1712 ISA_2_6_MASKS_SERVER = (ISA_2_5_MASKS_SERVER | MASK_POPCNTD | MASK_ALTIVEC
1713 | MASK_VSX)
1714 };
1715
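Editor's note (illustration only, not part of this changeset): the lower ISA masks build on one another, so expanding the definitions above gives, for example, ISA_2_4_MASKS == MASK_MFCRF | MASK_POPCNTB | MASK_FPRND, while ISA_2_6_MASKS_SERVER adds MASK_POPCNTD, MASK_ALTIVEC and MASK_VSX on top of the 2.05 server set. A hypothetical check against the 2.06 server mask:

/* Editor's sketch (hypothetical helper): true if FLAGS contains every bit
   required by ISA_2_6_MASKS_SERVER.  */
static inline int
covers_isa_2_6_server (int flags)
{
  return (flags & ISA_2_6_MASKS_SERVER) == ISA_2_6_MASKS_SERVER;
}
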
1716 /* This table occasionally claims that a processor does not support a
1717 particular feature even though it does, but the feature is slower than the
1718 alternative. Thus, it shouldn't be relied on as a complete description of
1719 the processor's support.
1720
1721 Please keep this list in order, and don't forget to update the documentation
1722 in invoke.texi when adding a new processor or flag. */
1723
1724 struct rs6000_ptt
1725 {
1726 const char *const name; /* Canonical processor name. */
1727 const enum processor_type processor; /* Processor type enum value. */
1728 const int target_enable; /* Target flags to enable. */
1729 };
1730
1731 static struct rs6000_ptt const processor_target_table[] =
1732 {
1733 {"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1734 {"403", PROCESSOR_PPC403,
1735 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
1736 {"405", PROCESSOR_PPC405,
1737 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1738 {"405fp", PROCESSOR_PPC405,
1739 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
1740 {"440", PROCESSOR_PPC440,
1741 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1742 {"440fp", PROCESSOR_PPC440,
1743 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
1744 {"464", PROCESSOR_PPC440,
1745 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
1746 {"464fp", PROCESSOR_PPC440,
1747 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
1748 {"476", PROCESSOR_PPC476,
1749 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_PPC_GFXOPT | MASK_MFCRF
1750 | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_MULHW | MASK_DLMZB},
1751 {"476fp", PROCESSOR_PPC476,
1752 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POPCNTB
1753 | MASK_FPRND | MASK_CMPB | MASK_MULHW | MASK_DLMZB},
1754 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
1755 {"601", PROCESSOR_PPC601,
1756 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
1757 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1758 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1759 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1760 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1761 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1762 {"620", PROCESSOR_PPC620,
1763 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1764 {"630", PROCESSOR_PPC630,
1765 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1766 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1767 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
1768 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1769 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1770 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1771 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1772 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1773 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN
1774 | MASK_ISEL},
1775 /* 8548 has a dummy entry for now. */
1776 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN
1777 | MASK_ISEL},
1778 {"a2", PROCESSOR_PPCA2,
1779 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_POPCNTB
1780 | MASK_CMPB | MASK_NO_UPDATE },
1781 {"e300c2", PROCESSOR_PPCE300C2, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1782 {"e300c3", PROCESSOR_PPCE300C3, POWERPC_BASE_MASK},
1783 {"e500mc", PROCESSOR_PPCE500MC, POWERPC_BASE_MASK | MASK_PPC_GFXOPT
1784 | MASK_ISEL},
1785 {"e500mc64", PROCESSOR_PPCE500MC64, POWERPC_BASE_MASK | MASK_POWERPC64
1786 | MASK_PPC_GFXOPT | MASK_ISEL},
1787 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1788 {"970", PROCESSOR_POWER4,
1789 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1790 {"cell", PROCESSOR_CELL,
1791 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1792 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
1793 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
1794 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
1795 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
1796 {"G5", PROCESSOR_POWER4,
1797 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
1798 {"titan", PROCESSOR_TITAN,
1799 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
1800 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1801 {"power2", PROCESSOR_POWER,
1802 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1803 {"power3", PROCESSOR_PPC630,
1804 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1805 {"power4", PROCESSOR_POWER4,
1806 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1807 | MASK_MFCRF},
1808 {"power5", PROCESSOR_POWER5,
1809 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1810 | MASK_MFCRF | MASK_POPCNTB},
1811 {"power5+", PROCESSOR_POWER5,
1812 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1813 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
1814 {"power6", PROCESSOR_POWER6,
1815 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1816 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP
1817 | MASK_RECIP_PRECISION},
1818 {"power6x", PROCESSOR_POWER6,
1819 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
1820 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP
1821 | MASK_MFPGPR | MASK_RECIP_PRECISION},
1822 {"power7", PROCESSOR_POWER7, /* Don't add MASK_ISEL by default */
1823 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_MFCRF
1824 | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP | MASK_POPCNTD
1825 | MASK_VSX | MASK_RECIP_PRECISION},
1826 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
1827 {"powerpc64", PROCESSOR_POWERPC64,
1828 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
1829 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1830 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1831 {"rios2", PROCESSOR_RIOS2,
1832 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
1833 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1834 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
1835 {"rs64", PROCESSOR_RS64A,
1836 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
1837 };
1838
1839 /* Look up a processor name for -mcpu=xxx and -mtune=xxx. Return -1 if the
1840 name is invalid. */
1841
1842 static int
1843 rs6000_cpu_name_lookup (const char *name)
1844 {
1845 size_t i;
1846
1847 if (name != NULL)
1848 {
1849 for (i = 0; i < ARRAY_SIZE (processor_target_table); i++)
1850 if (! strcmp (name, processor_target_table[i].name))
1851 return (int)i;
1852 }
1853
1854 return -1;
1855 }
1856
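Editor's note (illustration only, not part of this changeset): rs6000_cpu_name_lookup returns an index into processor_target_table (or -1), so resolving a -mcpu=/-mtune= name to the flags it enables is a two-step lookup; the helper below is hypothetical.

/* Editor's sketch (hypothetical helper): flags enabled by a cpu name,
   or 0 when rs6000_cpu_name_lookup reports the name as invalid (-1).  */
static int
example_flags_for_cpu_name (const char *name)
{
  int idx = rs6000_cpu_name_lookup (name);

  return (idx < 0) ? 0 : processor_target_table[idx].target_enable;
}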
1555 1857
1556 /* Return number of consecutive hard regs needed starting at reg REGNO 1858 /* Return number of consecutive hard regs needed starting at reg REGNO
1557 to hold something of mode MODE. 1859 to hold something of mode MODE.
1558 This is ordinarily the length in words of a value of mode MODE 1860 This is ordinarily the length in words of a value of mode MODE
1559 but can be less for certain modes in special long registers. 1861 but can be less for certain modes in special long registers.
1643 1945
1644 /* The CR register can only hold CC modes. */ 1946 /* The CR register can only hold CC modes. */
1645 if (CR_REGNO_P (regno)) 1947 if (CR_REGNO_P (regno))
1646 return GET_MODE_CLASS (mode) == MODE_CC; 1948 return GET_MODE_CLASS (mode) == MODE_CC;
1647 1949
1648 if (XER_REGNO_P (regno)) 1950 if (CA_REGNO_P (regno))
1649 return mode == PSImode; 1951 return mode == BImode;
1650 1952
1651 /* AltiVec only in AltiVec registers. */ 1953 /* AltiVec only in AltiVec registers. */
1652 if (ALTIVEC_REGNO_P (regno)) 1954 if (ALTIVEC_REGNO_P (regno))
1653 return VECTOR_MEM_ALTIVEC_OR_VSX_P (mode); 1955 return VECTOR_MEM_ALTIVEC_OR_VSX_P (mode);
1654 1956
1733 2035
1734 fprintf (stderr, "%sregno = %d\n", comma, r); 2036 fprintf (stderr, "%sregno = %d\n", comma, r);
1735 } 2037 }
1736 } 2038 }
1737 2039
2040 #define DEBUG_FMT_D "%-32s= %d\n"
2041 #define DEBUG_FMT_S "%-32s= %s\n"
2042
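Editor's note (illustration only): with the "%-32s= ..." formats above, a call such as

  fprintf (stderr, DEBUG_FMT_S, "cpu", "power7");

pads the key out to 32 columns before the '=', so the -mdebug=reg report below (rs6000_debug_reg_global) prints aligned "name = value" lines.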
1738 /* Print various interesting information with -mdebug=reg. */ 2043 /* Print various interesting information with -mdebug=reg. */
1739 static void 2044 static void
1740 rs6000_debug_reg_global (void) 2045 rs6000_debug_reg_global (void)
1741 { 2046 {
2047 static const char *const tf[2] = { "false", "true" };
1742 const char *nl = (const char *)0; 2048 const char *nl = (const char *)0;
1743 int m; 2049 int m;
1744 char costly_num[20]; 2050 char costly_num[20];
1745 char nop_num[20]; 2051 char nop_num[20];
1746 const char *costly_str; 2052 const char *costly_str;
1747 const char *nop_str; 2053 const char *nop_str;
2054 const char *trace_str;
2055 const char *abi_str;
2056 const char *cmodel_str;
1748 2057
1749 /* Map enum rs6000_vector to string. */ 2058 /* Map enum rs6000_vector to string. */
1750 static const char *rs6000_debug_vector_unit[] = { 2059 static const char *rs6000_debug_vector_unit[] = {
1751 "none", 2060 "none",
1752 "altivec", 2061 "altivec",
1765 "vs"); 2074 "vs");
1766 rs6000_debug_reg_print (LR_REGNO, LR_REGNO, "lr"); 2075 rs6000_debug_reg_print (LR_REGNO, LR_REGNO, "lr");
1767 rs6000_debug_reg_print (CTR_REGNO, CTR_REGNO, "ctr"); 2076 rs6000_debug_reg_print (CTR_REGNO, CTR_REGNO, "ctr");
1768 rs6000_debug_reg_print (CR0_REGNO, CR7_REGNO, "cr"); 2077 rs6000_debug_reg_print (CR0_REGNO, CR7_REGNO, "cr");
1769 rs6000_debug_reg_print (MQ_REGNO, MQ_REGNO, "mq"); 2078 rs6000_debug_reg_print (MQ_REGNO, MQ_REGNO, "mq");
1770 rs6000_debug_reg_print (XER_REGNO, XER_REGNO, "xer"); 2079 rs6000_debug_reg_print (CA_REGNO, CA_REGNO, "ca");
1771 rs6000_debug_reg_print (VRSAVE_REGNO, VRSAVE_REGNO, "vrsave"); 2080 rs6000_debug_reg_print (VRSAVE_REGNO, VRSAVE_REGNO, "vrsave");
1772 rs6000_debug_reg_print (VSCR_REGNO, VSCR_REGNO, "vscr"); 2081 rs6000_debug_reg_print (VSCR_REGNO, VSCR_REGNO, "vscr");
1773 rs6000_debug_reg_print (SPE_ACC_REGNO, SPE_ACC_REGNO, "spe_a"); 2082 rs6000_debug_reg_print (SPE_ACC_REGNO, SPE_ACC_REGNO, "spe_a");
1774 rs6000_debug_reg_print (SPEFSCR_REGNO, SPEFSCR_REGNO, "spe_f"); 2083 rs6000_debug_reg_print (SPEFSCR_REGNO, SPEFSCR_REGNO, "spe_f");
1775 2084
1801 } 2110 }
1802 2111
1803 if (nl) 2112 if (nl)
1804 fputs (nl, stderr); 2113 fputs (nl, stderr);
1805 2114
2115 if (rs6000_recip_control)
2116 {
2117 fprintf (stderr, "\nReciprocal mask = 0x%x\n", rs6000_recip_control);
2118
2119 for (m = 0; m < NUM_MACHINE_MODES; ++m)
2120 if (rs6000_recip_bits[m])
2121 {
2122 fprintf (stderr,
2123 "Reciprocal estimate mode: %-5s divide: %s rsqrt: %s\n",
2124 GET_MODE_NAME (m),
2125 (RS6000_RECIP_AUTO_RE_P (m)
2126 ? "auto"
2127 : (RS6000_RECIP_HAVE_RE_P (m) ? "have" : "none")),
2128 (RS6000_RECIP_AUTO_RSQRTE_P (m)
2129 ? "auto"
2130 : (RS6000_RECIP_HAVE_RSQRTE_P (m) ? "have" : "none")));
2131 }
2132
2133 fputs ("\n", stderr);
2134 }
2135
2136 if (rs6000_cpu_index >= 0)
2137 fprintf (stderr, DEBUG_FMT_S, "cpu",
2138 processor_target_table[rs6000_cpu_index].name);
2139
2140 if (rs6000_tune_index >= 0)
2141 fprintf (stderr, DEBUG_FMT_S, "tune",
2142 processor_target_table[rs6000_tune_index].name);
2143
1806 switch (rs6000_sched_costly_dep) 2144 switch (rs6000_sched_costly_dep)
1807 { 2145 {
1808 case max_dep_latency: 2146 case max_dep_latency:
1809 costly_str = "max_dep_latency"; 2147 costly_str = "max_dep_latency";
1810 break; 2148 break;
1829 costly_str = costly_num; 2167 costly_str = costly_num;
1830 sprintf (costly_num, "%d", (int)rs6000_sched_costly_dep); 2168 sprintf (costly_num, "%d", (int)rs6000_sched_costly_dep);
1831 break; 2169 break;
1832 } 2170 }
1833 2171
2172 fprintf (stderr, DEBUG_FMT_S, "sched_costly_dep", costly_str);
2173
1834 switch (rs6000_sched_insert_nops) 2174 switch (rs6000_sched_insert_nops)
1835 { 2175 {
1836 case sched_finish_regroup_exact: 2176 case sched_finish_regroup_exact:
1837 nop_str = "sched_finish_regroup_exact"; 2177 nop_str = "sched_finish_regroup_exact";
1838 break; 2178 break;
1849 nop_str = nop_num; 2189 nop_str = nop_num;
1850 sprintf (nop_num, "%d", (int)rs6000_sched_insert_nops); 2190 sprintf (nop_num, "%d", (int)rs6000_sched_insert_nops);
1851 break; 2191 break;
1852 } 2192 }
1853 2193
1854 fprintf (stderr, 2194 fprintf (stderr, DEBUG_FMT_S, "sched_insert_nops", nop_str);
1855 "always_hint = %s\n" 2195
1856 "align_branch_targets = %s\n" 2196 switch (rs6000_sdata)
1857 "sched_restricted_insns_priority = %d\n" 2197 {
1858 "sched_costly_dep = %s\n" 2198 default:
1859 "sched_insert_nops = %s\n\n", 2199 case SDATA_NONE:
1860 rs6000_always_hint ? "true" : "false", 2200 break;
1861 rs6000_align_branch_targets ? "true" : "false", 2201
1862 (int)rs6000_sched_restricted_insns_priority, 2202 case SDATA_DATA:
1863 costly_str, nop_str); 2203 fprintf (stderr, DEBUG_FMT_S, "sdata", "data");
2204 break;
2205
2206 case SDATA_SYSV:
2207 fprintf (stderr, DEBUG_FMT_S, "sdata", "sysv");
2208 break;
2209
2210 case SDATA_EABI:
2211 fprintf (stderr, DEBUG_FMT_S, "sdata", "eabi");
2212 break;
2213
2214 }
2215
2216 switch (rs6000_traceback)
2217 {
2218 case traceback_default: trace_str = "default"; break;
2219 case traceback_none: trace_str = "none"; break;
2220 case traceback_part: trace_str = "part"; break;
2221 case traceback_full: trace_str = "full"; break;
2222 default: trace_str = "unknown"; break;
2223 }
2224
2225 fprintf (stderr, DEBUG_FMT_S, "traceback", trace_str);
2226
2227 switch (rs6000_current_cmodel)
2228 {
2229 case CMODEL_SMALL: cmodel_str = "small"; break;
2230 case CMODEL_MEDIUM: cmodel_str = "medium"; break;
2231 case CMODEL_LARGE: cmodel_str = "large"; break;
2232 default: cmodel_str = "unknown"; break;
2233 }
2234
2235 fprintf (stderr, DEBUG_FMT_S, "cmodel", cmodel_str);
2236
2237 switch (rs6000_current_abi)
2238 {
2239 case ABI_NONE: abi_str = "none"; break;
2240 case ABI_AIX: abi_str = "aix"; break;
2241 case ABI_V4: abi_str = "V4"; break;
2242 case ABI_DARWIN: abi_str = "darwin"; break;
2243 default: abi_str = "unknown"; break;
2244 }
2245
2246 fprintf (stderr, DEBUG_FMT_S, "abi", abi_str);
2247
2248 if (rs6000_altivec_abi)
2249 fprintf (stderr, DEBUG_FMT_S, "altivec_abi", "true");
2250
2251 if (rs6000_spe_abi)
2252 fprintf (stderr, DEBUG_FMT_S, "spe_abi", "true");
2253
2254 if (rs6000_darwin64_abi)
2255 fprintf (stderr, DEBUG_FMT_S, "darwin64_abi", "true");
2256
2257 if (rs6000_float_gprs)
2258 fprintf (stderr, DEBUG_FMT_S, "float_gprs", "true");
2259
2260 fprintf (stderr, DEBUG_FMT_S, "always_hint", tf[!!rs6000_always_hint]);
2261 fprintf (stderr, DEBUG_FMT_S, "align_branch",
2262 tf[!!rs6000_align_branch_targets]);
2263 fprintf (stderr, DEBUG_FMT_D, "tls_size", rs6000_tls_size);
2264 fprintf (stderr, DEBUG_FMT_D, "long_double_size",
2265 rs6000_long_double_type_size);
2266 fprintf (stderr, DEBUG_FMT_D, "sched_restricted_insns_priority",
2267 (int)rs6000_sched_restricted_insns_priority);
1864 } 2268 }
1865 2269
1866 /* Initialize the various global tables that are based on register size. */ 2270 /* Initialize the various global tables that are based on register size. */
1867 static void 2271 static void
1868 rs6000_init_hard_regno_mode_ok (void) 2272 rs6000_init_hard_regno_mode_ok (bool global_init_p)
1869 { 2273 {
1870 int r, m, c; 2274 int r, m, c;
1871 int align64; 2275 int align64;
1872 int align32; 2276 int align32;
1873 2277
1890 rs6000_regno_regclass[r] = CR_REGS; 2294 rs6000_regno_regclass[r] = CR_REGS;
1891 2295
1892 rs6000_regno_regclass[MQ_REGNO] = MQ_REGS; 2296 rs6000_regno_regclass[MQ_REGNO] = MQ_REGS;
1893 rs6000_regno_regclass[LR_REGNO] = LINK_REGS; 2297 rs6000_regno_regclass[LR_REGNO] = LINK_REGS;
1894 rs6000_regno_regclass[CTR_REGNO] = CTR_REGS; 2298 rs6000_regno_regclass[CTR_REGNO] = CTR_REGS;
1895 rs6000_regno_regclass[XER_REGNO] = XER_REGS; 2299 rs6000_regno_regclass[CA_REGNO] = CA_REGS;
1896 rs6000_regno_regclass[VRSAVE_REGNO] = VRSAVE_REGS; 2300 rs6000_regno_regclass[VRSAVE_REGNO] = VRSAVE_REGS;
1897 rs6000_regno_regclass[VSCR_REGNO] = VRSAVE_REGS; 2301 rs6000_regno_regclass[VSCR_REGNO] = VRSAVE_REGS;
1898 rs6000_regno_regclass[SPE_ACC_REGNO] = SPE_ACC_REGS; 2302 rs6000_regno_regclass[SPE_ACC_REGNO] = SPE_ACC_REGS;
1899 rs6000_regno_regclass[SPEFSCR_REGNO] = SPEFSCR_REGS; 2303 rs6000_regno_regclass[SPEFSCR_REGNO] = SPEFSCR_REGS;
1900 rs6000_regno_regclass[ARG_POINTER_REGNUM] = BASE_REGS; 2304 rs6000_regno_regclass[ARG_POINTER_REGNUM] = BASE_REGS;
2008 V4SF, wd = register class to use for V2DF, and ws = register class to 2412 V4SF, wd = register class to use for V2DF, and ws = register class to
2009 use for DF scalars. */ 2413 use for DF scalars. */
2010 rs6000_constraints[RS6000_CONSTRAINT_wa] = VSX_REGS; 2414 rs6000_constraints[RS6000_CONSTRAINT_wa] = VSX_REGS;
2011 rs6000_constraints[RS6000_CONSTRAINT_wf] = VSX_REGS; 2415 rs6000_constraints[RS6000_CONSTRAINT_wf] = VSX_REGS;
2012 rs6000_constraints[RS6000_CONSTRAINT_wd] = VSX_REGS; 2416 rs6000_constraints[RS6000_CONSTRAINT_wd] = VSX_REGS;
2013 if (TARGET_VSX_SCALAR_DOUBLE) 2417 rs6000_constraints[RS6000_CONSTRAINT_ws] = (TARGET_VSX_SCALAR_MEMORY
2014 rs6000_constraints[RS6000_CONSTRAINT_ws] = VSX_REGS; 2418 ? VSX_REGS
2419 : FLOAT_REGS);
2015 } 2420 }
2016 2421
2017 if (TARGET_ALTIVEC) 2422 if (TARGET_ALTIVEC)
2018 rs6000_constraints[RS6000_CONSTRAINT_v] = ALTIVEC_REGS; 2423 rs6000_constraints[RS6000_CONSTRAINT_v] = ALTIVEC_REGS;
2019 2424
2087 } 2492 }
2088 2493
2089 if (TARGET_E500_DOUBLE) 2494 if (TARGET_E500_DOUBLE)
2090 rs6000_class_max_nregs[DFmode][GENERAL_REGS] = 1; 2495 rs6000_class_max_nregs[DFmode][GENERAL_REGS] = 1;
2091 2496
2092 if (TARGET_DEBUG_REG) 2497 /* Calculate which modes to automatically generate code to use the
2093 rs6000_debug_reg_global (); 2498 reciprocal divide and square root instructions. In the future, possibly
2499 automatically generate the instructions even if the user did not specify
2500 -mrecip. The older machines' double-precision reciprocal sqrt estimate is
2501 not accurate enough. */
2502 memset (rs6000_recip_bits, 0, sizeof (rs6000_recip_bits));
2503 if (TARGET_FRES)
2504 rs6000_recip_bits[SFmode] = RS6000_RECIP_MASK_HAVE_RE;
2505 if (TARGET_FRE)
2506 rs6000_recip_bits[DFmode] = RS6000_RECIP_MASK_HAVE_RE;
2507 if (VECTOR_UNIT_ALTIVEC_OR_VSX_P (V4SFmode))
2508 rs6000_recip_bits[V4SFmode] = RS6000_RECIP_MASK_HAVE_RE;
2509 if (VECTOR_UNIT_VSX_P (V2DFmode))
2510 rs6000_recip_bits[V2DFmode] = RS6000_RECIP_MASK_HAVE_RE;
2511
2512 if (TARGET_FRSQRTES)
2513 rs6000_recip_bits[SFmode] |= RS6000_RECIP_MASK_HAVE_RSQRTE;
2514 if (TARGET_FRSQRTE)
2515 rs6000_recip_bits[DFmode] |= RS6000_RECIP_MASK_HAVE_RSQRTE;
2516 if (VECTOR_UNIT_ALTIVEC_OR_VSX_P (V4SFmode))
2517 rs6000_recip_bits[V4SFmode] |= RS6000_RECIP_MASK_HAVE_RSQRTE;
2518 if (VECTOR_UNIT_VSX_P (V2DFmode))
2519 rs6000_recip_bits[V2DFmode] |= RS6000_RECIP_MASK_HAVE_RSQRTE;
2520
2521 if (rs6000_recip_control)
2522 {
2523 if (!flag_finite_math_only)
2524 warning (0, "-mrecip requires -ffinite-math or -ffast-math");
2525 if (flag_trapping_math)
2526 warning (0, "-mrecip requires -fno-trapping-math or -ffast-math");
2527 if (!flag_reciprocal_math)
2528 warning (0, "-mrecip requires -freciprocal-math or -ffast-math");
2529 if (flag_finite_math_only && !flag_trapping_math && flag_reciprocal_math)
2530 {
2531 if (RS6000_RECIP_HAVE_RE_P (SFmode)
2532 && (rs6000_recip_control & RECIP_SF_DIV) != 0)
2533 rs6000_recip_bits[SFmode] |= RS6000_RECIP_MASK_AUTO_RE;
2534
2535 if (RS6000_RECIP_HAVE_RE_P (DFmode)
2536 && (rs6000_recip_control & RECIP_DF_DIV) != 0)
2537 rs6000_recip_bits[DFmode] |= RS6000_RECIP_MASK_AUTO_RE;
2538
2539 if (RS6000_RECIP_HAVE_RE_P (V4SFmode)
2540 && (rs6000_recip_control & RECIP_V4SF_DIV) != 0)
2541 rs6000_recip_bits[V4SFmode] |= RS6000_RECIP_MASK_AUTO_RE;
2542
2543 if (RS6000_RECIP_HAVE_RE_P (V2DFmode)
2544 && (rs6000_recip_control & RECIP_V2DF_DIV) != 0)
2545 rs6000_recip_bits[V2DFmode] |= RS6000_RECIP_MASK_AUTO_RE;
2546
2547 if (RS6000_RECIP_HAVE_RSQRTE_P (SFmode)
2548 && (rs6000_recip_control & RECIP_SF_RSQRT) != 0)
2549 rs6000_recip_bits[SFmode] |= RS6000_RECIP_MASK_AUTO_RSQRTE;
2550
2551 if (RS6000_RECIP_HAVE_RSQRTE_P (DFmode)
2552 && (rs6000_recip_control & RECIP_DF_RSQRT) != 0)
2553 rs6000_recip_bits[DFmode] |= RS6000_RECIP_MASK_AUTO_RSQRTE;
2554
2555 if (RS6000_RECIP_HAVE_RSQRTE_P (V4SFmode)
2556 && (rs6000_recip_control & RECIP_V4SF_RSQRT) != 0)
2557 rs6000_recip_bits[V4SFmode] |= RS6000_RECIP_MASK_AUTO_RSQRTE;
2558
2559 if (RS6000_RECIP_HAVE_RSQRTE_P (V2DFmode)
2560 && (rs6000_recip_control & RECIP_V2DF_RSQRT) != 0)
2561 rs6000_recip_bits[V2DFmode] |= RS6000_RECIP_MASK_AUTO_RSQRTE;
2562 }
2563 }
2564
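Each entry in rs6000_recip_bits above carries two independent facts per mode: whether the hardware has an estimate instruction at all (the HAVE bits) and whether the compiler has been given permission to use it automatically (the AUTO bits, set only when the relevant -mrecip and math flags line up). A minimal standalone sketch of the same bookkeeping, using invented mask values rather than the real RS6000_RECIP_MASK_* definitions:

    #include <stdio.h>

    /* Hypothetical stand-ins for the real RS6000_RECIP_MASK_* bits.  */
    #define MASK_HAVE_RE      0x1   /* hardware has a reciprocal estimate */
    #define MASK_AUTO_RE      0x2   /* compiler may use it for divides    */
    #define MASK_HAVE_RSQRTE  0x4   /* hardware has an rsqrt estimate     */
    #define MASK_AUTO_RSQRTE  0x8   /* compiler may use it for sqrt       */

    int
    main (void)
    {
      const char *names[2] = { "SF", "DF" };
      unsigned bits[2];
      int m;

      /* "Hardware" support: single precision has both estimate insns,
         double precision only the divide estimate.  */
      bits[0] = MASK_HAVE_RE | MASK_HAVE_RSQRTE;
      bits[1] = MASK_HAVE_RE;

      /* The user asked for automatic use; set AUTO only where HAVE is set.  */
      for (m = 0; m < 2; m++)
        {
          if (bits[m] & MASK_HAVE_RE)
            bits[m] |= MASK_AUTO_RE;
          if (bits[m] & MASK_HAVE_RSQRTE)
            bits[m] |= MASK_AUTO_RSQRTE;
        }

      for (m = 0; m < 2; m++)
        printf ("%s: divide=%s rsqrt=%s\n", names[m],
                (bits[m] & MASK_AUTO_RE) ? "auto" : "none",
                (bits[m] & MASK_AUTO_RSQRTE) ? "auto" : "none");
      return 0;
    }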
2565 if (global_init_p || TARGET_DEBUG_TARGET)
2566 {
2567 if (TARGET_DEBUG_REG)
2568 rs6000_debug_reg_global ();
2569
2570 if (TARGET_DEBUG_COST || TARGET_DEBUG_REG)
2571 fprintf (stderr,
2572 "SImode variable mult cost = %d\n"
2573 "SImode constant mult cost = %d\n"
2574 "SImode short constant mult cost = %d\n"
2575 "DImode multipliciation cost = %d\n"
2576 "SImode division cost = %d\n"
2577 "DImode division cost = %d\n"
2578 "Simple fp operation cost = %d\n"
2579 "DFmode multiplication cost = %d\n"
2580 "SFmode division cost = %d\n"
2581 "DFmode division cost = %d\n"
2582 "cache line size = %d\n"
2583 "l1 cache size = %d\n"
2584 "l2 cache size = %d\n"
2585 "simultaneous prefetches = %d\n"
2586 "\n",
2587 rs6000_cost->mulsi,
2588 rs6000_cost->mulsi_const,
2589 rs6000_cost->mulsi_const9,
2590 rs6000_cost->muldi,
2591 rs6000_cost->divsi,
2592 rs6000_cost->divdi,
2593 rs6000_cost->fp,
2594 rs6000_cost->dmul,
2595 rs6000_cost->sdiv,
2596 rs6000_cost->ddiv,
2597 rs6000_cost->cache_line_size,
2598 rs6000_cost->l1_cache_size,
2599 rs6000_cost->l2_cache_size,
2600 rs6000_cost->simultaneous_prefetches);
2601 }
2094 } 2602 }
2095 2603
2096 #if TARGET_MACHO 2604 #if TARGET_MACHO
2097 /* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS. */ 2605 /* The Darwin version of SUBTARGET_OVERRIDE_OPTIONS. */
2098 2606
2101 { 2609 {
2102 /* The Darwin ABI always includes AltiVec, can't be (validly) turned 2610 /* The Darwin ABI always includes AltiVec, can't be (validly) turned
2103 off. */ 2611 off. */
2104 rs6000_altivec_abi = 1; 2612 rs6000_altivec_abi = 1;
2105 TARGET_ALTIVEC_VRSAVE = 1; 2613 TARGET_ALTIVEC_VRSAVE = 1;
2106 if (DEFAULT_ABI == ABI_DARWIN) 2614
2107 { 2615 if (DEFAULT_ABI == ABI_DARWIN
2108 if (MACHO_DYNAMIC_NO_PIC_P) 2616 && TARGET_64BIT)
2109 { 2617 darwin_one_byte_bool = 1;
2110 if (flag_pic) 2618
2111 warning (0, "-mdynamic-no-pic overrides -fpic or -fPIC");
2112 flag_pic = 0;
2113 }
2114 else if (flag_pic == 1)
2115 {
2116 flag_pic = 2;
2117 }
2118 }
2119 if (TARGET_64BIT && ! TARGET_POWERPC64) 2619 if (TARGET_64BIT && ! TARGET_POWERPC64)
2120 { 2620 {
2121 target_flags |= MASK_POWERPC64; 2621 target_flags |= MASK_POWERPC64;
2122 warning (0, "-m64 requires PowerPC64 architecture, enabling"); 2622 warning (0, "-m64 requires PowerPC64 architecture, enabling");
2123 } 2623 }
2153 2653
2154 #ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE 2654 #ifndef RS6000_DEFAULT_LONG_DOUBLE_SIZE
2155 #define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64 2655 #define RS6000_DEFAULT_LONG_DOUBLE_SIZE 64
2156 #endif 2656 #endif
2157 2657
2158 /* Override command line options. Mostly we process the processor 2658 /* Override command line options. Mostly we process the processor type and
2159 type and sometimes adjust other TARGET_ options. */ 2659 sometimes adjust other TARGET_ options. */
2160 2660
2161 void 2661 static bool
2162 rs6000_override_options (const char *default_cpu) 2662 rs6000_option_override_internal (bool global_init_p)
2163 { 2663 {
2164 size_t i, j; 2664 bool ret = true;
2165 struct rs6000_cpu_select *ptr; 2665 const char *default_cpu = OPTION_TARGET_CPU_DEFAULT;
2166 int set_masks; 2666 int set_masks;
2167 2667 int cpu_index;
2168 /* Simplifications for entries below. */ 2668 int tune_index;
2169 2669 struct cl_target_option *main_target_opt
2170 enum { 2670 = ((global_init_p || target_option_default_node == NULL)
2171 POWERPC_BASE_MASK = MASK_POWERPC | MASK_NEW_MNEMONICS, 2671 ? NULL : TREE_TARGET_OPTION (target_option_default_node));
2172 POWERPC_7400_MASK = POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_ALTIVEC
2173 };
2174
2175 /* This table occasionally claims that a processor does not support
2176 a particular feature even though it does, but the feature is slower
2177 than the alternative. Thus, it shouldn't be relied on as a
2178 complete description of the processor's support.
2179
2180 Please keep this list in order, and don't forget to update the
2181 documentation in invoke.texi when adding a new processor or
2182 flag. */
2183 static struct ptt
2184 {
2185 const char *const name; /* Canonical processor name. */
2186 const enum processor_type processor; /* Processor type enum value. */
2187 const int target_enable; /* Target flags to enable. */
2188 } const processor_target_table[]
2189 = {{"401", PROCESSOR_PPC403, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
2190 {"403", PROCESSOR_PPC403,
2191 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_STRICT_ALIGN},
2192 {"405", PROCESSOR_PPC405,
2193 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
2194 {"405fp", PROCESSOR_PPC405,
2195 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
2196 {"440", PROCESSOR_PPC440,
2197 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
2198 {"440fp", PROCESSOR_PPC440,
2199 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
2200 {"464", PROCESSOR_PPC440,
2201 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_MULHW | MASK_DLMZB},
2202 {"464fp", PROCESSOR_PPC440,
2203 POWERPC_BASE_MASK | MASK_MULHW | MASK_DLMZB},
2204 {"476", PROCESSOR_PPC476,
2205 POWERPC_BASE_MASK | MASK_SOFT_FLOAT | MASK_PPC_GFXOPT | MASK_MFCRF
2206 | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_MULHW | MASK_DLMZB},
2207 {"476fp", PROCESSOR_PPC476,
2208 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_MFCRF | MASK_POPCNTB
2209 | MASK_FPRND | MASK_CMPB | MASK_MULHW | MASK_DLMZB},
2210 {"505", PROCESSOR_MPCCORE, POWERPC_BASE_MASK},
2211 {"601", PROCESSOR_PPC601,
2212 MASK_POWER | POWERPC_BASE_MASK | MASK_MULTIPLE | MASK_STRING},
2213 {"602", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
2214 {"603", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
2215 {"603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
2216 {"604", PROCESSOR_PPC604, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
2217 {"604e", PROCESSOR_PPC604e, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
2218 {"620", PROCESSOR_PPC620,
2219 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
2220 {"630", PROCESSOR_PPC630,
2221 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
2222 {"740", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
2223 {"7400", PROCESSOR_PPC7400, POWERPC_7400_MASK},
2224 {"7450", PROCESSOR_PPC7450, POWERPC_7400_MASK},
2225 {"750", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
2226 {"801", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
2227 {"821", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
2228 {"823", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
2229 {"8540", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN
2230 | MASK_ISEL},
2231 /* 8548 has a dummy entry for now. */
2232 {"8548", PROCESSOR_PPC8540, POWERPC_BASE_MASK | MASK_STRICT_ALIGN
2233 | MASK_ISEL},
2234 {"a2", PROCESSOR_PPCA2,
2235 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_POPCNTB
2236 | MASK_CMPB | MASK_NO_UPDATE },
2237 {"e300c2", PROCESSOR_PPCE300C2, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
2238 {"e300c3", PROCESSOR_PPCE300C3, POWERPC_BASE_MASK},
2239 {"e500mc", PROCESSOR_PPCE500MC, POWERPC_BASE_MASK | MASK_PPC_GFXOPT
2240 | MASK_ISEL},
2241 {"e500mc64", PROCESSOR_PPCE500MC64, POWERPC_BASE_MASK | MASK_POWERPC64
2242 | MASK_PPC_GFXOPT | MASK_ISEL},
2243 {"860", PROCESSOR_MPCCORE, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
2244 {"970", PROCESSOR_POWER4,
2245 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
2246 {"cell", PROCESSOR_CELL,
2247 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
2248 {"common", PROCESSOR_COMMON, MASK_NEW_MNEMONICS},
2249 {"ec603e", PROCESSOR_PPC603, POWERPC_BASE_MASK | MASK_SOFT_FLOAT},
2250 {"G3", PROCESSOR_PPC750, POWERPC_BASE_MASK | MASK_PPC_GFXOPT},
2251 {"G4", PROCESSOR_PPC7450, POWERPC_7400_MASK},
2252 {"G5", PROCESSOR_POWER4,
2253 POWERPC_7400_MASK | MASK_PPC_GPOPT | MASK_MFCRF | MASK_POWERPC64},
2254 {"power", PROCESSOR_POWER, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
2255 {"power2", PROCESSOR_POWER,
2256 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
2257 {"power3", PROCESSOR_PPC630,
2258 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
2259 {"power4", PROCESSOR_POWER4,
2260 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
2261 | MASK_MFCRF},
2262 {"power5", PROCESSOR_POWER5,
2263 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
2264 | MASK_MFCRF | MASK_POPCNTB},
2265 {"power5+", PROCESSOR_POWER5,
2266 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
2267 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND},
2268 {"power6", PROCESSOR_POWER6,
2269 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
2270 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP},
2271 {"power6x", PROCESSOR_POWER6,
2272 POWERPC_BASE_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_PPC_GFXOPT
2273 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP
2274 | MASK_MFPGPR},
2275 {"power7", PROCESSOR_POWER7,
2276 POWERPC_7400_MASK | MASK_POWERPC64 | MASK_PPC_GPOPT | MASK_MFCRF
2277 | MASK_POPCNTB | MASK_FPRND | MASK_CMPB | MASK_DFP | MASK_POPCNTD
2278 | MASK_VSX}, /* Don't add MASK_ISEL by default */
2279 {"powerpc", PROCESSOR_POWERPC, POWERPC_BASE_MASK},
2280 {"powerpc64", PROCESSOR_POWERPC64,
2281 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64},
2282 {"rios", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
2283 {"rios1", PROCESSOR_RIOS1, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
2284 {"rios2", PROCESSOR_RIOS2,
2285 MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING},
2286 {"rsc", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
2287 {"rsc1", PROCESSOR_PPC601, MASK_POWER | MASK_MULTIPLE | MASK_STRING},
2288 {"rs64", PROCESSOR_RS64A,
2289 POWERPC_BASE_MASK | MASK_PPC_GFXOPT | MASK_POWERPC64}
2290 };
2291
2292 const size_t ptt_size = ARRAY_SIZE (processor_target_table);
2293
2294 /* Some OSs don't support saving the high part of 64-bit registers on
2295 context switch. Other OSs don't support saving Altivec registers.
2296 On those OSs, we don't touch the MASK_POWERPC64 or MASK_ALTIVEC
2297 settings; if the user wants either, the user must explicitly specify
2298 them and we won't interfere with the user's specification. */
2299
2300 enum {
2301 POWER_MASKS = MASK_POWER | MASK_POWER2 | MASK_MULTIPLE | MASK_STRING,
2302 POWERPC_MASKS = (POWERPC_BASE_MASK | MASK_PPC_GPOPT | MASK_STRICT_ALIGN
2303 | MASK_PPC_GFXOPT | MASK_POWERPC64 | MASK_ALTIVEC
2304 | MASK_MFCRF | MASK_POPCNTB | MASK_FPRND | MASK_MULHW
2305 | MASK_DLMZB | MASK_CMPB | MASK_MFPGPR | MASK_DFP
2306 | MASK_POPCNTD | MASK_VSX | MASK_ISEL | MASK_NO_UPDATE)
2307 };
2308 2672
2309 /* Numerous experiments show that IRA based loop pressure 2673 /* Numerous experiments show that IRA based loop pressure
2310 calculation works better for RTL loop invariant motion on targets 2674 calculation works better for RTL loop invariant motion on targets
2311 with enough (>= 32) registers. It is an expensive optimization. 2675 with enough (>= 32) registers. It is an expensive optimization.
2312 So it is on only for peak performance. */ 2676 So it is on only for peak performance. */
2313 if (optimize >= 3) 2677 if (optimize >= 3 && global_init_p)
2314 flag_ira_loop_pressure = 1; 2678 flag_ira_loop_pressure = 1;
2315 2679
2316 /* Set the pointer size. */ 2680 /* Set the pointer size. */
2317 if (TARGET_64BIT) 2681 if (TARGET_64BIT)
2318 { 2682 {
2337 2701
2338 /* Don't override by the processor default if given explicitly. */ 2702 /* Don't override by the processor default if given explicitly. */
2339 set_masks &= ~target_flags_explicit; 2703 set_masks &= ~target_flags_explicit;
2340 2704
2341 /* Identify the processor type. */ 2705 /* Identify the processor type. */
2342 rs6000_select[0].string = default_cpu; 2706 if (!default_cpu)
2343 rs6000_cpu = TARGET_POWERPC64 ? PROCESSOR_DEFAULT64 : PROCESSOR_DEFAULT; 2707 {
2344 2708 if (TARGET_POWERPC64)
2345 for (i = 0; i < ARRAY_SIZE (rs6000_select); i++) 2709 default_cpu = "powerpc64";
2346 { 2710 else if (TARGET_POWERPC)
2347 ptr = &rs6000_select[i]; 2711 default_cpu = "powerpc";
2348 if (ptr->string != (char *)0 && ptr->string[0] != '\0') 2712 }
2349 { 2713
2350 for (j = 0; j < ptt_size; j++) 2714 /* Process the -mcpu=<xxx> and -mtune=<xxx> argument. If the user changed
2351 if (! strcmp (ptr->string, processor_target_table[j].name)) 2715 the cpu in a target attribute or pragma, but did not specify a tuning
2352 { 2716 option, use the cpu for the tuning option rather than the option specified
2353 if (ptr->set_tune_p) 2717 with -mtune on the command line. */
2354 rs6000_cpu = processor_target_table[j].processor; 2718 if (rs6000_cpu_index > 0)
2355 2719 cpu_index = rs6000_cpu_index;
2356 if (ptr->set_arch_p) 2720 else if (main_target_opt != NULL && main_target_opt->x_rs6000_cpu_index > 0)
2357 { 2721 rs6000_cpu_index = cpu_index = main_target_opt->x_rs6000_cpu_index;
2358 target_flags &= ~set_masks; 2722 else
2359 target_flags |= (processor_target_table[j].target_enable 2723 rs6000_cpu_index = cpu_index = rs6000_cpu_name_lookup (default_cpu);
2360 & set_masks); 2724
2361 } 2725 if (rs6000_tune_index > 0)
2362 break; 2726 tune_index = rs6000_tune_index;
2363 } 2727 else
2364 2728 rs6000_tune_index = tune_index = cpu_index;
2365 if (j == ptt_size) 2729
2366 error ("bad value (%s) for %s switch", ptr->string, ptr->name); 2730 if (cpu_index >= 0)
2367 } 2731 {
2368 } 2732 target_flags &= ~set_masks;
2733 target_flags |= (processor_target_table[cpu_index].target_enable
2734 & set_masks);
2735 }
2736
2737 rs6000_cpu = ((tune_index >= 0)
2738 ? processor_target_table[tune_index].processor
2739 : (TARGET_POWERPC64
2740 ? PROCESSOR_DEFAULT64
2741 : PROCESSOR_DEFAULT));
2369 2742
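The new cpu/tune selection above falls back in stages: an index already set by -mcpu= (or the current target attribute), then the cpu recorded in the default target options, then a lookup of the configured default cpu name; the tuning index simply inherits the cpu index when no -mtune was given. A small sketch of that fallback order, with hypothetical index values instead of the real option machinery:

    #include <stdio.h>

    /* Hypothetical indices; <= 0 means "not set".  */
    static int
    pick_cpu_index (int current, int default_opt, int configured)
    {
      if (current > 0)
        return current;        /* -mcpu= or a target attribute set it */
      if (default_opt > 0)
        return default_opt;    /* cpu recorded in the default target options */
      return configured;       /* fall back to the configured default cpu */
    }

    int
    main (void)
    {
      int cpu_index = pick_cpu_index (0, 7, 3);   /* default options win: 7 */
      int tune_index = 0;                         /* no -mtune given */

      if (tune_index <= 0)
        tune_index = cpu_index;                   /* tune follows the cpu */

      printf ("cpu=%d tune=%d\n", cpu_index, tune_index);
      return 0;
    }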
2370 if (rs6000_cpu == PROCESSOR_PPCE300C2 || rs6000_cpu == PROCESSOR_PPCE300C3 2743 if (rs6000_cpu == PROCESSOR_PPCE300C2 || rs6000_cpu == PROCESSOR_PPCE300C3
2371 || rs6000_cpu == PROCESSOR_PPCE500MC || rs6000_cpu == PROCESSOR_PPCE500MC64) 2744 || rs6000_cpu == PROCESSOR_PPCE500MC || rs6000_cpu == PROCESSOR_PPCE500MC64)
2372 { 2745 {
2373 if (TARGET_ALTIVEC) 2746 if (TARGET_ALTIVEC)
2374 error ("AltiVec not supported in this target"); 2747 error ("AltiVec not supported in this target");
2375 if (TARGET_SPE) 2748 if (TARGET_SPE)
2376 error ("Spe not supported in this target"); 2749 error ("SPE not supported in this target");
2377 } 2750 }
2378 2751
2379 /* Disable Cell microcode if we are optimizing for the Cell 2752 /* Disable Cell microcode if we are optimizing for the Cell
2380 and not optimizing for size. */ 2753 and not optimizing for size. */
2381 if (rs6000_gen_cell_microcode == -1) 2754 if (rs6000_gen_cell_microcode == -1)
2442 2815
2443 if (msg) 2816 if (msg)
2444 { 2817 {
2445 warning (0, msg); 2818 warning (0, msg);
2446 target_flags &= ~ MASK_VSX; 2819 target_flags &= ~ MASK_VSX;
2447 } 2820 target_flags_explicit |= MASK_VSX;
2448 else if (TARGET_VSX && !TARGET_ALTIVEC) 2821 }
2449 target_flags |= MASK_ALTIVEC; 2822 }
2450 } 2823
2451 2824 /* For the newer switches (vsx, dfp, etc.) set some of the older options,
2452 /* Set debug flags */ 2825 unless the user explicitly used the -mno-<option> to disable the code. */
2453 if (rs6000_debug_name) 2826 if (TARGET_VSX)
2454 { 2827 target_flags |= (ISA_2_6_MASKS_SERVER & ~target_flags_explicit);
2455 if (! strcmp (rs6000_debug_name, "all")) 2828 else if (TARGET_POPCNTD)
2456 rs6000_debug_stack = rs6000_debug_arg = rs6000_debug_reg 2829 target_flags |= (ISA_2_6_MASKS_EMBEDDED & ~target_flags_explicit);
2457 = rs6000_debug_addr = rs6000_debug_cost = 1; 2830 else if (TARGET_DFP)
2458 else if (! strcmp (rs6000_debug_name, "stack")) 2831 target_flags |= (ISA_2_5_MASKS_SERVER & ~target_flags_explicit);
2459 rs6000_debug_stack = 1; 2832 else if (TARGET_CMPB)
2460 else if (! strcmp (rs6000_debug_name, "arg")) 2833 target_flags |= (ISA_2_5_MASKS_EMBEDDED & ~target_flags_explicit);
2461 rs6000_debug_arg = 1; 2834 else if (TARGET_FPRND)
2462 else if (! strcmp (rs6000_debug_name, "reg")) 2835 target_flags |= (ISA_2_4_MASKS & ~target_flags_explicit);
2463 rs6000_debug_reg = 1; 2836 else if (TARGET_POPCNTB)
2464 else if (! strcmp (rs6000_debug_name, "addr")) 2837 target_flags |= (ISA_2_2_MASKS & ~target_flags_explicit);
2465 rs6000_debug_addr = 1; 2838 else if (TARGET_ALTIVEC)
2466 else if (! strcmp (rs6000_debug_name, "cost")) 2839 target_flags |= (MASK_PPC_GFXOPT & ~target_flags_explicit);
2467 rs6000_debug_cost = 1; 2840
2468 else 2841 /* E500mc does "better" if we inline more aggressively. Respect the
2469 error ("unknown -mdebug-%s switch", rs6000_debug_name); 2842 user's opinion, though. */
2470 2843 if (rs6000_block_move_inline_limit == 0
2844 && (rs6000_cpu == PROCESSOR_PPCE500MC
2845 || rs6000_cpu == PROCESSOR_PPCE500MC64))
2846 rs6000_block_move_inline_limit = 128;
2847
2848 /* store_one_arg depends on expand_block_move to handle at least the
2849 size of reg_parm_stack_space. */
2850 if (rs6000_block_move_inline_limit < (TARGET_POWERPC64 ? 64 : 32))
2851 rs6000_block_move_inline_limit = (TARGET_POWERPC64 ? 64 : 32);
2852
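The cascade of target_flags updates a few lines above uses one idiom throughout: OR in the whole set of options implied by the newest ISA feature that is enabled, but first mask out any bit the user set or cleared explicitly, so an explicit -mno-<option> always wins. A standalone sketch of that idiom with invented flag bits (the real masks are the ISA_2_x_MASKS_* and MASK_* values from rs6000.h):

    #include <stdio.h>

    /* Invented flag bits standing in for the real MASK_* / ISA_*_MASKS.  */
    #define FLAG_ALTIVEC  0x1
    #define FLAG_VSX      0x2
    #define FLAG_DFP      0x4
    #define ISA_NEW_MASKS (FLAG_ALTIVEC | FLAG_VSX | FLAG_DFP)

    int
    main (void)
    {
      unsigned flags = FLAG_VSX;           /* user asked for the new feature */
      unsigned flags_explicit = FLAG_DFP;  /* but explicitly decided dfp
                                              (here: left it off)            */

      /* Turn on everything the feature implies, except bits the user
         decided for themselves.  */
      if (flags & FLAG_VSX)
        flags |= (ISA_NEW_MASKS & ~flags_explicit);

      printf ("altivec=%d vsx=%d dfp=%d\n",
              !!(flags & FLAG_ALTIVEC), !!(flags & FLAG_VSX),
              !!(flags & FLAG_DFP));       /* dfp stays off */
      return 0;
    }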
2853 if (global_init_p)
2854 {
2471 /* If the appropriate debug option is enabled, replace the target hooks 2855 /* If the appropriate debug option is enabled, replace the target hooks
2472 with debug versions that call the real version and then print 2856 with debug versions that call the real version and then print
2473 debugging information. */ 2857 debugging information. */
2474 if (TARGET_DEBUG_COST) 2858 if (TARGET_DEBUG_COST)
2475 { 2859 {
2493 rs6000_legitimize_reload_address_ptr 2877 rs6000_legitimize_reload_address_ptr
2494 = rs6000_debug_legitimize_reload_address; 2878 = rs6000_debug_legitimize_reload_address;
2495 rs6000_mode_dependent_address_ptr 2879 rs6000_mode_dependent_address_ptr
2496 = rs6000_debug_mode_dependent_address; 2880 = rs6000_debug_mode_dependent_address;
2497 } 2881 }
2498 } 2882
2499 2883 if (rs6000_veclibabi_name)
2500 if (rs6000_traceback_name) 2884 {
2501 { 2885 if (strcmp (rs6000_veclibabi_name, "mass") == 0)
2502 if (! strncmp (rs6000_traceback_name, "full", 4)) 2886 rs6000_veclib_handler = rs6000_builtin_vectorized_libmass;
2503 rs6000_traceback = traceback_full; 2887 else
2504 else if (! strncmp (rs6000_traceback_name, "part", 4)) 2888 {
2505 rs6000_traceback = traceback_part; 2889 error ("unknown vectorization library ABI type (%s) for "
2506 else if (! strncmp (rs6000_traceback_name, "no", 2)) 2890 "-mveclibabi= switch", rs6000_veclibabi_name);
2507 rs6000_traceback = traceback_none; 2891 ret = false;
2892 }
2893 }
2894 }
2895
2896 if (!rs6000_explicit_options.long_double)
2897 {
2898 if (main_target_opt != NULL
2899 && (main_target_opt->x_rs6000_long_double_type_size
2900 != RS6000_DEFAULT_LONG_DOUBLE_SIZE))
2901 error ("target attribute or pragma changes long double size");
2508 else 2902 else
2509 error ("unknown -mtraceback arg %qs; expecting %<full%>, %<partial%> or %<none%>", 2903 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2510 rs6000_traceback_name); 2904 }
2511 }
2512
2513 if (!rs6000_explicit_options.long_double)
2514 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
2515 2905
2516 #ifndef POWERPC_LINUX 2906 #ifndef POWERPC_LINUX
2517 if (!rs6000_explicit_options.ieee) 2907 if (!rs6000_explicit_options.ieee)
2518 rs6000_ieeequad = 1; 2908 rs6000_ieeequad = 1;
2519 #endif 2909 #endif
2520 2910
2911 /* Disable VSX and Altivec silently if the user switched cpus to power7 in a
2912 target attribute or pragma which automatically enables both options,
2913 unless the altivec ABI was set. This is set by default for 64-bit, but
2914 not for 32-bit. */
2915 if (main_target_opt != NULL && !main_target_opt->x_rs6000_altivec_abi)
2916 target_flags &= ~((MASK_VSX | MASK_ALTIVEC) & ~target_flags_explicit);
2917
2521 /* Enable Altivec ABI for AIX -maltivec. */ 2918 /* Enable Altivec ABI for AIX -maltivec. */
2522 if (TARGET_XCOFF && (TARGET_ALTIVEC || TARGET_VSX)) 2919 if (TARGET_XCOFF && (TARGET_ALTIVEC || TARGET_VSX))
2523 rs6000_altivec_abi = 1; 2920 {
2921 if (main_target_opt != NULL && !main_target_opt->x_rs6000_altivec_abi)
2922 error ("target attribute or pragma changes AltiVec ABI");
2923 else
2924 rs6000_altivec_abi = 1;
2925 }
2524 2926
2525 /* The AltiVec ABI is the default for PowerPC-64 GNU/Linux. For 2927 /* The AltiVec ABI is the default for PowerPC-64 GNU/Linux. For
2526 PowerPC-32 GNU/Linux, -maltivec implies the AltiVec ABI. It can 2928 PowerPC-32 GNU/Linux, -maltivec implies the AltiVec ABI. It can
2527 be explicitly overridden in either case. */ 2929 be explicitly overridden in either case. */
2528 if (TARGET_ELF) 2930 if (TARGET_ELF)
2529 { 2931 {
2530 if (!rs6000_explicit_options.altivec_abi 2932 if (!rs6000_explicit_options.altivec_abi
2531 && (TARGET_64BIT || TARGET_ALTIVEC || TARGET_VSX)) 2933 && (TARGET_64BIT || TARGET_ALTIVEC || TARGET_VSX))
2532 rs6000_altivec_abi = 1; 2934 {
2935 if (main_target_opt != NULL &&
2936 !main_target_opt->x_rs6000_altivec_abi)
2937 error ("target attribute or pragma changes AltiVec ABI");
2938 else
2939 rs6000_altivec_abi = 1;
2940 }
2533 2941
2534 /* Enable VRSAVE for AltiVec ABI, unless explicitly overridden. */ 2942 /* Enable VRSAVE for AltiVec ABI, unless explicitly overridden. */
2535 if (!rs6000_explicit_options.vrsave) 2943 if (!rs6000_explicit_options.vrsave)
2536 TARGET_ALTIVEC_VRSAVE = rs6000_altivec_abi; 2944 TARGET_ALTIVEC_VRSAVE = rs6000_altivec_abi;
2537 } 2945 }
2538 2946
2539 /* Set the Darwin64 ABI as default for 64-bit Darwin. */ 2947 /* Set the Darwin64 ABI as default for 64-bit Darwin.
2540 if (DEFAULT_ABI == ABI_DARWIN && TARGET_64BIT) 2948 So far, the only darwin64 targets are also MACH-O. */
2541 { 2949 if (TARGET_MACHO
2542 rs6000_darwin64_abi = 1; 2950 && DEFAULT_ABI == ABI_DARWIN
2543 #if TARGET_MACHO 2951 && TARGET_64BIT)
2544 darwin_one_byte_bool = 1; 2952 {
2545 #endif 2953 if (main_target_opt != NULL && !main_target_opt->x_rs6000_darwin64_abi)
2546 /* Default to natural alignment, for better performance. */ 2954 error ("target attribute or pragma changes darwin64 ABI");
2547 rs6000_alignment_flags = MASK_ALIGN_NATURAL; 2955 else
2956 {
2957 rs6000_darwin64_abi = 1;
2958 /* Default to natural alignment, for better performance. */
2959 rs6000_alignment_flags = MASK_ALIGN_NATURAL;
2960 }
2548 } 2961 }
2549 2962
2550 /* Place FP constants in the constant pool instead of TOC 2963 /* Place FP constants in the constant pool instead of TOC
2551 if section anchors enabled. */ 2964 if section anchors enabled. */
2552 if (flag_section_anchors) 2965 if (flag_section_anchors)
2553 TARGET_NO_FP_IN_TOC = 1; 2966 TARGET_NO_FP_IN_TOC = 1;
2554
2555 /* Handle -mtls-size option. */
2556 rs6000_parse_tls_size_option ();
2557 2967
2558 #ifdef SUBTARGET_OVERRIDE_OPTIONS 2968 #ifdef SUBTARGET_OVERRIDE_OPTIONS
2559 SUBTARGET_OVERRIDE_OPTIONS; 2969 SUBTARGET_OVERRIDE_OPTIONS;
2560 #endif 2970 #endif
2561 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS 2971 #ifdef SUBSUBTARGET_OVERRIDE_OPTIONS
2576 else if (rs6000_select[1].string != NULL) 2986 else if (rs6000_select[1].string != NULL)
2577 { 2987 {
2578 /* For the powerpc-eabispe configuration, we set all these by 2988 /* For the powerpc-eabispe configuration, we set all these by
2579 default, so let's unset them if we manually set another 2989 default, so let's unset them if we manually set another
2580 CPU that is not the E500. */ 2990 CPU that is not the E500. */
2581 if (!rs6000_explicit_options.spe_abi) 2991 if (main_target_opt != NULL
2582 rs6000_spe_abi = 0; 2992 && ((main_target_opt->x_rs6000_spe_abi != rs6000_spe_abi)
2583 if (!rs6000_explicit_options.spe) 2993 || (main_target_opt->x_rs6000_spe != rs6000_spe)
2584 rs6000_spe = 0; 2994 || (main_target_opt->x_rs6000_float_gprs != rs6000_float_gprs)))
2585 if (!rs6000_explicit_options.float_gprs) 2995 error ("target attribute or pragma changes SPE ABI");
2586 rs6000_float_gprs = 0; 2996 else
2997 {
2998 if (!rs6000_explicit_options.spe_abi)
2999 rs6000_spe_abi = 0;
3000 if (!rs6000_explicit_options.spe)
3001 rs6000_spe = 0;
3002 if (!rs6000_explicit_options.float_gprs)
3003 rs6000_float_gprs = 0;
3004 }
2587 if (!(target_flags_explicit & MASK_ISEL)) 3005 if (!(target_flags_explicit & MASK_ISEL))
2588 target_flags &= ~MASK_ISEL; 3006 target_flags &= ~MASK_ISEL;
2589 } 3007 }
2590 3008
2591 /* Detect invalid option combinations with E500. */ 3009 /* Detect invalid option combinations with E500. */
2605 || rs6000_cpu == PROCESSOR_POWER6 3023 || rs6000_cpu == PROCESSOR_POWER6
2606 || rs6000_cpu == PROCESSOR_POWER7 3024 || rs6000_cpu == PROCESSOR_POWER7
2607 || rs6000_cpu == PROCESSOR_PPCE500MC 3025 || rs6000_cpu == PROCESSOR_PPCE500MC
2608 || rs6000_cpu == PROCESSOR_PPCE500MC64); 3026 || rs6000_cpu == PROCESSOR_PPCE500MC64);
2609 3027
2610 /* Allow debug switches to override the above settings. */ 3028 /* Allow debug switches to override the above settings. These are set to -1
2611 if (TARGET_ALWAYS_HINT > 0) 3029 in rs6000.opt to indicate the user hasn't directly set the switch. */
3030 if (TARGET_ALWAYS_HINT >= 0)
2612 rs6000_always_hint = TARGET_ALWAYS_HINT; 3031 rs6000_always_hint = TARGET_ALWAYS_HINT;
2613 3032
2614 if (TARGET_SCHED_GROUPS > 0) 3033 if (TARGET_SCHED_GROUPS >= 0)
2615 rs6000_sched_groups = TARGET_SCHED_GROUPS; 3034 rs6000_sched_groups = TARGET_SCHED_GROUPS;
2616 3035
2617 if (TARGET_ALIGN_BRANCH_TARGETS > 0) 3036 if (TARGET_ALIGN_BRANCH_TARGETS >= 0)
2618 rs6000_align_branch_targets = TARGET_ALIGN_BRANCH_TARGETS; 3037 rs6000_align_branch_targets = TARGET_ALIGN_BRANCH_TARGETS;
2619 3038
2620 rs6000_sched_restricted_insns_priority 3039 rs6000_sched_restricted_insns_priority
2621 = (rs6000_sched_groups ? 1 : 0); 3040 = (rs6000_sched_groups ? 1 : 0);
2622 3041
2654 else 3073 else
2655 rs6000_sched_insert_nops = ((enum rs6000_nop_insertion) 3074 rs6000_sched_insert_nops = ((enum rs6000_nop_insertion)
2656 atoi (rs6000_sched_insert_nops_str)); 3075 atoi (rs6000_sched_insert_nops_str));
2657 } 3076 }
2658 3077
3078 if (global_init_p)
3079 {
2659 #ifdef TARGET_REGNAMES 3080 #ifdef TARGET_REGNAMES
2660 /* If the user desires alternate register names, copy in the 3081 /* If the user desires alternate register names, copy in the
2661 alternate names now. */ 3082 alternate names now. */
2662 if (TARGET_REGNAMES) 3083 if (TARGET_REGNAMES)
2663 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names)); 3084 memcpy (rs6000_reg_names, alt_reg_names, sizeof (rs6000_reg_names));
2664 #endif 3085 #endif
2665 3086
2666 /* Set aix_struct_return last, after the ABI is determined. 3087 /* Set aix_struct_return last, after the ABI is determined.
2667 If -maix-struct-return or -msvr4-struct-return was explicitly 3088 If -maix-struct-return or -msvr4-struct-return was explicitly
2668 used, don't override with the ABI default. */ 3089 used, don't override with the ABI default. */
2669 if (!rs6000_explicit_options.aix_struct_ret) 3090 if (!rs6000_explicit_options.aix_struct_ret)
2670 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET); 3091 aix_struct_return = (DEFAULT_ABI != ABI_V4 || DRAFT_V4_STRUCT_RET);
2671 3092
2672 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD) 3093 #if 0
2673 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format; 3094 /* IBM XL compiler defaults to unsigned bitfields. */
2674 3095 if (TARGET_XL_COMPAT)
2675 if (TARGET_TOC) 3096 flag_signed_bitfields = 0;
2676 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1); 3097 #endif
2677 3098
2678 /* We can only guarantee the availability of DI pseudo-ops when 3099 if (TARGET_LONG_DOUBLE_128 && !TARGET_IEEEQUAD)
2679 assembling for 64-bit targets. */ 3100 REAL_MODE_FORMAT (TFmode) = &ibm_extended_format;
2680 if (!TARGET_64BIT) 3101
2681 { 3102 if (TARGET_TOC)
2682 targetm.asm_out.aligned_op.di = NULL; 3103 ASM_GENERATE_INTERNAL_LABEL (toc_label_name, "LCTOC", 1);
2683 targetm.asm_out.unaligned_op.di = NULL; 3104
2684 } 3105 /* We can only guarantee the availability of DI pseudo-ops when
2685 3106 assembling for 64-bit targets. */
2686 /* Set branch target alignment, if not optimizing for size. */ 3107 if (!TARGET_64BIT)
2687 if (!optimize_size) 3108 {
2688 { 3109 targetm.asm_out.aligned_op.di = NULL;
2689 /* Cell wants to be aligned 8byte for dual issue. */ 3110 targetm.asm_out.unaligned_op.di = NULL;
2690 if (rs6000_cpu == PROCESSOR_CELL) 3111 }
2691 { 3112
2692 if (align_functions <= 0) 3113
2693 align_functions = 8; 3114 /* Set branch target alignment, if not optimizing for size. */
2694 if (align_jumps <= 0) 3115 if (!optimize_size)
2695 align_jumps = 8; 3116 {
2696 if (align_loops <= 0) 3117 /* Cell wants to be aligned 8byte for dual issue. Titan wants to be
2697 align_loops = 8; 3118 aligned 8byte to avoid misprediction by the branch predictor. */
2698 } 3119 if (rs6000_cpu == PROCESSOR_TITAN
2699 if (rs6000_align_branch_targets) 3120 || rs6000_cpu == PROCESSOR_CELL)
2700 { 3121 {
2701 if (align_functions <= 0) 3122 if (align_functions <= 0)
2702 align_functions = 16; 3123 align_functions = 8;
2703 if (align_jumps <= 0) 3124 if (align_jumps <= 0)
2704 align_jumps = 16; 3125 align_jumps = 8;
2705 if (align_loops <= 0) 3126 if (align_loops <= 0)
2706 align_loops = 16; 3127 align_loops = 8;
2707 } 3128 }
2708 if (align_jumps_max_skip <= 0) 3129 if (rs6000_align_branch_targets)
2709 align_jumps_max_skip = 15; 3130 {
2710 if (align_loops_max_skip <= 0) 3131 if (align_functions <= 0)
2711 align_loops_max_skip = 15; 3132 align_functions = 16;
2712 } 3133 if (align_jumps <= 0)
2713 3134 align_jumps = 16;
2714 /* Arrange to save and restore machine status around nested functions. */ 3135 if (align_loops <= 0)
2715 init_machine_status = rs6000_init_machine_status; 3136 {
2716 3137 can_override_loop_align = 1;
2717 /* We should always be splitting complex arguments, but we can't break 3138 align_loops = 16;
2718 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */ 3139 }
2719 if (DEFAULT_ABI != ABI_AIX) 3140 }
2720 targetm.calls.split_complex_arg = NULL; 3141 if (align_jumps_max_skip <= 0)
3142 align_jumps_max_skip = 15;
3143 if (align_loops_max_skip <= 0)
3144 align_loops_max_skip = 15;
3145 }
3146
3147 /* Arrange to save and restore machine status around nested functions. */
3148 init_machine_status = rs6000_init_machine_status;
3149
3150 /* We should always be splitting complex arguments, but we can't break
3151 Linux and Darwin ABIs at the moment. For now, only AIX is fixed. */
3152 if (DEFAULT_ABI != ABI_AIX)
3153 targetm.calls.split_complex_arg = NULL;
3154 }
2721 3155
2722 /* Initialize rs6000_cost with the appropriate target costs. */ 3156 /* Initialize rs6000_cost with the appropriate target costs. */
2723 if (optimize_size) 3157 if (optimize_size)
2724 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost; 3158 rs6000_cost = TARGET_POWERPC64 ? &size64_cost : &size32_cost;
2725 else 3159 else
2809 3243
2810 case PROCESSOR_PPCE500MC64: 3244 case PROCESSOR_PPCE500MC64:
2811 rs6000_cost = &ppce500mc64_cost; 3245 rs6000_cost = &ppce500mc64_cost;
2812 break; 3246 break;
2813 3247
3248 case PROCESSOR_TITAN:
3249 rs6000_cost = &titan_cost;
3250 break;
3251
2814 case PROCESSOR_POWER4: 3252 case PROCESSOR_POWER4:
2815 case PROCESSOR_POWER5: 3253 case PROCESSOR_POWER5:
2816 rs6000_cost = &power4_cost; 3254 rs6000_cost = &power4_cost;
2817 break; 3255 break;
2818 3256
2830 3268
2831 default: 3269 default:
2832 gcc_unreachable (); 3270 gcc_unreachable ();
2833 } 3271 }
2834 3272
2835 if (!PARAM_SET_P (PARAM_SIMULTANEOUS_PREFETCHES)) 3273 if (global_init_p)
2836 set_param_value ("simultaneous-prefetches", 3274 {
2837 rs6000_cost->simultaneous_prefetches); 3275 maybe_set_param_value (PARAM_SIMULTANEOUS_PREFETCHES,
2838 if (!PARAM_SET_P (PARAM_L1_CACHE_SIZE)) 3276 rs6000_cost->simultaneous_prefetches,
2839 set_param_value ("l1-cache-size", rs6000_cost->l1_cache_size); 3277 global_options.x_param_values,
2840 if (!PARAM_SET_P (PARAM_L1_CACHE_LINE_SIZE)) 3278 global_options_set.x_param_values);
2841 set_param_value ("l1-cache-line-size", rs6000_cost->cache_line_size); 3279 maybe_set_param_value (PARAM_L1_CACHE_SIZE, rs6000_cost->l1_cache_size,
2842 if (!PARAM_SET_P (PARAM_L2_CACHE_SIZE)) 3280 global_options.x_param_values,
2843 set_param_value ("l2-cache-size", rs6000_cost->l2_cache_size); 3281 global_options_set.x_param_values);
2844 3282 maybe_set_param_value (PARAM_L1_CACHE_LINE_SIZE,
2845 /* If using typedef char *va_list, signal that __builtin_va_start (&ap, 0) 3283 rs6000_cost->cache_line_size,
2846 can be optimized to ap = __builtin_next_arg (0). */ 3284 global_options.x_param_values,
2847 if (DEFAULT_ABI != ABI_V4) 3285 global_options_set.x_param_values);
2848 targetm.expand_builtin_va_start = NULL; 3286 maybe_set_param_value (PARAM_L2_CACHE_SIZE, rs6000_cost->l2_cache_size,
3287 global_options.x_param_values,
3288 global_options_set.x_param_values);
3289
3290 /* If using typedef char *va_list, signal that
3291 __builtin_va_start (&ap, 0) can be optimized to
3292 ap = __builtin_next_arg (0). */
3293 if (DEFAULT_ABI != ABI_V4)
3294 targetm.expand_builtin_va_start = NULL;
3295 }
2849 3296
2850 /* Set up single/double float flags. 3297 /* Set up single/double float flags.
2851 If TARGET_HARD_FLOAT is set, but neither single or double is set, 3298 If TARGET_HARD_FLOAT is set, but neither single or double is set,
2852 then set both flags. */ 3299 then set both flags. */
2853 if (TARGET_HARD_FLOAT && TARGET_FPRS 3300 if (TARGET_HARD_FLOAT && TARGET_FPRS
2862 rs6000_single_float = 1; 3309 rs6000_single_float = 1;
2863 if (TARGET_E500_DOUBLE) 3310 if (TARGET_E500_DOUBLE)
2864 rs6000_single_float = rs6000_double_float = 1; 3311 rs6000_single_float = rs6000_double_float = 1;
2865 } 3312 }
2866 3313
3314 if (main_target_opt)
3315 {
3316 if (main_target_opt->x_rs6000_single_float != rs6000_single_float)
3317 error ("target attribute or pragma changes single precision floating "
3318 "point");
3319 if (main_target_opt->x_rs6000_double_float != rs6000_double_float)
3320 error ("target attribute or pragma changes double precision floating "
3321 "point");
3322 }
3323
2867 /* If not explicitly specified via option, decide whether to generate indexed 3324 /* If not explicitly specified via option, decide whether to generate indexed
2868 load/store instructions. */ 3325 load/store instructions. */
2869 if (TARGET_AVOID_XFORM == -1) 3326 if (TARGET_AVOID_XFORM == -1)
2870 /* Avoid indexed addressing when targeting Power6 in order to avoid 3327 /* Avoid indexed addressing when targeting Power6 in order to avoid the
2871 the DERAT mispredict penalty. */ 3328 DERAT mispredict penalty. However the LVE and STVE altivec instructions
2872 TARGET_AVOID_XFORM = (rs6000_cpu == PROCESSOR_POWER6 && TARGET_CMPB); 3329 need indexed accesses and the type used is the scalar type of the element
2873 3330 being loaded or stored. */
2874 rs6000_init_hard_regno_mode_ok (); 3331 TARGET_AVOID_XFORM = (rs6000_cpu == PROCESSOR_POWER6 && TARGET_CMPB
2875 } 3332 && !TARGET_ALTIVEC);
2876 3333
3334 /* Set the -mrecip options. */
3335 if (rs6000_recip_name)
3336 {
3337 char *p = ASTRDUP (rs6000_recip_name);
3338 char *q;
3339 unsigned int mask, i;
3340 bool invert;
3341
3342 while ((q = strtok (p, ",")) != NULL)
3343 {
3344 p = NULL;
3345 if (*q == '!')
3346 {
3347 invert = true;
3348 q++;
3349 }
3350 else
3351 invert = false;
3352
3353 if (!strcmp (q, "default"))
3354 mask = ((TARGET_RECIP_PRECISION)
3355 ? RECIP_HIGH_PRECISION : RECIP_LOW_PRECISION);
3356 else
3357 {
3358 for (i = 0; i < ARRAY_SIZE (recip_options); i++)
3359 if (!strcmp (q, recip_options[i].string))
3360 {
3361 mask = recip_options[i].mask;
3362 break;
3363 }
3364
3365 if (i == ARRAY_SIZE (recip_options))
3366 {
3367 error ("unknown option for -mrecip=%s", q);
3368 invert = false;
3369 mask = 0;
3370 ret = false;
3371 }
3372 }
3373
3374 if (invert)
3375 rs6000_recip_control &= ~mask;
3376 else
3377 rs6000_recip_control |= mask;
3378 }
3379 }
3380
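The strtok loop above walks the comma-separated -mrecip= argument; a leading '!' on an item clears the matching bits instead of setting them, and an unknown item is reported as an error. A self-contained sketch of the same parsing pattern, with a made-up option table standing in for recip_options:

    #include <stdio.h>
    #include <string.h>

    struct opt { const char *name; unsigned mask; };

    /* Hypothetical table; the real recip_options lives in rs6000.c.  */
    static const struct opt options[] = {
      { "div",   0x3 },
      { "rsqrt", 0xc },
      { "all",   0xf },
    };

    int
    main (void)
    {
      char arg[] = "all,!rsqrt";      /* e.g. -mrecip=all,!rsqrt */
      unsigned control = 0;
      char *p = arg, *q;

      while ((q = strtok (p, ",")) != NULL)
        {
          size_t i;
          int invert = 0;
          unsigned mask = 0;

          p = NULL;                   /* strtok continues on the same string */
          if (*q == '!')
            {
              invert = 1;
              q++;
            }

          for (i = 0; i < sizeof (options) / sizeof (options[0]); i++)
            if (strcmp (q, options[i].name) == 0)
              {
                mask = options[i].mask;
                break;
              }

          if (invert)
            control &= ~mask;
          else
            control |= mask;
        }

      printf ("control = 0x%x\n", control);   /* prints 0x3 */
      return 0;
    }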
3381 rs6000_init_hard_regno_mode_ok (global_init_p);
3382
3383 /* Save the initial options in case the user uses function-specific options. */
3384 if (global_init_p)
3385 target_option_default_node = target_option_current_node
3386 = build_target_option_node ();
3387
3388 return ret;
3389 }
3390
3391 /* Implement TARGET_OPTION_OVERRIDE. On the RS/6000 this is used to
3392 define the target cpu type. */
3393
3394 static void
3395 rs6000_option_override (void)
3396 {
3397 (void) rs6000_option_override_internal (true);
3398 }
3399
3400
2877 /* Implement targetm.vectorize.builtin_mask_for_load. */ 3401 /* Implement targetm.vectorize.builtin_mask_for_load. */
2878 static tree 3402 static tree
2879 rs6000_builtin_mask_for_load (void) 3403 rs6000_builtin_mask_for_load (void)
2880 { 3404 {
2881 if (TARGET_ALTIVEC || TARGET_VSX) 3405 if (TARGET_ALTIVEC || TARGET_VSX)
2882 return altivec_builtin_mask_for_load; 3406 return altivec_builtin_mask_for_load;
2883 else 3407 else
2884 return 0; 3408 return 0;
3409 }
3410
3411 /* Implement LOOP_ALIGN. */
3412 int
3413 rs6000_loop_align (rtx label)
3414 {
3415 basic_block bb;
3416 int ninsns;
3417
3418 /* Don't override loop alignment if -falign-loops was specified. */
3419 if (!can_override_loop_align)
3420 return align_loops_log;
3421
3422 bb = BLOCK_FOR_INSN (label);
3423 ninsns = num_loop_insns(bb->loop_father);
3424
3425 /* Align small loops to 32 bytes to fit in an icache sector; otherwise return the default. */
3426 if (ninsns > 4 && ninsns <= 8
3427 && (rs6000_cpu == PROCESSOR_POWER4
3428 || rs6000_cpu == PROCESSOR_POWER5
3429 || rs6000_cpu == PROCESSOR_POWER6
3430 || rs6000_cpu == PROCESSOR_POWER7))
3431 return 5;
3432 else
3433 return align_loops_log;
3434 }
3435
3436 /* Implement TARGET_LOOP_ALIGN_MAX_SKIP. */
3437 static int
3438 rs6000_loop_align_max_skip (rtx label)
3439 {
3440 return (1 << rs6000_loop_align (label)) - 1;
2885 } 3441 }
2886 3442
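rs6000_loop_align returns a log2 alignment, so the value 5 chosen above for small loops (more than 4 and at most 8 insns) on POWER4 through POWER7 means a 32-byte boundary, one icache sector, and rs6000_loop_align_max_skip turns that into the largest number of padding bytes worth emitting. The arithmetic spelled out:

    #include <stdio.h>

    int
    main (void)
    {
      int align_log = 5;                    /* what rs6000_loop_align returns */
      int align_bytes = 1 << align_log;     /* 32-byte icache sector          */
      int max_skip = (1 << align_log) - 1;  /* what the max-skip hook returns */

      printf ("align to %d bytes, pad at most %d bytes\n",
              align_bytes, max_skip);       /* 32 and 31 */
      return 0;
    }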
2887 /* Implement targetm.vectorize.builtin_conversion. 3443 /* Implement targetm.vectorize.builtin_conversion.
2888 Returns a decl of a function that implements conversion of an integer vector 3444 Returns a decl of a function that implements conversion of an integer vector
2889 into a floating-point vector, or vice-versa. DEST_TYPE is the 3445 into a floating-point vector, or vice-versa. DEST_TYPE is the
3033 bool is_packed) 3589 bool is_packed)
3034 { 3590 {
3035 if (TARGET_VSX) 3591 if (TARGET_VSX)
3036 { 3592 {
3037 /* Return if movmisalign pattern is not supported for this mode. */ 3593 /* Return if movmisalign pattern is not supported for this mode. */
3038 if (optab_handler (movmisalign_optab, mode)->insn_code == 3594 if (optab_handler (movmisalign_optab, mode) == CODE_FOR_nothing)
3039 CODE_FOR_nothing)
3040 return false; 3595 return false;
3041 3596
3042 if (misalignment == -1) 3597 if (misalignment == -1)
3043 { 3598 {
3044 /* misalignment factor is unknown at compile time but we know 3599 /* Misalignment factor is unknown at compile time but we know
3045 it's word aligned. */ 3600 it's word aligned. */
3046 if (rs6000_vector_alignment_reachable (type, is_packed)) 3601 if (rs6000_vector_alignment_reachable (type, is_packed))
3047 return true; 3602 {
3603 int element_size = TREE_INT_CST_LOW (TYPE_SIZE (type));
3604
3605 if (element_size == 64 || element_size == 32)
3606 return true;
3607 }
3608
3048 return false; 3609 return false;
3049 } 3610 }
3611
3050 /* VSX supports word-aligned vector. */ 3612 /* VSX supports word-aligned vector. */
3051 if (misalignment % 4 == 0) 3613 if (misalignment % 4 == 0)
3052 return true; 3614 return true;
3053 } 3615 }
3054 return false; 3616 return false;
3108 return NULL_TREE; 3670 return NULL_TREE;
3109 } 3671 }
3110 3672
3111 gcc_assert (d); 3673 gcc_assert (d);
3112 return d; 3674 return d;
3675 }
3676
3677
3678 /* Implement targetm.vectorize.builtin_vectorization_cost. */
3679 static int
3680 rs6000_builtin_vectorization_cost (enum vect_cost_for_stmt type_of_cost,
3681 tree vectype, int misalign)
3682 {
3683 unsigned elements;
3684
3685 switch (type_of_cost)
3686 {
3687 case scalar_stmt:
3688 case scalar_load:
3689 case scalar_store:
3690 case vector_stmt:
3691 case vector_load:
3692 case vector_store:
3693 case vec_to_scalar:
3694 case scalar_to_vec:
3695 case cond_branch_not_taken:
3696 case vec_perm:
3697 return 1;
3698
3699 case cond_branch_taken:
3700 return 3;
3701
3702 case unaligned_load:
3703 if (TARGET_VSX && TARGET_ALLOW_MOVMISALIGN)
3704 {
3705 elements = TYPE_VECTOR_SUBPARTS (vectype);
3706 if (elements == 2)
3707 /* Double word aligned. */
3708 return 2;
3709
3710 if (elements == 4)
3711 {
3712 switch (misalign)
3713 {
3714 case 8:
3715 /* Double word aligned. */
3716 return 2;
3717
3718 case -1:
3719 /* Unknown misalignment. */
3720 case 4:
3721 case 12:
3722 /* Word aligned. */
3723 return 22;
3724
3725 default:
3726 gcc_unreachable ();
3727 }
3728 }
3729 }
3730
3731 if (TARGET_ALTIVEC)
3732 /* Misaligned loads are not supported. */
3733 gcc_unreachable ();
3734
3735 return 2;
3736
3737 case unaligned_store:
3738 if (TARGET_VSX && TARGET_ALLOW_MOVMISALIGN)
3739 {
3740 elements = TYPE_VECTOR_SUBPARTS (vectype);
3741 if (elements == 2)
3742 /* Double word aligned. */
3743 return 2;
3744
3745 if (elements == 4)
3746 {
3747 switch (misalign)
3748 {
3749 case 8:
3750 /* Double word aligned. */
3751 return 2;
3752
3753 case -1:
3754 /* Unknown misalignment. */
3755 case 4:
3756 case 12:
3757 /* Word aligned. */
3758 return 23;
3759
3760 default:
3761 gcc_unreachable ();
3762 }
3763 }
3764 }
3765
3766 if (TARGET_ALTIVEC)
3767 /* Misaligned stores are not supported. */
3768 gcc_unreachable ();
3769
3770 return 2;
3771
3772 default:
3773 gcc_unreachable ();
3774 }
3775 }
3776
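The cost hook above returns deliberately coarse relative weights: most scalar and vector statements cost 1, a taken branch 3, and a V4SF load or store that is only word aligned costs 22 or 23 because it has to be emulated. A rough, illustrative comparison of how such weights tilt a profitability decision; the loop shape and counts here are invented, only the per-access weights echo the hook:

    #include <stdio.h>

    int
    main (void)
    {
      /* Relative weights mirroring the hook for V4SF on VSX.  */
      int scalar_load = 1, vector_load = 1, unaligned_word_load = 22;
      int vf = 4;                       /* four floats per V4SF vector */

      /* Scalar loads needed to process one vector's worth of floats.  */
      int scalar_cost = vf * scalar_load;

      printf ("aligned vector:    %d vs scalar %d -> %s\n",
              vector_load, scalar_cost,
              vector_load < scalar_cost ? "profitable" : "not profitable");
      printf ("word-aligned only: %d vs scalar %d -> %s\n",
              unaligned_word_load, scalar_cost,
              unaligned_word_load < scalar_cost ? "profitable" : "not profitable");
      return 0;
    }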
3777 /* Implement targetm.vectorize.preferred_simd_mode. */
3778
3779 static enum machine_mode
3780 rs6000_preferred_simd_mode (enum machine_mode mode)
3781 {
3782 if (TARGET_VSX)
3783 switch (mode)
3784 {
3785 case DFmode:
3786 return V2DFmode;
3787 default:;
3788 }
3789 if (TARGET_ALTIVEC || TARGET_VSX)
3790 switch (mode)
3791 {
3792 case SFmode:
3793 return V4SFmode;
3794 case DImode:
3795 return V2DImode;
3796 case SImode:
3797 return V4SImode;
3798 case HImode:
3799 return V8HImode;
3800 case QImode:
3801 return V16QImode;
3802 default:;
3803 }
3804 if (TARGET_SPE)
3805 switch (mode)
3806 {
3807 case SFmode:
3808 return V2SFmode;
3809 case SImode:
3810 return V2SImode;
3811 default:;
3812 }
3813 if (TARGET_PAIRED_FLOAT
3814 && mode == SFmode)
3815 return V2SFmode;
3816 return word_mode;
3113 } 3817 }
3114 3818
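rs6000_preferred_simd_mode above maps a scalar element mode to the widest vector mode the enabled unit supports, preferring VSX's V2DF for doubles and falling back to word_mode when nothing applies. A rough illustration of that mapping for a VSX/AltiVec target, written as a string lookup rather than the real machine_mode enum:

    #include <stdio.h>
    #include <string.h>

    /* Illustrative scalar->vector mapping for a VSX-capable target;
       the real hook works on enum machine_mode values, not strings.  */
    static const char *
    preferred_simd (const char *scalar)
    {
      static const char *const map[][2] = {
        { "DF", "V2DF" }, { "SF", "V4SF" }, { "DI", "V2DI" },
        { "SI", "V4SI" }, { "HI", "V8HI" }, { "QI", "V16QI" },
      };
      size_t i;

      for (i = 0; i < sizeof (map) / sizeof (map[0]); i++)
        if (strcmp (scalar, map[i][0]) == 0)
          return map[i][1];
      return "word_mode";              /* no vector mode for this element */
    }

    int
    main (void)
    {
      printf ("DF -> %s\n", preferred_simd ("DF"));   /* V2DF */
      printf ("TI -> %s\n", preferred_simd ("TI"));   /* word_mode */
      return 0;
    }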
3115 /* Handle generic options of the form -mfoo=yes/no. 3819 /* Handle generic options of the form -mfoo=yes/no.
3116 NAME is the option name. 3820 NAME is the option name.
3117 VALUE is the option value. 3821 VALUE is the option value.
3128 *flag = 0; 3832 *flag = 0;
3129 else 3833 else
3130 error ("unknown -m%s= option specified: '%s'", name, value); 3834 error ("unknown -m%s= option specified: '%s'", name, value);
3131 } 3835 }
3132 3836
3133 /* Validate and record the size specified with the -mtls-size option. */ 3837 /* Implement TARGET_OPTION_INIT_STRUCT. */
3134 3838
3135 static void 3839 static void
3136 rs6000_parse_tls_size_option (void) 3840 rs6000_option_init_struct (struct gcc_options *opts)
3137 {
3138 if (rs6000_tls_size_string == 0)
3139 return;
3140 else if (strcmp (rs6000_tls_size_string, "16") == 0)
3141 rs6000_tls_size = 16;
3142 else if (strcmp (rs6000_tls_size_string, "32") == 0)
3143 rs6000_tls_size = 32;
3144 else if (strcmp (rs6000_tls_size_string, "64") == 0)
3145 rs6000_tls_size = 64;
3146 else
3147 error ("bad value %qs for -mtls-size switch", rs6000_tls_size_string);
3148 }
3149
3150 void
3151 optimization_options (int level ATTRIBUTE_UNUSED, int size ATTRIBUTE_UNUSED)
3152 { 3841 {
3153 if (DEFAULT_ABI == ABI_DARWIN) 3842 if (DEFAULT_ABI == ABI_DARWIN)
3154 /* The Darwin libraries never set errno, so we might as well 3843 /* The Darwin libraries never set errno, so we might as well
3155 avoid calling them when that's the only reason we would. */ 3844 avoid calling them when that's the only reason we would. */
3156 flag_errno_math = 0; 3845 opts->x_flag_errno_math = 0;
3157 3846
3847 /* Enable section anchors by default. */
3848 if (!TARGET_MACHO)
3849 opts->x_flag_section_anchors = 1;
3850 }
3851
3852 /* Implement TARGET_OPTION_DEFAULT_PARAMS. */
3853
3854 static void
3855 rs6000_option_default_params (void)
3856 {
3158 /* Double growth factor to counter reduced min jump length. */ 3857 /* Double growth factor to counter reduced min jump length. */
3159 set_param_value ("max-grow-copy-bb-insns", 16); 3858 set_default_param_value (PARAM_MAX_GROW_COPY_BB_INSNS, 16);
3160
3161 /* Enable section anchors by default.
3162 Skip section anchors for Objective C and Objective C++
3163 until front-ends fixed. */
3164 if (!TARGET_MACHO && lang_hooks.name[4] != 'O')
3165 flag_section_anchors = 2;
3166 } 3859 }
3167 3860
3168 static enum fpu_type_t 3861 static enum fpu_type_t
3169 rs6000_parse_fpu_option (const char *option) 3862 rs6000_parse_fpu_option (const char *option)
3170 { 3863 {
3175 if (!strcmp("dp_full", option)) return FPU_DF_FULL; 3868 if (!strcmp("dp_full", option)) return FPU_DF_FULL;
3176 error("unknown value %s for -mfpu", option); 3869 error("unknown value %s for -mfpu", option);
3177 return FPU_NONE; 3870 return FPU_NONE;
3178 } 3871 }
3179 3872
3873
3874 /* Handler for the Mathematical Acceleration Subsystem (mass) interface to a
3875 library with vectorized intrinsics. */
3876
3877 static tree
3878 rs6000_builtin_vectorized_libmass (tree fndecl, tree type_out, tree type_in)
3879 {
3880 char name[32];
3881 const char *suffix = NULL;
3882 tree fntype, new_fndecl, bdecl = NULL_TREE;
3883 int n_args = 1;
3884 const char *bname;
3885 enum machine_mode el_mode, in_mode;
3886 int n, in_n;
3887
3888 /* Libmass is suitable for unsafe math only as it does not correctly support
3889 parts of IEEE with the required precision such as denormals. Only support
3890 it if we have VSX to use the simd d2 or f4 functions.
3891 XXX: Add variable length support. */
3892 if (!flag_unsafe_math_optimizations || !TARGET_VSX)
3893 return NULL_TREE;
3894
3895 el_mode = TYPE_MODE (TREE_TYPE (type_out));
3896 n = TYPE_VECTOR_SUBPARTS (type_out);
3897 in_mode = TYPE_MODE (TREE_TYPE (type_in));
3898 in_n = TYPE_VECTOR_SUBPARTS (type_in);
3899 if (el_mode != in_mode
3900 || n != in_n)
3901 return NULL_TREE;
3902
3903 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3904 {
3905 enum built_in_function fn = DECL_FUNCTION_CODE (fndecl);
3906 switch (fn)
3907 {
3908 case BUILT_IN_ATAN2:
3909 case BUILT_IN_HYPOT:
3910 case BUILT_IN_POW:
3911 n_args = 2;
3912 /* fall through */
3913
3914 case BUILT_IN_ACOS:
3915 case BUILT_IN_ACOSH:
3916 case BUILT_IN_ASIN:
3917 case BUILT_IN_ASINH:
3918 case BUILT_IN_ATAN:
3919 case BUILT_IN_ATANH:
3920 case BUILT_IN_CBRT:
3921 case BUILT_IN_COS:
3922 case BUILT_IN_COSH:
3923 case BUILT_IN_ERF:
3924 case BUILT_IN_ERFC:
3925 case BUILT_IN_EXP2:
3926 case BUILT_IN_EXP:
3927 case BUILT_IN_EXPM1:
3928 case BUILT_IN_LGAMMA:
3929 case BUILT_IN_LOG10:
3930 case BUILT_IN_LOG1P:
3931 case BUILT_IN_LOG2:
3932 case BUILT_IN_LOG:
3933 case BUILT_IN_SIN:
3934 case BUILT_IN_SINH:
3935 case BUILT_IN_SQRT:
3936 case BUILT_IN_TAN:
3937 case BUILT_IN_TANH:
3938 bdecl = implicit_built_in_decls[fn];
3939 suffix = "d2"; /* pow -> powd2 */
3940 if (el_mode != DFmode
3941 || n != 2)
3942 return NULL_TREE;
3943 break;
3944
3945 case BUILT_IN_ATAN2F:
3946 case BUILT_IN_HYPOTF:
3947 case BUILT_IN_POWF:
3948 n_args = 2;
3949 /* fall through */
3950
3951 case BUILT_IN_ACOSF:
3952 case BUILT_IN_ACOSHF:
3953 case BUILT_IN_ASINF:
3954 case BUILT_IN_ASINHF:
3955 case BUILT_IN_ATANF:
3956 case BUILT_IN_ATANHF:
3957 case BUILT_IN_CBRTF:
3958 case BUILT_IN_COSF:
3959 case BUILT_IN_COSHF:
3960 case BUILT_IN_ERFF:
3961 case BUILT_IN_ERFCF:
3962 case BUILT_IN_EXP2F:
3963 case BUILT_IN_EXPF:
3964 case BUILT_IN_EXPM1F:
3965 case BUILT_IN_LGAMMAF:
3966 case BUILT_IN_LOG10F:
3967 case BUILT_IN_LOG1PF:
3968 case BUILT_IN_LOG2F:
3969 case BUILT_IN_LOGF:
3970 case BUILT_IN_SINF:
3971 case BUILT_IN_SINHF:
3972 case BUILT_IN_SQRTF:
3973 case BUILT_IN_TANF:
3974 case BUILT_IN_TANHF:
3975 bdecl = implicit_built_in_decls[fn];
3976 suffix = "4"; /* powf -> powf4 */
3977 if (el_mode != SFmode
3978 || n != 4)
3979 return NULL_TREE;
3980 break;
3981
3982 default:
3983 return NULL_TREE;
3984 }
3985 }
3986 else
3987 return NULL_TREE;
3988
3989 gcc_assert (suffix != NULL);
3990 bname = IDENTIFIER_POINTER (DECL_NAME (bdecl));
3991 strcpy (name, bname + sizeof ("__builtin_") - 1);
3992 strcat (name, suffix);
3993
3994 if (n_args == 1)
3995 fntype = build_function_type_list (type_out, type_in, NULL);
3996 else if (n_args == 2)
3997 fntype = build_function_type_list (type_out, type_in, type_in, NULL);
3998 else
3999 gcc_unreachable ();
4000
4001 /* Build a function declaration for the vectorized function. */
4002 new_fndecl = build_decl (BUILTINS_LOCATION,
4003 FUNCTION_DECL, get_identifier (name), fntype);
4004 TREE_PUBLIC (new_fndecl) = 1;
4005 DECL_EXTERNAL (new_fndecl) = 1;
4006 DECL_IS_NOVOPS (new_fndecl) = 1;
4007 TREE_READONLY (new_fndecl) = 1;
4008
4009 return new_fndecl;
4010 }
4011
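The name construction near the end of the handler strips the "__builtin_" prefix from the scalar builtin's name and appends the "d2" or "4" suffix, so __builtin_pow maps to powd2 and __builtin_powf to powf4. The same string arithmetic in isolation:

    #include <stdio.h>
    #include <string.h>

    int
    main (void)
    {
      char name[32];
      const char *bname = "__builtin_pow";
      const char *suffix = "d2";                 /* "4" for the float variant */

      /* sizeof ("__builtin_") counts the NUL, so subtracting 1 skips
         exactly the prefix characters.  */
      strcpy (name, bname + sizeof ("__builtin_") - 1);
      strcat (name, suffix);

      printf ("%s -> %s\n", bname, name);        /* __builtin_pow -> powd2 */
      return 0;
    }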
3180 /* Returns a function decl for a vectorized version of the builtin function 4012 /* Returns a function decl for a vectorized version of the builtin function
3181 with builtin function code FN and the result vector type TYPE, or NULL_TREE 4013 with builtin function code FN and the result vector type TYPE, or NULL_TREE
3182 if it is not available. */ 4014 if it is not available. */
3183 4015
3184 static tree 4016 static tree
3185 rs6000_builtin_vectorized_function (tree fndecl, tree type_out, 4017 rs6000_builtin_vectorized_function (tree fndecl, tree type_out,
3186 tree type_in) 4018 tree type_in)
3187 { 4019 {
3188 enum machine_mode in_mode, out_mode; 4020 enum machine_mode in_mode, out_mode;
3189 int in_n, out_n; 4021 int in_n, out_n;
3190 enum built_in_function fn = DECL_FUNCTION_CODE (fndecl);
3191 4022
3192 if (TREE_CODE (type_out) != VECTOR_TYPE 4023 if (TREE_CODE (type_out) != VECTOR_TYPE
3193 || TREE_CODE (type_in) != VECTOR_TYPE 4024 || TREE_CODE (type_in) != VECTOR_TYPE
3194 || !TARGET_VECTORIZE_BUILTINS 4025 || !TARGET_VECTORIZE_BUILTINS)
3195 || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL)
3196 return NULL_TREE; 4026 return NULL_TREE;
3197 4027
3198 out_mode = TYPE_MODE (TREE_TYPE (type_out)); 4028 out_mode = TYPE_MODE (TREE_TYPE (type_out));
3199 out_n = TYPE_VECTOR_SUBPARTS (type_out); 4029 out_n = TYPE_VECTOR_SUBPARTS (type_out);
3200 in_mode = TYPE_MODE (TREE_TYPE (type_in)); 4030 in_mode = TYPE_MODE (TREE_TYPE (type_in));
3201 in_n = TYPE_VECTOR_SUBPARTS (type_in); 4031 in_n = TYPE_VECTOR_SUBPARTS (type_in);
3202 4032
3203 switch (fn) 4033 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3204 { 4034 {
3205 case BUILT_IN_COPYSIGN: 4035 enum built_in_function fn = DECL_FUNCTION_CODE (fndecl);
3206 if (VECTOR_UNIT_VSX_P (V2DFmode) 4036 switch (fn)
3207 && out_mode == DFmode && out_n == 2 4037 {
3208 && in_mode == DFmode && in_n == 2) 4038 case BUILT_IN_COPYSIGN:
3209 return rs6000_builtin_decls[VSX_BUILTIN_CPSGNDP]; 4039 if (VECTOR_UNIT_VSX_P (V2DFmode)
3210 break; 4040 && out_mode == DFmode && out_n == 2
3211 case BUILT_IN_COPYSIGNF: 4041 && in_mode == DFmode && in_n == 2)
3212 if (out_mode != SFmode || out_n != 4 4042 return rs6000_builtin_decls[VSX_BUILTIN_CPSGNDP];
3213 || in_mode != SFmode || in_n != 4) 4043 break;
3214 break; 4044 case BUILT_IN_COPYSIGNF:
3215 if (VECTOR_UNIT_VSX_P (V4SFmode)) 4045 if (out_mode != SFmode || out_n != 4
3216 return rs6000_builtin_decls[VSX_BUILTIN_CPSGNSP]; 4046 || in_mode != SFmode || in_n != 4)
3217 if (VECTOR_UNIT_ALTIVEC_P (V4SFmode)) 4047 break;
3218 return rs6000_builtin_decls[ALTIVEC_BUILTIN_COPYSIGN_V4SF]; 4048 if (VECTOR_UNIT_VSX_P (V4SFmode))
3219 break; 4049 return rs6000_builtin_decls[VSX_BUILTIN_CPSGNSP];
3220 case BUILT_IN_SQRT: 4050 if (VECTOR_UNIT_ALTIVEC_P (V4SFmode))
3221 if (VECTOR_UNIT_VSX_P (V2DFmode) 4051 return rs6000_builtin_decls[ALTIVEC_BUILTIN_COPYSIGN_V4SF];
3222 && out_mode == DFmode && out_n == 2 4052 break;
3223 && in_mode == DFmode && in_n == 2) 4053 case BUILT_IN_SQRT:
3224 return rs6000_builtin_decls[VSX_BUILTIN_XVSQRTDP]; 4054 if (VECTOR_UNIT_VSX_P (V2DFmode)
3225 break; 4055 && out_mode == DFmode && out_n == 2
3226 case BUILT_IN_SQRTF: 4056 && in_mode == DFmode && in_n == 2)
3227 if (VECTOR_UNIT_VSX_P (V4SFmode) 4057 return rs6000_builtin_decls[VSX_BUILTIN_XVSQRTDP];
3228 && out_mode == SFmode && out_n == 4 4058 break;
3229 && in_mode == SFmode && in_n == 4) 4059 case BUILT_IN_SQRTF:
3230 return rs6000_builtin_decls[VSX_BUILTIN_XVSQRTSP]; 4060 if (VECTOR_UNIT_VSX_P (V4SFmode)
3231 break; 4061 && out_mode == SFmode && out_n == 4
3232 case BUILT_IN_CEIL: 4062 && in_mode == SFmode && in_n == 4)
3233 if (VECTOR_UNIT_VSX_P (V2DFmode) 4063 return rs6000_builtin_decls[VSX_BUILTIN_XVSQRTSP];
3234 && out_mode == DFmode && out_n == 2 4064 break;
3235 && in_mode == DFmode && in_n == 2) 4065 case BUILT_IN_CEIL:
3236 return rs6000_builtin_decls[VSX_BUILTIN_XVRDPIP]; 4066 if (VECTOR_UNIT_VSX_P (V2DFmode)
3237 break; 4067 && out_mode == DFmode && out_n == 2
3238 case BUILT_IN_CEILF: 4068 && in_mode == DFmode && in_n == 2)
3239 if (out_mode != SFmode || out_n != 4 4069 return rs6000_builtin_decls[VSX_BUILTIN_XVRDPIP];
3240 || in_mode != SFmode || in_n != 4) 4070 break;
3241 break; 4071 case BUILT_IN_CEILF:
3242 if (VECTOR_UNIT_VSX_P (V4SFmode)) 4072 if (out_mode != SFmode || out_n != 4
3243 return rs6000_builtin_decls[VSX_BUILTIN_XVRSPIP]; 4073 || in_mode != SFmode || in_n != 4)
3244 if (VECTOR_UNIT_ALTIVEC_P (V4SFmode)) 4074 break;
3245 return rs6000_builtin_decls[ALTIVEC_BUILTIN_VRFIP]; 4075 if (VECTOR_UNIT_VSX_P (V4SFmode))
3246 break; 4076 return rs6000_builtin_decls[VSX_BUILTIN_XVRSPIP];
3247 case BUILT_IN_FLOOR: 4077 if (VECTOR_UNIT_ALTIVEC_P (V4SFmode))
3248 if (VECTOR_UNIT_VSX_P (V2DFmode) 4078 return rs6000_builtin_decls[ALTIVEC_BUILTIN_VRFIP];
3249 && out_mode == DFmode && out_n == 2 4079 break;
3250 && in_mode == DFmode && in_n == 2) 4080 case BUILT_IN_FLOOR:
3251 return rs6000_builtin_decls[VSX_BUILTIN_XVRDPIM]; 4081 if (VECTOR_UNIT_VSX_P (V2DFmode)
3252 break; 4082 && out_mode == DFmode && out_n == 2
3253 case BUILT_IN_FLOORF: 4083 && in_mode == DFmode && in_n == 2)
3254 if (out_mode != SFmode || out_n != 4 4084 return rs6000_builtin_decls[VSX_BUILTIN_XVRDPIM];
3255 || in_mode != SFmode || in_n != 4) 4085 break;
3256 break; 4086 case BUILT_IN_FLOORF:
3257 if (VECTOR_UNIT_VSX_P (V4SFmode)) 4087 if (out_mode != SFmode || out_n != 4
3258 return rs6000_builtin_decls[VSX_BUILTIN_XVRSPIM]; 4088 || in_mode != SFmode || in_n != 4)
3259 if (VECTOR_UNIT_ALTIVEC_P (V4SFmode)) 4089 break;
3260 return rs6000_builtin_decls[ALTIVEC_BUILTIN_VRFIM]; 4090 if (VECTOR_UNIT_VSX_P (V4SFmode))
3261 break; 4091 return rs6000_builtin_decls[VSX_BUILTIN_XVRSPIM];
3262 case BUILT_IN_TRUNC: 4092 if (VECTOR_UNIT_ALTIVEC_P (V4SFmode))
3263 if (VECTOR_UNIT_VSX_P (V2DFmode) 4093 return rs6000_builtin_decls[ALTIVEC_BUILTIN_VRFIM];
3264 && out_mode == DFmode && out_n == 2 4094 break;
3265 && in_mode == DFmode && in_n == 2) 4095 case BUILT_IN_FMA:
3266 return rs6000_builtin_decls[VSX_BUILTIN_XVRDPIZ]; 4096 if (VECTOR_UNIT_VSX_P (V2DFmode)
3267 break; 4097 && out_mode == DFmode && out_n == 2
3268 case BUILT_IN_TRUNCF: 4098 && in_mode == DFmode && in_n == 2)
3269 if (out_mode != SFmode || out_n != 4 4099 return rs6000_builtin_decls[VSX_BUILTIN_XVMADDDP];
3270 || in_mode != SFmode || in_n != 4) 4100 break;
3271 break; 4101 case BUILT_IN_FMAF:
3272 if (VECTOR_UNIT_VSX_P (V4SFmode)) 4102 if (VECTOR_UNIT_VSX_P (V4SFmode)
3273 return rs6000_builtin_decls[VSX_BUILTIN_XVRSPIZ]; 4103 && out_mode == SFmode && out_n == 4
3274 if (VECTOR_UNIT_ALTIVEC_P (V4SFmode)) 4104 && in_mode == SFmode && in_n == 4)
3275 return rs6000_builtin_decls[ALTIVEC_BUILTIN_VRFIZ]; 4105 return rs6000_builtin_decls[VSX_BUILTIN_XVMADDSP];
3276 break; 4106 else if (VECTOR_UNIT_ALTIVEC_P (V4SFmode)
3277 case BUILT_IN_NEARBYINT: 4107 && out_mode == SFmode && out_n == 4
3278 if (VECTOR_UNIT_VSX_P (V2DFmode) 4108 && in_mode == SFmode && in_n == 4)
3279 && flag_unsafe_math_optimizations 4109 return rs6000_builtin_decls[ALTIVEC_BUILTIN_VMADDFP];
3280 && out_mode == DFmode && out_n == 2 4110 break;
3281 && in_mode == DFmode && in_n == 2) 4111 case BUILT_IN_TRUNC:
3282 return rs6000_builtin_decls[VSX_BUILTIN_XVRDPI]; 4112 if (VECTOR_UNIT_VSX_P (V2DFmode)
3283 break; 4113 && out_mode == DFmode && out_n == 2
3284 case BUILT_IN_NEARBYINTF: 4114 && in_mode == DFmode && in_n == 2)
3285 if (VECTOR_UNIT_VSX_P (V4SFmode) 4115 return rs6000_builtin_decls[VSX_BUILTIN_XVRDPIZ];
3286 && flag_unsafe_math_optimizations 4116 break;
3287 && out_mode == SFmode && out_n == 4 4117 case BUILT_IN_TRUNCF:
3288 && in_mode == SFmode && in_n == 4) 4118 if (out_mode != SFmode || out_n != 4
3289 return rs6000_builtin_decls[VSX_BUILTIN_XVRSPI]; 4119 || in_mode != SFmode || in_n != 4)
3290 break; 4120 break;
3291 case BUILT_IN_RINT: 4121 if (VECTOR_UNIT_VSX_P (V4SFmode))
3292 if (VECTOR_UNIT_VSX_P (V2DFmode) 4122 return rs6000_builtin_decls[VSX_BUILTIN_XVRSPIZ];
3293 && !flag_trapping_math 4123 if (VECTOR_UNIT_ALTIVEC_P (V4SFmode))
3294 && out_mode == DFmode && out_n == 2 4124 return rs6000_builtin_decls[ALTIVEC_BUILTIN_VRFIZ];
3295 && in_mode == DFmode && in_n == 2) 4125 break;
3296 return rs6000_builtin_decls[VSX_BUILTIN_XVRDPIC]; 4126 case BUILT_IN_NEARBYINT:
3297 break; 4127 if (VECTOR_UNIT_VSX_P (V2DFmode)
3298 case BUILT_IN_RINTF: 4128 && flag_unsafe_math_optimizations
3299 if (VECTOR_UNIT_VSX_P (V4SFmode) 4129 && out_mode == DFmode && out_n == 2
3300 && !flag_trapping_math 4130 && in_mode == DFmode && in_n == 2)
3301 && out_mode == SFmode && out_n == 4 4131 return rs6000_builtin_decls[VSX_BUILTIN_XVRDPI];
3302 && in_mode == SFmode && in_n == 4) 4132 break;
3303 return rs6000_builtin_decls[VSX_BUILTIN_XVRSPIC]; 4133 case BUILT_IN_NEARBYINTF:
3304 break; 4134 if (VECTOR_UNIT_VSX_P (V4SFmode)
3305 default: 4135 && flag_unsafe_math_optimizations
3306 break; 4136 && out_mode == SFmode && out_n == 4
3307 } 4137 && in_mode == SFmode && in_n == 4)
4138 return rs6000_builtin_decls[VSX_BUILTIN_XVRSPI];
4139 break;
4140 case BUILT_IN_RINT:
4141 if (VECTOR_UNIT_VSX_P (V2DFmode)
4142 && !flag_trapping_math
4143 && out_mode == DFmode && out_n == 2
4144 && in_mode == DFmode && in_n == 2)
4145 return rs6000_builtin_decls[VSX_BUILTIN_XVRDPIC];
4146 break;
4147 case BUILT_IN_RINTF:
4148 if (VECTOR_UNIT_VSX_P (V4SFmode)
4149 && !flag_trapping_math
4150 && out_mode == SFmode && out_n == 4
4151 && in_mode == SFmode && in_n == 4)
4152 return rs6000_builtin_decls[VSX_BUILTIN_XVRSPIC];
4153 break;
4154 default:
4155 break;
4156 }
4157 }
4158
4159 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
4160 {
4161 enum rs6000_builtins fn
4162 = (enum rs6000_builtins)DECL_FUNCTION_CODE (fndecl);
4163 switch (fn)
4164 {
4165 case RS6000_BUILTIN_RSQRTF:
4166 if (VECTOR_UNIT_ALTIVEC_OR_VSX_P (V4SFmode)
4167 && out_mode == SFmode && out_n == 4
4168 && in_mode == SFmode && in_n == 4)
4169 return rs6000_builtin_decls[ALTIVEC_BUILTIN_VRSQRTFP];
4170 break;
4171 case RS6000_BUILTIN_RSQRT:
4172 if (VECTOR_UNIT_VSX_P (V2DFmode)
4173 && out_mode == DFmode && out_n == 2
4174 && in_mode == DFmode && in_n == 2)
4175 return rs6000_builtin_decls[VSX_BUILTIN_VEC_RSQRT_V2DF];
4176 break;
4177 case RS6000_BUILTIN_RECIPF:
4178 if (VECTOR_UNIT_ALTIVEC_OR_VSX_P (V4SFmode)
4179 && out_mode == SFmode && out_n == 4
4180 && in_mode == SFmode && in_n == 4)
4181 return rs6000_builtin_decls[ALTIVEC_BUILTIN_VRECIPFP];
4182 break;
4183 case RS6000_BUILTIN_RECIP:
4184 if (VECTOR_UNIT_VSX_P (V2DFmode)
4185 && out_mode == DFmode && out_n == 2
4186 && in_mode == DFmode && in_n == 2)
4187 return rs6000_builtin_decls[VSX_BUILTIN_RECIP_V2DF];
4188 break;
4189 default:
4190 break;
4191 }
4192 }
4193
4194 /* Generate calls to libmass if appropriate. */
4195 if (rs6000_veclib_handler)
4196 return rs6000_veclib_handler (fndecl, type_out, type_in);
4197
3308 return NULL_TREE; 4198 return NULL_TREE;
3309 } 4199 }
3310 4200
3311 4201
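A quick sketch of what this hook buys (illustration, not part of the change): when the vectorizer asks for a vector form of a supported math builtin and the element mode and lane count line up, the decl returned above is substituted for the scalar call; for example BUILT_IN_SQRT over two DFmode lanes maps to VSX_BUILTIN_XVSQRTDP.

/* Illustrative only: with something like -O3 -mvsx -fno-math-errno, the
   sqrt calls below are candidates for the V2DF case handled above.  */
#include <math.h>

void
root_all (double *restrict dst, const double *restrict src, int n)
{
  for (int i = 0; i < n; i++)
    dst[i] = sqrt (src[i]);
}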
3312 /* Implement TARGET_HANDLE_OPTION. */ 4202 /* Implement TARGET_HANDLE_OPTION. */
3314 static bool 4204 static bool
3315 rs6000_handle_option (size_t code, const char *arg, int value) 4205 rs6000_handle_option (size_t code, const char *arg, int value)
3316 { 4206 {
3317 enum fpu_type_t fpu_type = FPU_NONE; 4207 enum fpu_type_t fpu_type = FPU_NONE;
3318 int isel; 4208 int isel;
4209 char *p, *q;
3319 4210
3320 switch (code) 4211 switch (code)
3321 { 4212 {
3322 case OPT_mno_power: 4213 case OPT_mno_power:
3323 target_flags &= ~(MASK_POWER | MASK_POWER2 4214 target_flags &= ~(MASK_POWER | MASK_POWER2
3350 target_flags |= MASK_MINIMAL_TOC; 4241 target_flags |= MASK_MINIMAL_TOC;
3351 target_flags_explicit |= MASK_MINIMAL_TOC; 4242 target_flags_explicit |= MASK_MINIMAL_TOC;
3352 break; 4243 break;
3353 #endif 4244 #endif
3354 4245
4246 #if defined (HAVE_LD_LARGE_TOC) && defined (TARGET_USES_LINUX64_OPT)
4247 case OPT_mcmodel_:
4248 if (strcmp (arg, "small") == 0)
4249 rs6000_current_cmodel = CMODEL_SMALL;
4250 else if (strcmp (arg, "medium") == 0)
4251 rs6000_current_cmodel = CMODEL_MEDIUM;
4252 else if (strcmp (arg, "large") == 0)
4253 rs6000_current_cmodel = CMODEL_LARGE;
4254 else
4255 {
4256 error ("invalid option for -mcmodel: '%s'", arg);
4257 return false;
4258 }
4259 rs6000_explicit_options.cmodel = true;
4260 #endif
4261
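(Illustration, not part of the change.)  The new -mcmodel handling selects how 64-bit TOC references are formed: small keeps the single 16-bit TOC displacement, while medium and large build the address from a high part plus a low displacement, the LO_SUM/HIGH form that other hunks in this change teach the legitimizer and reload to accept.  A minimal translation unit showing the kind of access that is affected:

/* Sketch under those assumptions: how the TOC reference for "counter" is
   formed depends on the selected -mcmodel value.  */
extern long counter;

long
get_counter (void)
{
  return counter;
}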
3355 #ifdef TARGET_USES_AIX64_OPT 4262 #ifdef TARGET_USES_AIX64_OPT
3356 case OPT_maix64: 4263 case OPT_maix64:
3357 #else 4264 #else
3358 case OPT_m64: 4265 case OPT_m64:
3359 #endif 4266 #endif
3442 rs6000_explicit_options.spe = true; 4349 rs6000_explicit_options.spe = true;
3443 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe)); 4350 rs6000_parse_yes_no_option ("spe", arg, &(rs6000_spe));
3444 break; 4351 break;
3445 4352
3446 case OPT_mdebug_: 4353 case OPT_mdebug_:
3447 rs6000_debug_name = arg; 4354 p = ASTRDUP (arg);
4355 rs6000_debug = 0;
4356
4357 while ((q = strtok (p, ",")) != NULL)
4358 {
4359 unsigned mask = 0;
4360 bool invert;
4361
4362 p = NULL;
4363 if (*q == '!')
4364 {
4365 invert = true;
4366 q++;
4367 }
4368 else
4369 invert = false;
4370
4371 if (! strcmp (q, "all"))
4372 mask = MASK_DEBUG_ALL;
4373 else if (! strcmp (q, "stack"))
4374 mask = MASK_DEBUG_STACK;
4375 else if (! strcmp (q, "arg"))
4376 mask = MASK_DEBUG_ARG;
4377 else if (! strcmp (q, "reg"))
4378 mask = MASK_DEBUG_REG;
4379 else if (! strcmp (q, "addr"))
4380 mask = MASK_DEBUG_ADDR;
4381 else if (! strcmp (q, "cost"))
4382 mask = MASK_DEBUG_COST;
4383 else if (! strcmp (q, "target"))
4384 mask = MASK_DEBUG_TARGET;
4385 else
4386 error ("unknown -mdebug-%s switch", q);
4387
4388 if (invert)
4389 rs6000_debug &= ~mask;
4390 else
4391 rs6000_debug |= mask;
4392 }
3448 break; 4393 break;
3449 4394
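The rewritten -mdebug case parses its argument eagerly as a comma-separated list, with a leading '!' clearing a flag instead of setting it.  A standalone sketch of the same accumulation, using made-up mask values in place of the real MASK_DEBUG_* constants (illustration only):

#include <stdio.h>
#include <string.h>

#define DBG_STACK 0x01    /* stand-ins for MASK_DEBUG_STACK etc. */
#define DBG_COST  0x02
#define DBG_ALL   (DBG_STACK | DBG_COST)

int
main (void)
{
  char opt[] = "all,!cost";         /* as in -mdebug=all,!cost */
  unsigned debug = 0;
  char *p = opt, *q;

  while ((q = strtok (p, ",")) != NULL)
    {
      int invert = (*q == '!');
      unsigned mask;

      p = NULL;
      if (invert)
        q++;
      mask = !strcmp (q, "all") ? DBG_ALL
             : !strcmp (q, "stack") ? DBG_STACK
             : !strcmp (q, "cost") ? DBG_COST : 0;
      debug = invert ? (debug & ~mask) : (debug | mask);
    }
  printf ("0x%x\n", debug);         /* prints 0x1: stack debugging only */
  return 0;
}

So "-mdebug=all,!cost" enables every debug flag and then drops the cost one, exactly the order-dependent behaviour of the loop above.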
3450 #ifdef TARGET_USES_SYSV4_OPT 4395 #ifdef TARGET_USES_SYSV4_OPT
3451 case OPT_mcall_: 4396 case OPT_mcall_:
3452 rs6000_abi_name = arg; 4397 rs6000_abi_name = arg;
3455 case OPT_msdata_: 4400 case OPT_msdata_:
3456 rs6000_sdata_name = arg; 4401 rs6000_sdata_name = arg;
3457 break; 4402 break;
3458 4403
3459 case OPT_mtls_size_: 4404 case OPT_mtls_size_:
3460 rs6000_tls_size_string = arg; 4405 if (strcmp (arg, "16") == 0)
4406 rs6000_tls_size = 16;
4407 else if (strcmp (arg, "32") == 0)
4408 rs6000_tls_size = 32;
4409 else if (strcmp (arg, "64") == 0)
4410 rs6000_tls_size = 64;
4411 else
4412 error ("bad value %qs for -mtls-size switch", arg);
3461 break; 4413 break;
3462 4414
3463 case OPT_mrelocatable: 4415 case OPT_mrelocatable:
3464 if (value == 1) 4416 if (value == 1)
3465 { 4417 {
3515 /* These are here for testing during development only, do not 4467 /* These are here for testing during development only, do not
3516 document in the manual please. */ 4468 document in the manual please. */
3517 else if (! strcmp (arg, "d64")) 4469 else if (! strcmp (arg, "d64"))
3518 { 4470 {
3519 rs6000_darwin64_abi = 1; 4471 rs6000_darwin64_abi = 1;
3520 warning (0, "Using darwin64 ABI"); 4472 warning (0, "using darwin64 ABI");
3521 } 4473 }
3522 else if (! strcmp (arg, "d32")) 4474 else if (! strcmp (arg, "d32"))
3523 { 4475 {
3524 rs6000_darwin64_abi = 0; 4476 rs6000_darwin64_abi = 0;
3525 warning (0, "Using old darwin ABI"); 4477 warning (0, "using old darwin ABI");
3526 } 4478 }
3527 4479
3528 else if (! strcmp (arg, "ibmlongdouble")) 4480 else if (! strcmp (arg, "ibmlongdouble"))
3529 { 4481 {
3530 rs6000_explicit_options.ieee = true; 4482 rs6000_explicit_options.ieee = true;
3531 rs6000_ieeequad = 0; 4483 rs6000_ieeequad = 0;
3532 warning (0, "Using IBM extended precision long double"); 4484 warning (0, "using IBM extended precision long double");
3533 } 4485 }
3534 else if (! strcmp (arg, "ieeelongdouble")) 4486 else if (! strcmp (arg, "ieeelongdouble"))
3535 { 4487 {
3536 rs6000_explicit_options.ieee = true; 4488 rs6000_explicit_options.ieee = true;
3537 rs6000_ieeequad = 1; 4489 rs6000_ieeequad = 1;
3538 warning (0, "Using IEEE extended precision long double"); 4490 warning (0, "using IEEE extended precision long double");
3539 } 4491 }
3540 4492
3541 else 4493 else
3542 { 4494 {
3543 error ("unknown ABI specified: '%s'", arg); 4495 error ("unknown ABI specified: '%s'", arg);
3545 } 4497 }
3546 break; 4498 break;
3547 4499
3548 case OPT_mcpu_: 4500 case OPT_mcpu_:
3549 rs6000_select[1].string = arg; 4501 rs6000_select[1].string = arg;
4502 rs6000_cpu_index = rs6000_cpu_name_lookup (arg);
4503 if (rs6000_cpu_index < 0)
4504 error ("bad value (%s) for -mcpu", arg);
3550 break; 4505 break;
3551 4506
3552 case OPT_mtune_: 4507 case OPT_mtune_:
3553 rs6000_select[2].string = arg; 4508 rs6000_select[2].string = arg;
4509 rs6000_tune_index = rs6000_cpu_name_lookup (arg);
4510 if (rs6000_tune_index < 0)
4511 error ("bad value (%s) for -mtune", arg);
3554 break; 4512 break;
3555 4513
3556 case OPT_mtraceback_: 4514 case OPT_mtraceback_:
3557 rs6000_traceback_name = arg; 4515 if (! strncmp (arg, "full", 4))
4516 rs6000_traceback = traceback_full;
4517 else if (! strncmp (arg, "part", 4))
4518 rs6000_traceback = traceback_part;
4519 else if (! strncmp (arg, "no", 2))
4520 rs6000_traceback = traceback_none;
4521 else
4522 error ("unknown -mtraceback arg %qs; expecting %<full%>, "
4523 "%<partial%> or %<none%>", arg);
3558 break; 4524 break;
3559 4525
3560 case OPT_mfloat_gprs_: 4526 case OPT_mfloat_gprs_:
3561 rs6000_explicit_options.float_gprs = true; 4527 rs6000_explicit_options.float_gprs = true;
3562 if (! strcmp (arg, "yes") || ! strcmp (arg, "single")) 4528 if (! strcmp (arg, "yes") || ! strcmp (arg, "single"))
3575 case OPT_mlong_double_: 4541 case OPT_mlong_double_:
3576 rs6000_explicit_options.long_double = true; 4542 rs6000_explicit_options.long_double = true;
3577 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE; 4543 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
3578 if (value != 64 && value != 128) 4544 if (value != 64 && value != 128)
3579 { 4545 {
3580 error ("Unknown switch -mlong-double-%s", arg); 4546 error ("unknown switch -mlong-double-%s", arg);
3581 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE; 4547 rs6000_long_double_type_size = RS6000_DEFAULT_LONG_DOUBLE_SIZE;
3582 return false; 4548 return false;
3583 } 4549 }
3584 else 4550 else
3585 rs6000_long_double_type_size = value; 4551 rs6000_long_double_type_size = value;
3662 /* -mfpu=none is equivalent to -msoft-float */ 4628 /* -mfpu=none is equivalent to -msoft-float */
3663 target_flags |= MASK_SOFT_FLOAT; 4629 target_flags |= MASK_SOFT_FLOAT;
3664 target_flags_explicit |= MASK_SOFT_FLOAT; 4630 target_flags_explicit |= MASK_SOFT_FLOAT;
3665 rs6000_single_float = rs6000_double_float = 0; 4631 rs6000_single_float = rs6000_double_float = 0;
3666 } 4632 }
4633
4634 case OPT_mrecip:
4635 rs6000_recip_name = (value) ? "default" : "none";
4636 break;
4637
4638 case OPT_mrecip_:
4639 rs6000_recip_name = arg;
3667 break; 4640 break;
3668 } 4641 }
3669 return true; 4642 return true;
3670 } 4643 }
3671 4644
3718 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break; 4691 case SDATA_EABI: fprintf (file, "%s -msdata=eabi", start); start = ""; break;
3719 } 4692 }
3720 4693
3721 if (rs6000_sdata && g_switch_value) 4694 if (rs6000_sdata && g_switch_value)
3722 { 4695 {
3723 fprintf (file, "%s -G " HOST_WIDE_INT_PRINT_UNSIGNED, start, 4696 fprintf (file, "%s -G %d", start,
3724 g_switch_value); 4697 g_switch_value);
3725 start = ""; 4698 start = "";
3726 } 4699 }
3727 #endif 4700 #endif
3728 4701
3729 if (*start == '\0') 4702 if (*start == '\0')
3730 putc ('\n', file); 4703 putc ('\n', file);
3731 } 4704 }
3732
3733 #ifdef HAVE_AS_GNU_ATTRIBUTE
3734 if (TARGET_32BIT && DEFAULT_ABI == ABI_V4)
3735 {
3736 fprintf (file, "\t.gnu_attribute 4, %d\n",
3737 ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT) ? 1
3738 : (TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_SINGLE_FLOAT) ? 3
3739 : 2));
3740 fprintf (file, "\t.gnu_attribute 8, %d\n",
3741 (TARGET_ALTIVEC_ABI ? 2
3742 : TARGET_SPE_ABI ? 3
3743 : 1));
3744 fprintf (file, "\t.gnu_attribute 12, %d\n",
3745 aix_struct_return ? 2 : 1);
3746
3747 }
3748 #endif
3749 4705
3750 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2)) 4706 if (DEFAULT_ABI == ABI_AIX || (TARGET_ELF && flag_pic == 2))
3751 { 4707 {
3752 switch_to_section (toc_section); 4708 switch_to_section (toc_section);
3753 switch_to_section (text_section); 4709 switch_to_section (text_section);
3897 the corresponding "float" is interpreted as an SImode integer. */ 4853 the corresponding "float" is interpreted as an SImode integer. */
3898 4854
3899 HOST_WIDE_INT 4855 HOST_WIDE_INT
3900 const_vector_elt_as_int (rtx op, unsigned int elt) 4856 const_vector_elt_as_int (rtx op, unsigned int elt)
3901 { 4857 {
3902 rtx tmp = CONST_VECTOR_ELT (op, elt); 4858 rtx tmp;
4859
4860 /* We can't handle V2DImode and V2DFmode vector constants here yet. */
4861 gcc_assert (GET_MODE (op) != V2DImode
4862 && GET_MODE (op) != V2DFmode);
4863
4864 tmp = CONST_VECTOR_ELT (op, elt);
3903 if (GET_MODE (op) == V4SFmode 4865 if (GET_MODE (op) == V4SFmode
3904 || GET_MODE (op) == V2SFmode) 4866 || GET_MODE (op) == V2SFmode)
3905 tmp = gen_lowpart (SImode, tmp); 4867 tmp = gen_lowpart (SImode, tmp);
3906 return INTVAL (tmp); 4868 return INTVAL (tmp);
3907 } 4869 }
3918 { 4880 {
3919 enum machine_mode mode = GET_MODE (op); 4881 enum machine_mode mode = GET_MODE (op);
3920 enum machine_mode inner = GET_MODE_INNER (mode); 4882 enum machine_mode inner = GET_MODE_INNER (mode);
3921 4883
3922 unsigned i; 4884 unsigned i;
3923 unsigned nunits = GET_MODE_NUNITS (mode); 4885 unsigned nunits;
3924 unsigned bitsize = GET_MODE_BITSIZE (inner); 4886 unsigned bitsize;
3925 unsigned mask = GET_MODE_MASK (inner); 4887 unsigned mask;
3926 4888
3927 HOST_WIDE_INT val = const_vector_elt_as_int (op, nunits - 1); 4889 HOST_WIDE_INT val;
3928 HOST_WIDE_INT splat_val = val; 4890 HOST_WIDE_INT splat_val;
3929 HOST_WIDE_INT msb_val = val > 0 ? 0 : -1; 4891 HOST_WIDE_INT msb_val;
4892
4893 if (mode == V2DImode || mode == V2DFmode)
4894 return false;
4895
4896 nunits = GET_MODE_NUNITS (mode);
4897 bitsize = GET_MODE_BITSIZE (inner);
4898 mask = GET_MODE_MASK (inner);
4899
4900 val = const_vector_elt_as_int (op, nunits - 1);
4901 splat_val = val;
4902 msb_val = val > 0 ? 0 : -1;
3930 4903
3931 /* Construct the value to be splatted, if possible. If not, return 0. */ 4904 /* Construct the value to be splatted, if possible. If not, return 0. */
3932 for (i = 2; i <= copies; i *= 2) 4905 for (i = 2; i <= copies; i *= 2)
3933 { 4906 {
3934 HOST_WIDE_INT small_val; 4907 HOST_WIDE_INT small_val;
3988 if (mode == VOIDmode) 4961 if (mode == VOIDmode)
3989 mode = GET_MODE (op); 4962 mode = GET_MODE (op);
3990 else if (mode != GET_MODE (op)) 4963 else if (mode != GET_MODE (op))
3991 return false; 4964 return false;
3992 4965
4966 /* V2DI/V2DF was added with VSX. Only allow 0 and all 1's as easy
4967 constants. */
4968 if (mode == V2DFmode)
4969 return zero_constant (op, mode);
4970
4971 if (mode == V2DImode)
4972 {
4973 /* In case the compiler is built 32-bit, CONST_DOUBLE constants are not
4974 easy. */
4975 if (GET_CODE (CONST_VECTOR_ELT (op, 0)) != CONST_INT
4976 || GET_CODE (CONST_VECTOR_ELT (op, 1)) != CONST_INT)
4977 return false;
4978
4979 if (zero_constant (op, mode))
4980 return true;
4981
4982 if (INTVAL (CONST_VECTOR_ELT (op, 0)) == -1
4983 && INTVAL (CONST_VECTOR_ELT (op, 1)) == -1)
4984 return true;
4985
4986 return false;
4987 }
4988
3993 /* Start with a vspltisw. */ 4989 /* Start with a vspltisw. */
3994 step = GET_MODE_NUNITS (mode) / 4; 4990 step = GET_MODE_NUNITS (mode) / 4;
3995 copies = 1; 4991 copies = 1;
3996 4992
3997 if (vspltis_constant (op, step, copies)) 4993 if (vspltis_constant (op, step, copies))
4064 5060
4065 dest = operands[0]; 5061 dest = operands[0];
4066 vec = operands[1]; 5062 vec = operands[1];
4067 mode = GET_MODE (dest); 5063 mode = GET_MODE (dest);
4068 5064
4069 if (TARGET_VSX && zero_constant (vec, mode)) 5065 if (TARGET_VSX)
4070 return "xxlxor %x0,%x0,%x0"; 5066 {
5067 if (zero_constant (vec, mode))
5068 return "xxlxor %x0,%x0,%x0";
5069
5070 if (mode == V2DImode
5071 && INTVAL (CONST_VECTOR_ELT (vec, 0)) == -1
5072 && INTVAL (CONST_VECTOR_ELT (vec, 1)) == -1)
5073 return "vspltisw %0,-1";
5074 }
4071 5075
4072 if (TARGET_ALTIVEC) 5076 if (TARGET_ALTIVEC)
4073 { 5077 {
4074 rtx splat_vec; 5078 rtx splat_vec;
4075 if (zero_constant (vec, mode)) 5079 if (zero_constant (vec, mode))
4325 else 5329 else
4326 emit_insn (gen_vsx_splat_v2di (target, element)); 5330 emit_insn (gen_vsx_splat_v2di (target, element));
4327 } 5331 }
4328 else 5332 else
4329 { 5333 {
4330 rtx op0 = copy_to_reg (XVECEXP (vals, 0, 0));
4331 rtx op1 = copy_to_reg (XVECEXP (vals, 0, 1));
4332 if (mode == V2DFmode) 5334 if (mode == V2DFmode)
4333 emit_insn (gen_vsx_concat_v2df (target, op0, op1)); 5335 {
5336 rtx op0 = copy_to_mode_reg (DFmode, XVECEXP (vals, 0, 0));
5337 rtx op1 = copy_to_mode_reg (DFmode, XVECEXP (vals, 0, 1));
5338 emit_insn (gen_vsx_concat_v2df (target, op0, op1));
5339 }
4334 else 5340 else
4335 emit_insn (gen_vsx_concat_v2di (target, op0, op1)); 5341 {
5342 rtx op0 = copy_to_mode_reg (DImode, XVECEXP (vals, 0, 0));
5343 rtx op1 = copy_to_mode_reg (DImode, XVECEXP (vals, 0, 1));
5344 emit_insn (gen_vsx_concat_v2di (target, op0, op1));
5345 }
4336 } 5346 }
4337 return; 5347 return;
4338 } 5348 }
4339 5349
4340 /* With single precision floating point on VSX, we know that internally single 5350
4472 void 5482 void
4473 rs6000_expand_vector_extract (rtx target, rtx vec, int elt) 5483 rs6000_expand_vector_extract (rtx target, rtx vec, int elt)
4474 { 5484 {
4475 enum machine_mode mode = GET_MODE (vec); 5485 enum machine_mode mode = GET_MODE (vec);
4476 enum machine_mode inner_mode = GET_MODE_INNER (mode); 5486 enum machine_mode inner_mode = GET_MODE_INNER (mode);
4477 rtx mem, x; 5487 rtx mem;
4478 5488
4479 if (VECTOR_MEM_VSX_P (mode) && (mode == V2DFmode || mode == V2DImode)) 5489 if (VECTOR_MEM_VSX_P (mode) && (mode == V2DFmode || mode == V2DImode))
4480 { 5490 {
4481 rtx (*extract_func) (rtx, rtx, rtx) 5491 rtx (*extract_func) (rtx, rtx, rtx)
4482 = ((mode == V2DFmode) ? gen_vsx_extract_v2df : gen_vsx_extract_v2di); 5492 = ((mode == V2DFmode) ? gen_vsx_extract_v2df : gen_vsx_extract_v2di);
4485 } 5495 }
4486 5496
4487 /* Allocate mode-sized buffer. */ 5497 /* Allocate mode-sized buffer. */
4488 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0); 5498 mem = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
4489 5499
5500 emit_move_insn (mem, vec);
5501
4490 /* Add offset to field within buffer matching vector element. */ 5502 /* Add offset to field within buffer matching vector element. */
4491 mem = adjust_address_nv (mem, mode, elt * GET_MODE_SIZE (inner_mode)); 5503 mem = adjust_address_nv (mem, inner_mode, elt * GET_MODE_SIZE (inner_mode));
4492 5504
4493 /* Store single field into mode-sized buffer. */
4494 x = gen_rtx_UNSPEC (VOIDmode,
4495 gen_rtvec (1, const0_rtx), UNSPEC_STVE);
4496 emit_insn (gen_rtx_PARALLEL (VOIDmode,
4497 gen_rtvec (2,
4498 gen_rtx_SET (VOIDmode,
4499 mem, vec),
4500 x)));
4501 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0)); 5505 emit_move_insn (target, adjust_address_nv (mem, inner_mode, 0));
4502 } 5506 }
4503 5507
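The new extract path stores the whole vector to a stack temporary with a plain move and reloads the selected element, replacing the old UNSPEC_STVE store parallel.  A rough C-level analogue (illustration only, assuming -mvsx so that vector double is available; endianness and lane numbering are glossed over):

#include <altivec.h>

double
extract_lane (vector double v, unsigned elt)
{
  /* Spill the vector, then read one lane back, mirroring the
     assign_stack_temp / adjust_address_nv sequence above.  */
  union { vector double v; double d[2]; } u;

  u.v = v;
  return u.d[elt & 1];
}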
4504 /* Generates shifts and masks for a pair of rldicl or rldicr insns to 5508 /* Generates shifts and masks for a pair of rldicl or rldicr insns to
4505 implement ANDing by the mask IN. */ 5509 implement ANDing by the mask IN. */
4621 unsigned int align = MAX (computed, specified); 5625 unsigned int align = MAX (computed, specified);
4622 tree field = TYPE_FIELDS (type); 5626 tree field = TYPE_FIELDS (type);
4623 5627
4624 /* Skip all non-field decls */ 5628
4625 while (field != NULL && TREE_CODE (field) != FIELD_DECL) 5629 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
4626 field = TREE_CHAIN (field); 5630 field = DECL_CHAIN (field);
4627 5631
4628 if (field != NULL && field != type) 5632 if (field != NULL && field != type)
4629 { 5633 {
4630 type = TREE_TYPE (field); 5634 type = TREE_TYPE (field);
4631 while (TREE_CODE (type) == ARRAY_TYPE) 5635 while (TREE_CODE (type) == ARRAY_TYPE)
4653 /* Find the first field, looking down into aggregates. */ 5657 /* Find the first field, looking down into aggregates. */
4654 do { 5658 do {
4655 tree field = TYPE_FIELDS (type); 5659 tree field = TYPE_FIELDS (type);
4656 /* Skip all non-field decls */ 5660
4657 while (field != NULL && TREE_CODE (field) != FIELD_DECL) 5661 while (field != NULL && TREE_CODE (field) != FIELD_DECL)
4658 field = TREE_CHAIN (field); 5662 field = DECL_CHAIN (field);
4659 if (! field) 5663 if (! field)
4660 break; 5664 break;
4661 /* A packed field does not contribute any extra alignment. */ 5665 /* A packed field does not contribute any extra alignment. */
4662 if (DECL_PACKED (field)) 5666 if (DECL_PACKED (field))
4663 return align; 5667 return align;
4709 HOST_WIDE_INT summand; 5713 HOST_WIDE_INT summand;
4710 5714
4711 /* We have to be careful here, because it is the referenced address 5715 /* We have to be careful here, because it is the referenced address
4712 that must be 32k from _SDA_BASE_, not just the symbol. */ 5716 that must be 32k from _SDA_BASE_, not just the symbol. */
4713 summand = INTVAL (XEXP (sum, 1)); 5717 summand = INTVAL (XEXP (sum, 1));
4714 if (summand < 0 || (unsigned HOST_WIDE_INT) summand > g_switch_value) 5718 if (summand < 0 || summand > g_switch_value)
4715 return 0; 5719 return 0;
4716 5720
4717 sym_ref = XEXP (sum, 0); 5721 sym_ref = XEXP (sum, 0);
4718 } 5722 }
4719 5723
4782 5786
4783 else 5787 else
4784 return false; 5788 return false;
4785 5789
4786 return (regnum >= FIRST_VIRTUAL_REGISTER 5790 return (regnum >= FIRST_VIRTUAL_REGISTER
4787 && regnum <= LAST_VIRTUAL_REGISTER); 5791 && regnum <= LAST_VIRTUAL_POINTER_REGISTER);
5792 }
5793
5794 /* Return true if memory accesses to OP are known to never straddle
5795 a 32k boundary. */
5796
5797 static bool
5798 offsettable_ok_by_alignment (rtx op, HOST_WIDE_INT offset,
5799 enum machine_mode mode)
5800 {
5801 tree decl, type;
5802 unsigned HOST_WIDE_INT dsize, dalign;
5803
5804 if (GET_CODE (op) != SYMBOL_REF)
5805 return false;
5806
5807 decl = SYMBOL_REF_DECL (op);
5808 if (!decl)
5809 {
5810 if (GET_MODE_SIZE (mode) == 0)
5811 return false;
5812
5813 /* -fsection-anchors loses the original SYMBOL_REF_DECL when
5814 replacing memory addresses with an anchor plus offset. We
5815 could find the decl by rummaging around in the block->objects
5816 VEC for the given offset but that seems like too much work. */
5817 dalign = 1;
5818 if (SYMBOL_REF_HAS_BLOCK_INFO_P (op)
5819 && SYMBOL_REF_ANCHOR_P (op)
5820 && SYMBOL_REF_BLOCK (op) != NULL)
5821 {
5822 struct object_block *block = SYMBOL_REF_BLOCK (op);
5823 HOST_WIDE_INT lsb, mask;
5824
5825 /* Given the alignment of the block.. */
5826 dalign = block->alignment;
5827 mask = dalign / BITS_PER_UNIT - 1;
5828
5829 /* ..and the combined offset of the anchor and any offset
5830 to this block object.. */
5831 offset += SYMBOL_REF_BLOCK_OFFSET (op);
5832 lsb = offset & -offset;
5833
5834 /* ..find how many bits of the alignment we know for the
5835 object. */
5836 mask &= lsb - 1;
5837 dalign = mask + 1;
5838 }
5839 return dalign >= GET_MODE_SIZE (mode);
5840 }
5841
5842 if (DECL_P (decl))
5843 {
5844 if (TREE_CODE (decl) == FUNCTION_DECL)
5845 return true;
5846
5847 if (!DECL_SIZE_UNIT (decl))
5848 return false;
5849
5850 if (!host_integerp (DECL_SIZE_UNIT (decl), 1))
5851 return false;
5852
5853 dsize = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
5854 if (dsize > 32768)
5855 return false;
5856
5857 dalign = DECL_ALIGN_UNIT (decl);
5858 return dalign >= dsize;
5859 }
5860
5861 type = TREE_TYPE (decl);
5862
5863 if (TREE_CODE (decl) == STRING_CST)
5864 dsize = TREE_STRING_LENGTH (decl);
5865 else if (TYPE_SIZE_UNIT (type)
5866 && host_integerp (TYPE_SIZE_UNIT (type), 1))
5867 dsize = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5868 else
5869 return false;
5870 if (dsize > 32768)
5871 return false;
5872
5873 dalign = TYPE_ALIGN (type);
5874 if (CONSTANT_CLASS_P (decl))
5875 dalign = CONSTANT_ALIGNMENT (decl, dalign);
5876 else
5877 dalign = DATA_ALIGNMENT (decl, dalign);
5878 dalign /= BITS_PER_UNIT;
5879 return dalign >= dsize;
4788 } 5880 }
4789 5881
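This new predicate is what lets the medium code model keep using offsettable TOC addresses: an access is only treated as safe when the referenced object is no larger than 32k and its alignment is at least its size (or, for section anchors, when the block alignment implied by the anchor offset covers the access), so no load or store can straddle a 32k boundary.  Examples of how the decl path decides (the declarations below are illustrative, not from the patch):

double scale_factor;            /* size 8, align 8: dalign >= dsize, accepted */
char   big_table[40000];        /* dsize > 32768: rejected                    */
struct blob { char c[24]; } b;  /* size 24, alignment likely smaller: rejected */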
4790 static bool 5882 static bool
4791 constant_pool_expr_p (rtx op) 5883 constant_pool_expr_p (rtx op)
4792 { 5884 {
4796 return (GET_CODE (base) == SYMBOL_REF 5888 return (GET_CODE (base) == SYMBOL_REF
4797 && CONSTANT_POOL_ADDRESS_P (base) 5889 && CONSTANT_POOL_ADDRESS_P (base)
4798 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (base), Pmode)); 5890 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (base), Pmode));
4799 } 5891 }
4800 5892
5893 static rtx tocrel_base, tocrel_offset;
5894
4801 bool 5895 bool
4802 toc_relative_expr_p (rtx op) 5896 toc_relative_expr_p (rtx op)
4803 { 5897 {
4804 rtx base, offset;
4805
4806 if (GET_CODE (op) != CONST) 5898 if (GET_CODE (op) != CONST)
4807 return false; 5899 return false;
4808 5900
4809 split_const (op, &base, &offset); 5901 split_const (op, &tocrel_base, &tocrel_offset);
4810 return (GET_CODE (base) == UNSPEC 5902 return (GET_CODE (tocrel_base) == UNSPEC
4811 && XINT (base, 1) == UNSPEC_TOCREL); 5903 && XINT (tocrel_base, 1) == UNSPEC_TOCREL);
4812 } 5904 }
5905
5906 /* Return true if X is a constant pool address, and also for cmodel=medium
5907 if X is a toc-relative address known to be offsettable within MODE. */
4813 5908
4814 bool 5909 bool
4815 legitimate_constant_pool_address_p (rtx x) 5910 legitimate_constant_pool_address_p (const_rtx x, enum machine_mode mode,
5911 bool strict)
4816 { 5912 {
4817 return (TARGET_TOC 5913 return (TARGET_TOC
4818 && GET_CODE (x) == PLUS 5914 && (GET_CODE (x) == PLUS || GET_CODE (x) == LO_SUM)
4819 && GET_CODE (XEXP (x, 0)) == REG 5915 && GET_CODE (XEXP (x, 0)) == REG
4820 && (TARGET_MINIMAL_TOC || REGNO (XEXP (x, 0)) == TOC_REGISTER) 5916 && (REGNO (XEXP (x, 0)) == TOC_REGISTER
4821 && toc_relative_expr_p (XEXP (x, 1))); 5917 || ((TARGET_MINIMAL_TOC
5918 || TARGET_CMODEL != CMODEL_SMALL)
5919 && INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict)))
5920 && toc_relative_expr_p (XEXP (x, 1))
5921 && (TARGET_CMODEL != CMODEL_MEDIUM
5922 || constant_pool_expr_p (XVECEXP (tocrel_base, 0, 0))
5923 || mode == QImode
5924 || offsettable_ok_by_alignment (XVECEXP (tocrel_base, 0, 0),
5925 INTVAL (tocrel_offset), mode)));
4822 } 5926 }
4823 5927
4824 static bool 5928 static bool
4825 legitimate_small_data_p (enum machine_mode mode, rtx x) 5929 legitimate_small_data_p (enum machine_mode mode, rtx x)
4826 { 5930 {
4844 return false; 5948 return false;
4845 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict)) 5949 if (!INT_REG_OK_FOR_BASE_P (XEXP (x, 0), strict))
4846 return false; 5950 return false;
4847 if (!reg_offset_addressing_ok_p (mode)) 5951 if (!reg_offset_addressing_ok_p (mode))
4848 return virtual_stack_registers_memory_p (x); 5952 return virtual_stack_registers_memory_p (x);
4849 if (legitimate_constant_pool_address_p (x)) 5953 if (legitimate_constant_pool_address_p (x, mode, strict))
4850 return true; 5954 return true;
4851 if (GET_CODE (XEXP (x, 1)) != CONST_INT) 5955 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4852 return false; 5956 return false;
4853 5957
4854 offset = INTVAL (XEXP (x, 1)); 5958 offset = INTVAL (XEXP (x, 1));
5192 else if (TARGET_TOC 6296 else if (TARGET_TOC
5193 && GET_CODE (x) == SYMBOL_REF 6297 && GET_CODE (x) == SYMBOL_REF
5194 && constant_pool_expr_p (x) 6298 && constant_pool_expr_p (x)
5195 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode)) 6299 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), Pmode))
5196 { 6300 {
5197 return create_TOC_reference (x); 6301 rtx reg = TARGET_CMODEL != CMODEL_SMALL ? gen_reg_rtx (Pmode) : NULL_RTX;
6302 return create_TOC_reference (x, reg);
5198 } 6303 }
5199 else 6304 else
5200 return x; 6305 return x;
5201 } 6306 }
5202 6307
5283 orig_x = delegitimize_mem_from_attrs (orig_x); 6388 orig_x = delegitimize_mem_from_attrs (orig_x);
5284 x = orig_x; 6389 x = orig_x;
5285 if (MEM_P (x)) 6390 if (MEM_P (x))
5286 x = XEXP (x, 0); 6391 x = XEXP (x, 0);
5287 6392
5288 if (GET_CODE (x) == PLUS 6393 if (GET_CODE (x) == (TARGET_CMODEL != CMODEL_SMALL ? LO_SUM : PLUS)
5289 && GET_CODE (XEXP (x, 1)) == CONST 6394 && GET_CODE (XEXP (x, 1)) == CONST)
5290 && GET_CODE (XEXP (x, 0)) == REG
5291 && REGNO (XEXP (x, 0)) == TOC_REGISTER)
5292 { 6395 {
5293 y = XEXP (XEXP (x, 1), 0); 6396 y = XEXP (XEXP (x, 1), 0);
5294 if (GET_CODE (y) == UNSPEC 6397 if (GET_CODE (y) == UNSPEC
5295 && XINT (y, 1) == UNSPEC_TOCREL) 6398 && XINT (y, 1) == UNSPEC_TOCREL
6399 && ((GET_CODE (XEXP (x, 0)) == REG
6400 && (REGNO (XEXP (x, 0)) == TOC_REGISTER
6401 || TARGET_MINIMAL_TOC
6402 || TARGET_CMODEL != CMODEL_SMALL))
6403 || (TARGET_CMODEL != CMODEL_SMALL
6404 && GET_CODE (XEXP (x, 0)) == PLUS
6405 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
6406 && REGNO (XEXP (XEXP (x, 0), 0)) == TOC_REGISTER
6407 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
6408 && rtx_equal_p (XEXP (x, 1),
6409 XEXP (XEXP (XEXP (x, 0), 1), 0)))))
5296 { 6410 {
5297 y = XVECEXP (y, 0, 0); 6411 y = XVECEXP (y, 0, 0);
5298 if (!MEM_P (orig_x)) 6412 if (!MEM_P (orig_x))
5299 return y; 6413 return y;
5300 else 6414 else
5301 return replace_equiv_address_nv (orig_x, y); 6415 return replace_equiv_address_nv (orig_x, y);
5302 } 6416 }
5303 return orig_x;
5304 } 6417 }
5305 6418
5306 if (TARGET_MACHO 6419 if (TARGET_MACHO
5307 && GET_CODE (orig_x) == LO_SUM 6420 && GET_CODE (orig_x) == LO_SUM
5308 && GET_CODE (XEXP (x, 1)) == CONST) 6421 && GET_CODE (XEXP (x, 1)) == CONST)
5391 insn = gen_tls_tprel_lo_32 (dest, tmp, addr); 6504 insn = gen_tls_tprel_lo_32 (dest, tmp, addr);
5392 emit_insn (insn); 6505 emit_insn (insn);
5393 } 6506 }
5394 else 6507 else
5395 { 6508 {
5396 rtx r3, got, tga, tmp1, tmp2, eqv; 6509 rtx r3, got, tga, tmp1, tmp2, call_insn;
5397 6510
5398 /* We currently use relocations like @got@tlsgd for tls, which 6511 /* We currently use relocations like @got@tlsgd for tls, which
5399 means the linker will handle allocation of tls entries, placing 6512 means the linker will handle allocation of tls entries, placing
5400 them in the .got section. So use a pointer to the .got section, 6513 them in the .got section. So use a pointer to the .got section,
5401 not one to secondary TOC sections used by 64-bit -mminimal-toc, 6514 not one to secondary TOC sections used by 64-bit -mminimal-toc,
5412 got = gen_reg_rtx (Pmode); 6525 got = gen_reg_rtx (Pmode);
5413 if (flag_pic == 0) 6526 if (flag_pic == 0)
5414 rs6000_emit_move (got, gsym, Pmode); 6527 rs6000_emit_move (got, gsym, Pmode);
5415 else 6528 else
5416 { 6529 {
5417 rtx tmp3, mem; 6530 rtx mem, lab, last;
5418 rtx last;
5419 6531
5420 tmp1 = gen_reg_rtx (Pmode); 6532 tmp1 = gen_reg_rtx (Pmode);
5421 tmp2 = gen_reg_rtx (Pmode); 6533 tmp2 = gen_reg_rtx (Pmode);
5422 tmp3 = gen_reg_rtx (Pmode);
5423 mem = gen_const_mem (Pmode, tmp1); 6534 mem = gen_const_mem (Pmode, tmp1);
5424 6535 lab = gen_label_rtx ();
5425 emit_insn (gen_load_toc_v4_PIC_1b (gsym)); 6536 emit_insn (gen_load_toc_v4_PIC_1b (gsym, lab));
5426 emit_move_insn (tmp1, 6537 emit_move_insn (tmp1, gen_rtx_REG (Pmode, LR_REGNO));
5427 gen_rtx_REG (Pmode, LR_REGNO));
5428 emit_move_insn (tmp2, mem); 6538 emit_move_insn (tmp2, mem);
5429 emit_insn (gen_addsi3 (tmp3, tmp1, tmp2)); 6539 last = emit_insn (gen_addsi3 (got, tmp1, tmp2));
5430 last = emit_move_insn (got, tmp3);
5431 set_unique_reg_note (last, REG_EQUAL, gsym); 6540 set_unique_reg_note (last, REG_EQUAL, gsym);
5432 } 6541 }
5433 } 6542 }
5434 } 6543 }
5435 6544
5436 if (model == TLS_MODEL_GLOBAL_DYNAMIC) 6545 if (model == TLS_MODEL_GLOBAL_DYNAMIC)
5437 { 6546 {
5438 r3 = gen_rtx_REG (Pmode, 3); 6547 r3 = gen_rtx_REG (Pmode, 3);
5439 tga = rs6000_tls_get_addr (); 6548 tga = rs6000_tls_get_addr ();
6549 emit_library_call_value (tga, dest, LCT_CONST, Pmode, 1, r3, Pmode);
5440 6550
5441 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT) 6551 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
5442 insn = gen_tls_gd_aix64 (r3, got, addr, tga, const0_rtx); 6552 insn = gen_tls_gd_aix64 (r3, got, addr, tga, const0_rtx);
5443 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT) 6553 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
5444 insn = gen_tls_gd_aix32 (r3, got, addr, tga, const0_rtx); 6554 insn = gen_tls_gd_aix32 (r3, got, addr, tga, const0_rtx);
5445 else if (DEFAULT_ABI == ABI_V4) 6555 else if (DEFAULT_ABI == ABI_V4)
5446 insn = gen_tls_gd_sysvsi (r3, got, addr, tga, const0_rtx); 6556 insn = gen_tls_gd_sysvsi (r3, got, addr, tga, const0_rtx);
5447 else 6557 else
5448 gcc_unreachable (); 6558 gcc_unreachable ();
5449 6559 call_insn = last_call_insn ();
5450 start_sequence (); 6560 PATTERN (call_insn) = insn;
5451 insn = emit_call_insn (insn);
5452 RTL_CONST_CALL_P (insn) = 1;
5453 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
5454 if (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic) 6561 if (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
5455 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx); 6562 use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn),
5456 insn = get_insns (); 6563 pic_offset_table_rtx);
5457 end_sequence ();
5458 emit_libcall_block (insn, dest, r3, addr);
5459 } 6564 }
5460 else if (model == TLS_MODEL_LOCAL_DYNAMIC) 6565 else if (model == TLS_MODEL_LOCAL_DYNAMIC)
5461 { 6566 {
5462 r3 = gen_rtx_REG (Pmode, 3); 6567 r3 = gen_rtx_REG (Pmode, 3);
5463 tga = rs6000_tls_get_addr (); 6568 tga = rs6000_tls_get_addr ();
6569 tmp1 = gen_reg_rtx (Pmode);
6570 emit_library_call_value (tga, tmp1, LCT_CONST, Pmode, 1, r3, Pmode);
5464 6571
5465 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT) 6572 if (DEFAULT_ABI == ABI_AIX && TARGET_64BIT)
5466 insn = gen_tls_ld_aix64 (r3, got, tga, const0_rtx); 6573 insn = gen_tls_ld_aix64 (r3, got, tga, const0_rtx);
5467 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT) 6574 else if (DEFAULT_ABI == ABI_AIX && !TARGET_64BIT)
5468 insn = gen_tls_ld_aix32 (r3, got, tga, const0_rtx); 6575 insn = gen_tls_ld_aix32 (r3, got, tga, const0_rtx);
5469 else if (DEFAULT_ABI == ABI_V4) 6576 else if (DEFAULT_ABI == ABI_V4)
5470 insn = gen_tls_ld_sysvsi (r3, got, tga, const0_rtx); 6577 insn = gen_tls_ld_sysvsi (r3, got, tga, const0_rtx);
5471 else 6578 else
5472 gcc_unreachable (); 6579 gcc_unreachable ();
5473 6580 call_insn = last_call_insn ();
5474 start_sequence (); 6581 PATTERN (call_insn) = insn;
5475 insn = emit_call_insn (insn);
5476 RTL_CONST_CALL_P (insn) = 1;
5477 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r3);
5478 if (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic) 6582 if (DEFAULT_ABI == ABI_V4 && TARGET_SECURE_PLT && flag_pic)
5479 use_reg (&CALL_INSN_FUNCTION_USAGE (insn), pic_offset_table_rtx); 6583 use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn),
5480 insn = get_insns (); 6584 pic_offset_table_rtx);
5481 end_sequence (); 6585
5482 tmp1 = gen_reg_rtx (Pmode);
5483 eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
5484 UNSPEC_TLSLD);
5485 emit_libcall_block (insn, tmp1, r3, eqv);
5486 if (rs6000_tls_size == 16) 6586 if (rs6000_tls_size == 16)
5487 { 6587 {
5488 if (TARGET_64BIT) 6588 if (TARGET_64BIT)
5489 insn = gen_tls_dtprel_64 (dest, tmp1, addr); 6589 insn = gen_tls_dtprel_64 (dest, tmp1, addr);
5490 else 6590 else
5588 opnum, (enum reload_type)type); 6688 opnum, (enum reload_type)type);
5589 *win = 1; 6689 *win = 1;
5590 return x; 6690 return x;
5591 } 6691 }
5592 6692
6693 /* Likewise for (lo_sum (high ...) ...) output we have generated. */
6694 if (GET_CODE (x) == LO_SUM
6695 && GET_CODE (XEXP (x, 0)) == HIGH)
6696 {
6697 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
6698 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
6699 opnum, (enum reload_type)type);
6700 *win = 1;
6701 return x;
6702 }
6703
5593 #if TARGET_MACHO 6704 #if TARGET_MACHO
5594 if (DEFAULT_ABI == ABI_DARWIN && flag_pic 6705 if (DEFAULT_ABI == ABI_DARWIN && flag_pic
5595 && GET_CODE (x) == LO_SUM 6706 && GET_CODE (x) == LO_SUM
5596 && GET_CODE (XEXP (x, 0)) == PLUS 6707 && GET_CODE (XEXP (x, 0)) == PLUS
5597 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx 6708 && XEXP (XEXP (x, 0), 0) == pic_offset_table_rtx
5606 opnum, (enum reload_type)type); 6717 opnum, (enum reload_type)type);
5607 *win = 1; 6718 *win = 1;
5608 return x; 6719 return x;
5609 } 6720 }
5610 #endif 6721 #endif
6722
6723 if (TARGET_CMODEL != CMODEL_SMALL
6724 && GET_CODE (x) == LO_SUM
6725 && GET_CODE (XEXP (x, 0)) == PLUS
6726 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
6727 && REGNO (XEXP (XEXP (x, 0), 0)) == TOC_REGISTER
6728 && GET_CODE (XEXP (XEXP (x, 0), 1)) == HIGH
6729 && GET_CODE (XEXP (x, 1)) == CONST
6730 && GET_CODE (XEXP (XEXP (x, 1), 0)) == UNSPEC
6731 && XINT (XEXP (XEXP (x, 1), 0), 1) == UNSPEC_TOCREL
6732 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 1), 0), XEXP (x, 1)))
6733 {
6734 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
6735 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
6736 opnum, (enum reload_type) type);
6737 *win = 1;
6738 return x;
6739 }
5611 6740
5612 /* Force ld/std non-word aligned offset into base register by wrapping 6741 /* Force ld/std non-word aligned offset into base register by wrapping
5613 in offset 0. */ 6742 in offset 0. */
5614 if (GET_CODE (x) == PLUS 6743 if (GET_CODE (x) == PLUS
5615 && GET_CODE (XEXP (x, 0)) == REG 6744 && GET_CODE (XEXP (x, 0)) == REG
5732 && reg_offset_p 6861 && reg_offset_p
5733 && GET_CODE (x) == SYMBOL_REF 6862 && GET_CODE (x) == SYMBOL_REF
5734 && constant_pool_expr_p (x) 6863 && constant_pool_expr_p (x)
5735 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode)) 6864 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (x), mode))
5736 { 6865 {
5737 x = create_TOC_reference (x); 6866 x = create_TOC_reference (x, NULL_RTX);
6867 if (TARGET_CMODEL != CMODEL_SMALL)
6868 push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
6869 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
6870 opnum, (enum reload_type) type);
5738 *win = 1; 6871 *win = 1;
5739 return x; 6872 return x;
5740 } 6873 }
5741 *win = 0; 6874 *win = 0;
5742 return x; 6875 return x;
5815 return 1; 6948 return 1;
5816 if (virtual_stack_registers_memory_p (x)) 6949 if (virtual_stack_registers_memory_p (x))
5817 return 1; 6950 return 1;
5818 if (reg_offset_p && legitimate_small_data_p (mode, x)) 6951 if (reg_offset_p && legitimate_small_data_p (mode, x))
5819 return 1; 6952 return 1;
5820 if (reg_offset_p && legitimate_constant_pool_address_p (x)) 6953 if (reg_offset_p
6954 && legitimate_constant_pool_address_p (x, mode, reg_ok_strict))
5821 return 1; 6955 return 1;
5822 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */ 6956 /* If not REG_OK_STRICT (before reload) let pass any stack offset. */
5823 if (! reg_ok_strict 6957 if (! reg_ok_strict
5824 && reg_offset_p 6958 && reg_offset_p
5825 && GET_CODE (x) == PLUS 6959 && GET_CODE (x) == PLUS
5882 debug_rtx (x); 7016 debug_rtx (x);
5883 7017
5884 return ret; 7018 return ret;
5885 } 7019 }
5886 7020
7021 /* Implement TARGET_MODE_DEPENDENT_ADDRESS_P. */
7022
7023 static bool
7024 rs6000_mode_dependent_address_p (const_rtx addr)
7025 {
7026 return rs6000_mode_dependent_address_ptr (addr);
7027 }
7028
5887 /* Go to LABEL if ADDR (a legitimate address expression) 7029 /* Go to LABEL if ADDR (a legitimate address expression)
5888 has an effect that depends on the machine mode it is used for. 7030 has an effect that depends on the machine mode it is used for.
5889 7031
5890 On the RS/6000 this is true of all integral offsets (since AltiVec 7032 On the RS/6000 this is true of all integral offsets (since AltiVec
5891 and VSX modes don't allow them) or is a pre-increment or decrement. 7033 and VSX modes don't allow them) or is a pre-increment or decrement.
5915 return val + 12 + 0x8000 >= 0x10000; 7057 return val + 12 + 0x8000 >= 0x10000;
5916 } 7058 }
5917 break; 7059 break;
5918 7060
5919 case LO_SUM: 7061 case LO_SUM:
5920 return true; 7062 /* Anything in the constant pool is sufficiently aligned that
7063 all bytes have the same high part address. */
7064 return !legitimate_constant_pool_address_p (addr, QImode, false);
5921 7065
5922 /* Auto-increment cases are now treated generically in recog.c. */ 7066 /* Auto-increment cases are now treated generically in recog.c. */
5923 case PRE_MODIFY: 7067 case PRE_MODIFY:
5924 return TARGET_UPDATE; 7068 return TARGET_UPDATE;
5925 7069
5994 the correct logic for the PLUS case of rs6000_mode_dependent_address. */ 7138 the correct logic for the PLUS case of rs6000_mode_dependent_address. */
5995 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1); 7139 return rs6000_legitimate_offset_address_p (GET_MODE (op), XEXP (op, 0), 1);
5996 } 7140 }
5997 7141
5998 /* Change register usage conditional on target flags. */ 7142 /* Change register usage conditional on target flags. */
5999 void 7143 static void
6000 rs6000_conditional_register_usage (void) 7144 rs6000_conditional_register_usage (void)
6001 { 7145 {
6002 int i; 7146 int i;
7147
7148 if (TARGET_DEBUG_TARGET)
7149 fprintf (stderr, "rs6000_conditional_register_usage called\n");
6003 7150
6004 /* Set MQ register fixed (already call_used) if not POWER 7151 /* Set MQ register fixed (already call_used) if not POWER
6005 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not 7152 architecture (RIOS1, RIOS2, RSC, and PPC601) so that it will not
6006 be allocated. */ 7153 be allocated. */
6007 if (! TARGET_POWER) 7154 if (! TARGET_POWER)
6271 in cases where it won't work (TImode, TFmode, TDmode). */ 7418 in cases where it won't work (TImode, TFmode, TDmode). */
6272 7419
6273 static void 7420 static void
6274 rs6000_eliminate_indexed_memrefs (rtx operands[2]) 7421 rs6000_eliminate_indexed_memrefs (rtx operands[2])
6275 { 7422 {
7423 if (reload_in_progress)
7424 return;
7425
6276 if (GET_CODE (operands[0]) == MEM 7426 if (GET_CODE (operands[0]) == MEM
6277 && GET_CODE (XEXP (operands[0], 0)) != REG 7427 && GET_CODE (XEXP (operands[0], 0)) != REG
6278 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0)) 7428 && ! legitimate_constant_pool_address_p (XEXP (operands[0], 0),
6279 && ! reload_in_progress) 7429 GET_MODE (operands[0]), false))
6280 operands[0] 7430 operands[0]
6281 = replace_equiv_address (operands[0], 7431 = replace_equiv_address (operands[0],
6282 copy_addr_to_reg (XEXP (operands[0], 0))); 7432 copy_addr_to_reg (XEXP (operands[0], 0)));
6283 7433
6284 if (GET_CODE (operands[1]) == MEM 7434 if (GET_CODE (operands[1]) == MEM
6285 && GET_CODE (XEXP (operands[1], 0)) != REG 7435 && GET_CODE (XEXP (operands[1], 0)) != REG
6286 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0)) 7436 && ! legitimate_constant_pool_address_p (XEXP (operands[1], 0),
6287 && ! reload_in_progress) 7437 GET_MODE (operands[1]), false))
6288 operands[1] 7438 operands[1]
6289 = replace_equiv_address (operands[1], 7439 = replace_equiv_address (operands[1],
6290 copy_addr_to_reg (XEXP (operands[1], 0))); 7440 copy_addr_to_reg (XEXP (operands[1], 0)));
6291 } 7441 }
6292 7442
6601 } 7751 }
6602 7752
6603 /* If this is a SYMBOL_REF that refers to a constant pool entry, 7753 /* If this is a SYMBOL_REF that refers to a constant pool entry,
6604 and we have put it in the TOC, we just need to make a TOC-relative 7754 and we have put it in the TOC, we just need to make a TOC-relative
6605 reference to it. */ 7755 reference to it. */
6606 if (TARGET_TOC 7756 if ((TARGET_TOC
6607 && GET_CODE (operands[1]) == SYMBOL_REF 7757 && GET_CODE (operands[1]) == SYMBOL_REF
6608 && constant_pool_expr_p (operands[1]) 7758 && constant_pool_expr_p (operands[1])
6609 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]), 7759 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (get_pool_constant (operands[1]),
6610 get_pool_mode (operands[1]))) 7760 get_pool_mode (operands[1])))
6611 { 7761 || (TARGET_CMODEL == CMODEL_MEDIUM
6612 operands[1] = create_TOC_reference (operands[1]); 7762 && GET_CODE (operands[1]) == SYMBOL_REF
7763 && !CONSTANT_POOL_ADDRESS_P (operands[1])
7764 && SYMBOL_REF_LOCAL_P (operands[1])))
7765 {
7766 rtx reg = NULL_RTX;
7767 if (TARGET_CMODEL != CMODEL_SMALL)
7768 {
7769 if (can_create_pseudo_p ())
7770 reg = gen_reg_rtx (Pmode);
7771 else
7772 reg = operands[0];
7773 }
7774 operands[1] = create_TOC_reference (operands[1], reg);
6613 } 7775 }
6614 else if (mode == Pmode 7776 else if (mode == Pmode
6615 && CONSTANT_P (operands[1]) 7777 && CONSTANT_P (operands[1])
6616 && ((GET_CODE (operands[1]) != CONST_INT 7778 && ((GET_CODE (operands[1]) != CONST_INT
6617 && ! easy_fp_constant (operands[1], mode)) 7779 && ! easy_fp_constant (operands[1], mode))
6618 || (GET_CODE (operands[1]) == CONST_INT 7780 || (GET_CODE (operands[1]) == CONST_INT
6619 && num_insns_constant (operands[1], mode) > 2) 7781 && (num_insns_constant (operands[1], mode)
7782 > (TARGET_CMODEL != CMODEL_SMALL ? 3 : 2)))
6620 || (GET_CODE (operands[0]) == REG 7783 || (GET_CODE (operands[0]) == REG
6621 && FP_REGNO_P (REGNO (operands[0])))) 7784 && FP_REGNO_P (REGNO (operands[0]))))
6622 && GET_CODE (operands[1]) != HIGH 7785 && GET_CODE (operands[1]) != HIGH
6623 && ! legitimate_constant_pool_address_p (operands[1]) 7786 && ! legitimate_constant_pool_address_p (operands[1], mode,
6624 && ! toc_relative_expr_p (operands[1])) 7787 false)
7788 && ! toc_relative_expr_p (operands[1])
7789 && (TARGET_CMODEL == CMODEL_SMALL
7790 || can_create_pseudo_p ()
7791 || (REG_P (operands[0])
7792 && INT_REG_OK_FOR_BASE_P (operands[0], true))))
6625 { 7793 {
6626 7794
6627 #if TARGET_MACHO 7795 #if TARGET_MACHO
6628 /* Darwin uses a special PIC legitimizer. */ 7796 /* Darwin uses a special PIC legitimizer. */
6629 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT) 7797 if (DEFAULT_ABI == ABI_DARWIN && MACHOPIC_INDIRECT)
6665 && constant_pool_expr_p (XEXP (operands[1], 0)) 7833 && constant_pool_expr_p (XEXP (operands[1], 0))
6666 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P ( 7834 && ASM_OUTPUT_SPECIAL_POOL_ENTRY_P (
6667 get_pool_constant (XEXP (operands[1], 0)), 7835 get_pool_constant (XEXP (operands[1], 0)),
6668 get_pool_mode (XEXP (operands[1], 0)))) 7836 get_pool_mode (XEXP (operands[1], 0))))
6669 { 7837 {
6670 operands[1] 7838 rtx tocref;
6671 = gen_const_mem (mode, 7839 rtx reg = NULL_RTX;
6672 create_TOC_reference (XEXP (operands[1], 0))); 7840 if (TARGET_CMODEL != CMODEL_SMALL)
7841 {
7842 if (can_create_pseudo_p ())
7843 reg = gen_reg_rtx (Pmode);
7844 else
7845 reg = operands[0];
7846 }
7847 tocref = create_TOC_reference (XEXP (operands[1], 0), reg);
7848 operands[1] = gen_const_mem (mode, tocref);
6673 set_mem_alias_set (operands[1], get_TOC_alias_set ()); 7849 set_mem_alias_set (operands[1], get_TOC_alias_set ());
6674 } 7850 }
6675 } 7851 }
6676 break; 7852 break;
6677 7853
6721 memory, just as large structures are always returned. TYPE will be 7897 memory, just as large structures are always returned. TYPE will be
6722 the data type of the value, and FNTYPE will be the type of the 7898 the data type of the value, and FNTYPE will be the type of the
6723 function doing the returning, or @code{NULL} for libcalls. 7899 function doing the returning, or @code{NULL} for libcalls.
6724 7900
6725 The AIX ABI for the RS/6000 specifies that all structures are 7901 The AIX ABI for the RS/6000 specifies that all structures are
6726 returned in memory. The Darwin ABI does the same. The SVR4 ABI 7902 returned in memory. The Darwin ABI does the same.
6727 specifies that structures <= 8 bytes are returned in r3/r4, but a 7903
6728 draft put them in memory, and GCC used to implement the draft 7904 For the Darwin 64 Bit ABI, a function result can be returned in
7905 registers or in memory, depending on the size of the return data
7906 type. If it is returned in registers, the value occupies the same
7907 registers as it would if it were the first and only function
7908 argument. Otherwise, the function places its result in memory at
7909 the location pointed to by GPR3.
7910
7911 The SVR4 ABI specifies that structures <= 8 bytes are returned in r3/r4,
7912 but a draft put them in memory, and GCC used to implement the draft
6729 instead of the final standard. Therefore, aix_struct_return 7913 instead of the final standard. Therefore, aix_struct_return
6730 controls this instead of DEFAULT_ABI; V.4 targets needing backward 7914 controls this instead of DEFAULT_ABI; V.4 targets needing backward
6731 compatibility can change DRAFT_V4_STRUCT_RET to override the 7915 compatibility can change DRAFT_V4_STRUCT_RET to override the
6732 default, and -m switches get the final word. See 7916 default, and -m switches get the final word. See
6733 rs6000_override_options for more details. 7917 rs6000_option_override_internal for more details.
6734 7918
6735 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit 7919 The PPC32 SVR4 ABI uses IEEE double extended for long double, if 128-bit
6736 long double support is enabled. These values are returned in memory. 7920 long double support is enabled. These values are returned in memory.
6737 7921
6738 int_size_in_bytes returns -1 for variable size objects, which go in 7922 int_size_in_bytes returns -1 for variable size objects, which go in
6739 memory always. The cast to unsigned makes -1 > 8. */ 7923 memory always. The cast to unsigned makes -1 > 8. */
6740 7924
6741 static bool 7925 static bool
6742 rs6000_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED) 7926 rs6000_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
6743 { 7927 {
6744 /* In the darwin64 abi, try to use registers for larger structs 7928 /* For the Darwin64 ABI, test if we can fit the return value in regs. */
6745 if possible. */ 7929 if (TARGET_MACHO
6746 if (rs6000_darwin64_abi 7930 && rs6000_darwin64_abi
6747 && TREE_CODE (type) == RECORD_TYPE 7931 && TREE_CODE (type) == RECORD_TYPE
6748 && int_size_in_bytes (type) > 0) 7932 && int_size_in_bytes (type) > 0)
6749 { 7933 {
6750 CUMULATIVE_ARGS valcum; 7934 CUMULATIVE_ARGS valcum;
6751 rtx valret; 7935 rtx valret;
6753 valcum.words = 0; 7937 valcum.words = 0;
6754 valcum.fregno = FP_ARG_MIN_REG; 7938 valcum.fregno = FP_ARG_MIN_REG;
6755 valcum.vregno = ALTIVEC_ARG_MIN_REG; 7939 valcum.vregno = ALTIVEC_ARG_MIN_REG;
6756 /* Do a trial code generation as if this were going to be passed 7940 /* Do a trial code generation as if this were going to be passed
6757 as an argument; if any part goes in memory, we return NULL. */ 7941 as an argument; if any part goes in memory, we return NULL. */
6758 valret = rs6000_darwin64_record_arg (&valcum, type, 1, true); 7942 valret = rs6000_darwin64_record_arg (&valcum, type, true, true);
6759 if (valret) 7943 if (valret)
6760 return false; 7944 return false;
6761 /* Otherwise fall through to more conventional ABI rules. */ 7945 /* Otherwise fall through to more conventional ABI rules. */
6762 } 7946 }
6763 7947
6790 return true; 7974 return true;
6791 7975
6792 return false; 7976 return false;
6793 } 7977 }
6794 7978
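/* [Editor's illustration -- not part of rs6000.c or this changeset.]
   A minimal sketch of the V.4 size test described in the comment above:
   int_size_in_bytes reports -1 for variable-sized types, and the cast to
   unsigned turns -1 into a huge value, so such types always fail the
   <= 8 check and are returned in memory.  The helper name is
   hypothetical. */
static int
v4_small_aggregate_in_regs_p (long size_in_bytes)
{
  return (unsigned long) size_in_bytes <= 8;
}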
7979 #ifdef HAVE_AS_GNU_ATTRIBUTE
7980 /* Return TRUE if a call to function FNDECL may be one that
7981 potentially affects the function calling ABI of the object file. */
7982
7983 static bool
7984 call_ABI_of_interest (tree fndecl)
7985 {
7986 if (cgraph_state == CGRAPH_STATE_EXPANSION)
7987 {
7988 struct cgraph_node *c_node;
7989
7990 /* Libcalls are always interesting. */
7991 if (fndecl == NULL_TREE)
7992 return true;
7993
7994 /* Any call to an external function is interesting. */
7995 if (DECL_EXTERNAL (fndecl))
7996 return true;
7997
7998 /* Interesting functions that we are emitting in this object file. */
7999 c_node = cgraph_node (fndecl);
8000 return !cgraph_only_called_directly_p (c_node);
8001 }
8002 return false;
8003 }
8004 #endif
8005
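/* [Editor's illustration -- not part of rs6000.c.]  Hedged examples of
   calls the predicate above treats as ABI-relevant while RTL expansion
   is in progress: libcalls (fndecl == NULL_TREE) and externals are
   always interesting; a local helper that is only ever called directly
   from within this object file is not. */
extern double sin (double);                   /* external: interesting  */
static int add_one (int x) { return x + 1; }  /* direct-call-only local:
                                                  not interesting       */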
6795 /* Initialize a variable CUM of type CUMULATIVE_ARGS 8006 /* Initialize a variable CUM of type CUMULATIVE_ARGS
6796 for a call to a function whose data type is FNTYPE. 8007 for a call to a function whose data type is FNTYPE.
6797 For a library call, FNTYPE is 0. 8008 For a library call, FNTYPE is 0 and RETURN_MODE the return value mode.
6798 8009
6799 For incoming args we set the number of arguments in the prototype large 8010 For incoming args we set the number of arguments in the prototype large
6800 so we never return a PARALLEL. */ 8011 so we never return a PARALLEL. */
6801 8012
6802 void 8013 void
6803 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype, 8014 init_cumulative_args (CUMULATIVE_ARGS *cum, tree fntype,
6804 rtx libname ATTRIBUTE_UNUSED, int incoming, 8015 rtx libname ATTRIBUTE_UNUSED, int incoming,
6805 int libcall, int n_named_args) 8016 int libcall, int n_named_args,
8017 tree fndecl ATTRIBUTE_UNUSED,
8018 enum machine_mode return_mode ATTRIBUTE_UNUSED)
6806 { 8019 {
6807 static CUMULATIVE_ARGS zero_cumulative; 8020 static CUMULATIVE_ARGS zero_cumulative;
6808 8021
6809 *cum = zero_cumulative; 8022 *cum = zero_cumulative;
6810 cum->words = 0; 8023 cum->words = 0;
6811 cum->fregno = FP_ARG_MIN_REG; 8024 cum->fregno = FP_ARG_MIN_REG;
6812 cum->vregno = ALTIVEC_ARG_MIN_REG; 8025 cum->vregno = ALTIVEC_ARG_MIN_REG;
6813 cum->prototype = (fntype && TYPE_ARG_TYPES (fntype)); 8026 cum->prototype = (fntype && prototype_p (fntype));
6814 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall) 8027 cum->call_cookie = ((DEFAULT_ABI == ABI_V4 && libcall)
6815 ? CALL_LIBCALL : CALL_NORMAL); 8028 ? CALL_LIBCALL : CALL_NORMAL);
6816 cum->sysv_gregno = GP_ARG_MIN_REG; 8029 cum->sysv_gregno = GP_ARG_MIN_REG;
6817 cum->stdarg = fntype 8030 cum->stdarg = stdarg_p (fntype);
6818 && (TYPE_ARG_TYPES (fntype) != 0
6819 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
6820 != void_type_node));
6821 8031
6822 cum->nargs_prototype = 0; 8032 cum->nargs_prototype = 0;
6823 if (incoming || cum->prototype) 8033 if (incoming || cum->prototype)
6824 cum->nargs_prototype = n_named_args; 8034 cum->nargs_prototype = n_named_args;
6825 8035
6844 fprintf (stderr, " longcall,"); 8054 fprintf (stderr, " longcall,");
6845 8055
6846 fprintf (stderr, " proto = %d, nargs = %d\n", 8056 fprintf (stderr, " proto = %d, nargs = %d\n",
6847 cum->prototype, cum->nargs_prototype); 8057 cum->prototype, cum->nargs_prototype);
6848 } 8058 }
8059
8060 #ifdef HAVE_AS_GNU_ATTRIBUTE
8061 if (DEFAULT_ABI == ABI_V4)
8062 {
8063 cum->escapes = call_ABI_of_interest (fndecl);
8064 if (cum->escapes)
8065 {
8066 tree return_type;
8067
8068 if (fntype)
8069 {
8070 return_type = TREE_TYPE (fntype);
8071 return_mode = TYPE_MODE (return_type);
8072 }
8073 else
8074 return_type = lang_hooks.types.type_for_mode (return_mode, 0);
8075
8076 if (return_type != NULL)
8077 {
8078 if (TREE_CODE (return_type) == RECORD_TYPE
8079 && TYPE_TRANSPARENT_AGGR (return_type))
8080 {
8081 return_type = TREE_TYPE (first_field (return_type));
8082 return_mode = TYPE_MODE (return_type);
8083 }
8084 if (AGGREGATE_TYPE_P (return_type)
8085 && ((unsigned HOST_WIDE_INT) int_size_in_bytes (return_type)
8086 <= 8))
8087 rs6000_returns_struct = true;
8088 }
8089 if (SCALAR_FLOAT_MODE_P (return_mode))
8090 rs6000_passes_float = true;
8091 else if (ALTIVEC_VECTOR_MODE (return_mode)
8092 || VSX_VECTOR_MODE (return_mode)
8093 || SPE_VECTOR_MODE (return_mode))
8094 rs6000_passes_vector = true;
8095 }
8096 }
8097 #endif
6849 8098
6850 if (fntype 8099 if (fntype
6851 && !TARGET_ALTIVEC 8100 && !TARGET_ALTIVEC
6852 && TARGET_ALTIVEC_ABI 8101 && TARGET_ALTIVEC_ABI
6853 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype)))) 8102 && ALTIVEC_VECTOR_MODE (TYPE_MODE (TREE_TYPE (fntype))))
6939 8188
6940 Doubleword align SPE vectors. 8189 Doubleword align SPE vectors.
6941 Quadword align Altivec vectors. 8190 Quadword align Altivec vectors.
6942 Quadword align large synthetic vector types. */ 8191 Quadword align large synthetic vector types. */
6943 8192
6944 int 8193 static unsigned int
6945 function_arg_boundary (enum machine_mode mode, tree type) 8194 rs6000_function_arg_boundary (enum machine_mode mode, const_tree type)
6946 { 8195 {
6947 if (DEFAULT_ABI == ABI_V4 8196 if (DEFAULT_ABI == ABI_V4
6948 && (GET_MODE_SIZE (mode) == 8 8197 && (GET_MODE_SIZE (mode) == 8
6949 || (TARGET_HARD_FLOAT 8198 || (TARGET_HARD_FLOAT
6950 && TARGET_FPRS 8199 && TARGET_FPRS
6957 return 64; 8206 return 64;
6958 else if ((ALTIVEC_VECTOR_MODE (mode) || VSX_VECTOR_MODE (mode)) 8207 else if ((ALTIVEC_VECTOR_MODE (mode) || VSX_VECTOR_MODE (mode))
6959 || (type && TREE_CODE (type) == VECTOR_TYPE 8208 || (type && TREE_CODE (type) == VECTOR_TYPE
6960 && int_size_in_bytes (type) >= 16)) 8209 && int_size_in_bytes (type) >= 16))
6961 return 128; 8210 return 128;
6962 else if (rs6000_darwin64_abi && mode == BLKmode 8211 else if (TARGET_MACHO
8212 && rs6000_darwin64_abi
8213 && mode == BLKmode
6963 && type && TYPE_ALIGN (type) > 64) 8214 && type && TYPE_ALIGN (type) > 64)
6964 return 128; 8215 return 128;
6965 else 8216 else
6966 return PARM_BOUNDARY; 8217 return PARM_BOUNDARY;
6967 } 8218 }
6968 8219
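/* [Editor's sketch -- not part of rs6000.c.]  A simplified mirror of the
   boundary rules visible above, assuming a 32-bit target where
   PARM_BOUNDARY is 32.  The real code consults GET_MODE_SIZE,
   ALTIVEC_VECTOR_MODE/VSX_VECTOR_MODE and TYPE_ALIGN rather than these
   hypothetical flags. */
static unsigned int
sketch_arg_boundary (int v4_abi, int size_in_bytes, int is_vector)
{
  if (v4_abi && size_in_bytes == 8)
    return 64;            /* doubleword-align 8-byte args under V.4 */
  if (is_vector && size_in_bytes >= 16)
    return 128;           /* quadword-align AltiVec/VSX vectors */
  return 32;              /* otherwise PARM_BOUNDARY */
}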
6969 /* For a function parm of MODE and TYPE, return the starting word in 8220 /* For a function parm of MODE and TYPE, return the starting word in
6970 the parameter area. NWORDS of the parameter area are already used. */ 8221 the parameter area. NWORDS of the parameter area are already used. */
6971 8222
6972 static unsigned int 8223 static unsigned int
6973 rs6000_parm_start (enum machine_mode mode, tree type, unsigned int nwords) 8224 rs6000_parm_start (enum machine_mode mode, const_tree type,
8225 unsigned int nwords)
6974 { 8226 {
6975 unsigned int align; 8227 unsigned int align;
6976 unsigned int parm_offset; 8228 unsigned int parm_offset;
6977 8229
6978 align = function_arg_boundary (mode, type) / PARM_BOUNDARY - 1; 8230 align = rs6000_function_arg_boundary (mode, type) / PARM_BOUNDARY - 1;
6979 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6; 8231 parm_offset = DEFAULT_ABI == ABI_V4 ? 2 : 6;
6980 return nwords + (-(parm_offset + nwords) & align); 8232 return nwords + (-(parm_offset + nwords) & align);
6981 } 8233 }
6982 8234
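/* [Editor's worked example -- not part of rs6000.c.]  rs6000_parm_start
   for a DFmode argument under ABI_V4, assuming PARM_BOUNDARY == 32 and
   one parameter word already in use: a 64-bit boundary gives an align
   mask of 1, and the V.4 parameter area starts 2 words into the frame. */
static unsigned int
example_parm_start (void)
{
  unsigned int align = 64 / 32 - 1;     /* == 1 */
  unsigned int parm_offset = 2;         /* ABI_V4 */
  unsigned int nwords = 1;              /* one word already used */
  return nwords + (-(parm_offset + nwords) & align);   /* == 2 */
}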
6983 /* Compute the size (in words) of a function argument. */ 8235 /* Compute the size (in words) of a function argument. */
6984 8236
6985 static unsigned long 8237 static unsigned long
6986 rs6000_arg_size (enum machine_mode mode, tree type) 8238 rs6000_arg_size (enum machine_mode mode, const_tree type)
6987 { 8239 {
6988 unsigned long size; 8240 unsigned long size;
6989 8241
6990 if (mode != BLKmode) 8242 if (mode != BLKmode)
6991 size = GET_MODE_SIZE (mode); 8243 size = GET_MODE_SIZE (mode);
7000 8252
7001 /* Use this to flush pending int fields. */ 8253 /* Use this to flush pending int fields. */
7002 8254
7003 static void 8255 static void
7004 rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum, 8256 rs6000_darwin64_record_arg_advance_flush (CUMULATIVE_ARGS *cum,
7005 HOST_WIDE_INT bitpos) 8257 HOST_WIDE_INT bitpos, int final)
7006 { 8258 {
7007 unsigned int startbit, endbit; 8259 unsigned int startbit, endbit;
7008 int intregs, intoffset; 8260 int intregs, intoffset;
7009 enum machine_mode mode; 8261 enum machine_mode mode;
7010 8262
8263 /* Handle the situations where a float is taking up the first half
8264 of the GPR, and the other half is empty (typically due to
8265 alignment restrictions). We can detect this by an 8-byte-aligned
8266 int field, or by seeing that this is the final flush for this
8267 argument. Count the word and continue on. */
8268 if (cum->floats_in_gpr == 1
8269 && (cum->intoffset % 64 == 0
8270 || (cum->intoffset == -1 && final)))
8271 {
8272 cum->words++;
8273 cum->floats_in_gpr = 0;
8274 }
8275
7011 if (cum->intoffset == -1) 8276 if (cum->intoffset == -1)
7012 return; 8277 return;
7013 8278
7014 intoffset = cum->intoffset; 8279 intoffset = cum->intoffset;
7015 cum->intoffset = -1; 8280 cum->intoffset = -1;
8281 cum->floats_in_gpr = 0;
7016 8282
7017 if (intoffset % BITS_PER_WORD != 0) 8283 if (intoffset % BITS_PER_WORD != 0)
7018 { 8284 {
7019 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD, 8285 mode = mode_for_size (BITS_PER_WORD - intoffset % BITS_PER_WORD,
7020 MODE_INT, 0); 8286 MODE_INT, 0);
7030 8296
7031 startbit = intoffset & -BITS_PER_WORD; 8297 startbit = intoffset & -BITS_PER_WORD;
7032 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD; 8298 endbit = (bitpos + BITS_PER_WORD - 1) & -BITS_PER_WORD;
7033 intregs = (endbit - startbit) / BITS_PER_WORD; 8299 intregs = (endbit - startbit) / BITS_PER_WORD;
7034 cum->words += intregs; 8300 cum->words += intregs;
8301 /* words should be unsigned. */
8302 if ((unsigned)cum->words < (endbit/BITS_PER_WORD))
8303 {
8304 int pad = (endbit/BITS_PER_WORD) - cum->words;
8305 cum->words += pad;
8306 }
7035 } 8307 }
7036 8308
7037 /* The darwin64 ABI calls for us to recurse down through structs, 8309 /* The darwin64 ABI calls for us to recurse down through structs,
7038 looking for elements passed in registers. Unfortunately, we have 8310 looking for elements passed in registers. Unfortunately, we have
7039 to track int register count here also because of misalignments 8311 to track int register count here also because of misalignments
7040 in powerpc alignment mode. */ 8312 in powerpc alignment mode. */
7041 8313
7042 static void 8314 static void
7043 rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum, 8315 rs6000_darwin64_record_arg_advance_recurse (CUMULATIVE_ARGS *cum,
7044 tree type, 8316 const_tree type,
7045 HOST_WIDE_INT startbitpos) 8317 HOST_WIDE_INT startbitpos)
7046 { 8318 {
7047 tree f; 8319 tree f;
7048 8320
7049 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f)) 8321 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
7050 if (TREE_CODE (f) == FIELD_DECL) 8322 if (TREE_CODE (f) == FIELD_DECL)
7051 { 8323 {
7052 HOST_WIDE_INT bitpos = startbitpos; 8324 HOST_WIDE_INT bitpos = startbitpos;
7053 tree ftype = TREE_TYPE (f); 8325 tree ftype = TREE_TYPE (f);
7054 enum machine_mode mode; 8326 enum machine_mode mode;
7064 8336
7065 if (TREE_CODE (ftype) == RECORD_TYPE) 8337 if (TREE_CODE (ftype) == RECORD_TYPE)
7066 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos); 8338 rs6000_darwin64_record_arg_advance_recurse (cum, ftype, bitpos);
7067 else if (USE_FP_FOR_ARG_P (cum, mode, ftype)) 8339 else if (USE_FP_FOR_ARG_P (cum, mode, ftype))
7068 { 8340 {
7069 rs6000_darwin64_record_arg_advance_flush (cum, bitpos); 8341 unsigned n_fpregs = (GET_MODE_SIZE (mode) + 7) >> 3;
7070 cum->fregno += (GET_MODE_SIZE (mode) + 7) >> 3; 8342 rs6000_darwin64_record_arg_advance_flush (cum, bitpos, 0);
7071 cum->words += (GET_MODE_SIZE (mode) + 7) >> 3; 8343 cum->fregno += n_fpregs;
8344 /* Single-precision floats present a special problem for
8345 us, because they are smaller than an 8-byte GPR, and so
8346 the structure-packing rules combined with the standard
8347 varargs behavior mean that we want to pack float/float
8348 and float/int combinations into a single register's
8349 space. This is complicated by the arg advance flushing,
8350 which works on arbitrarily large groups of int-type
8351 fields. */
8352 if (mode == SFmode)
8353 {
8354 if (cum->floats_in_gpr == 1)
8355 {
8356 /* Two floats in a word; count the word and reset
8357 the float count. */
8358 cum->words++;
8359 cum->floats_in_gpr = 0;
8360 }
8361 else if (bitpos % 64 == 0)
8362 {
8363 /* A float at the beginning of an 8-byte word;
8364 count it and put off adjusting cum->words until
8365 we see if an arg advance flush is going to do it
8366 for us. */
8367 cum->floats_in_gpr++;
8368 }
8369 else
8370 {
8371 /* The float is at the end of a word, preceded
8372 by integer fields, so the arg advance flush
8373 just above has already set cum->words and
8374 everything is taken care of. */
8375 }
8376 }
8377 else
8378 cum->words += n_fpregs;
7072 } 8379 }
7073 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1)) 8380 else if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, 1))
7074 { 8381 {
7075 rs6000_darwin64_record_arg_advance_flush (cum, bitpos); 8382 rs6000_darwin64_record_arg_advance_flush (cum, bitpos, 0);
7076 cum->vregno++; 8383 cum->vregno++;
7077 cum->words += 2; 8384 cum->words += 2;
7078 } 8385 }
7079 else if (cum->intoffset == -1) 8386 else if (cum->intoffset == -1)
7080 cum->intoffset = bitpos; 8387 cum->intoffset = bitpos;
7081 } 8388 }
7082 } 8389 }
7083 8390
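/* [Editor's illustration -- not part of rs6000.c.]  The float-packing
   behaviour described in the comments above, under the Darwin 64-bit
   ABI: */
struct sf_pair { float a; float b; };   /* two 4-byte fields */
/* Each float advances cum->fregno by one FP register, but both fields
   share a single 8-byte GPR-sized slot, so cum->words is bumped only
   once -- when the second float is seen, or at the final flush if the
   second half of the slot stays empty. */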
8391 /* Check for an item that needs to be considered specially under the darwin 64
8392 bit ABI. These are record types where the mode is BLK or the structure is
8393 8 bytes in size. */
8394 static int
8395 rs6000_darwin64_struct_check_p (enum machine_mode mode, const_tree type)
8396 {
8397 return rs6000_darwin64_abi
8398 && ((mode == BLKmode
8399 && TREE_CODE (type) == RECORD_TYPE
8400 && int_size_in_bytes (type) > 0)
8401 || (type && TREE_CODE (type) == RECORD_TYPE
8402 && int_size_in_bytes (type) == 8)) ? 1 : 0;
8403 }
8404
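/* [Editor's examples -- not part of rs6000.c; assume rs6000_darwin64_abi
   is set.]  Records the predicate above flags for special Darwin 64-bit
   handling: */
struct blk_rec  { char c[24]; };   /* BLKmode record of positive size */
struct word_rec { int a, b;   };   /* exactly 8 bytes, non-BLK mode   */
/* Scalars and non-record types fall through to the usual handling. */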
7084 /* Update the data in CUM to advance over an argument 8405 /* Update the data in CUM to advance over an argument
7085 of mode MODE and data type TYPE. 8406 of mode MODE and data type TYPE.
7086 (TYPE is null for libcalls where that information may not be available.) 8407 (TYPE is null for libcalls where that information may not be available.)
7087 8408
7088 Note that for args passed by reference, function_arg will be called 8409 Note that for args passed by reference, function_arg will be called
7089 with MODE and TYPE set to that of the pointer to the arg, not the arg 8410 with MODE and TYPE set to that of the pointer to the arg, not the arg
7090 itself. */ 8411 itself. */
7091 8412
7092 void 8413 static void
7093 function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode, 8414 rs6000_function_arg_advance_1 (CUMULATIVE_ARGS *cum, enum machine_mode mode,
7094 tree type, int named, int depth) 8415 const_tree type, bool named, int depth)
7095 { 8416 {
7096 int size;
7097
7098 /* Only tick off an argument if we're not recursing. */ 8417 /* Only tick off an argument if we're not recursing. */
7099 if (depth == 0) 8418 if (depth == 0)
7100 cum->nargs_prototype--; 8419 cum->nargs_prototype--;
8420
8421 #ifdef HAVE_AS_GNU_ATTRIBUTE
8422 if (DEFAULT_ABI == ABI_V4
8423 && cum->escapes)
8424 {
8425 if (SCALAR_FLOAT_MODE_P (mode))
8426 rs6000_passes_float = true;
8427 else if (named && (ALTIVEC_VECTOR_MODE (mode) || VSX_VECTOR_MODE (mode)))
8428 rs6000_passes_vector = true;
8429 else if (SPE_VECTOR_MODE (mode)
8430 && !cum->stdarg
8431 && cum->sysv_gregno <= GP_ARG_MAX_REG)
8432 rs6000_passes_vector = true;
8433 }
8434 #endif
7101 8435
7102 if (TARGET_ALTIVEC_ABI 8436 if (TARGET_ALTIVEC_ABI
7103 && (ALTIVEC_VECTOR_MODE (mode) 8437 && (ALTIVEC_VECTOR_MODE (mode)
7104 || VSX_VECTOR_MODE (mode) 8438 || VSX_VECTOR_MODE (mode)
7105 || (type && TREE_CODE (type) == VECTOR_TYPE 8439 || (type && TREE_CODE (type) == VECTOR_TYPE
7155 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode) 8489 else if (TARGET_SPE_ABI && TARGET_SPE && SPE_VECTOR_MODE (mode)
7156 && !cum->stdarg 8490 && !cum->stdarg
7157 && cum->sysv_gregno <= GP_ARG_MAX_REG) 8491 && cum->sysv_gregno <= GP_ARG_MAX_REG)
7158 cum->sysv_gregno++; 8492 cum->sysv_gregno++;
7159 8493
7160 else if (rs6000_darwin64_abi 8494 else if (TARGET_MACHO && rs6000_darwin64_struct_check_p (mode, type))
7161 && mode == BLKmode 8495 {
7162 && TREE_CODE (type) == RECORD_TYPE 8496 int size = int_size_in_bytes (type);
7163 && (size = int_size_in_bytes (type)) > 0)
7164 {
7165 /* Variable sized types have size == -1 and are 8497 /* Variable sized types have size == -1 and are
7166 treated as if consisting entirely of ints. 8498 treated as if consisting entirely of ints.
7167 Pad to 16 byte boundary if needed. */ 8499 Pad to 16 byte boundary if needed. */
7168 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD 8500 if (TYPE_ALIGN (type) >= 2 * BITS_PER_WORD
7169 && (cum->words % 2) != 0) 8501 && (cum->words % 2) != 0)
7176 /* It is tempting to say int register count just goes up by 8508 /* It is tempting to say int register count just goes up by
7177 sizeof(type)/8, but this is wrong in a case such as 8509 sizeof(type)/8, but this is wrong in a case such as
7178 { int; double; int; } [powerpc alignment]. We have to 8510 { int; double; int; } [powerpc alignment]. We have to
7179 grovel through the fields for these too. */ 8511 grovel through the fields for these too. */
7180 cum->intoffset = 0; 8512 cum->intoffset = 0;
8513 cum->floats_in_gpr = 0;
7181 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0); 8514 rs6000_darwin64_record_arg_advance_recurse (cum, type, 0);
7182 rs6000_darwin64_record_arg_advance_flush (cum, 8515 rs6000_darwin64_record_arg_advance_flush (cum,
7183 size * BITS_PER_UNIT); 8516 size * BITS_PER_UNIT, 1);
7184 } 8517 }
8518 if (TARGET_DEBUG_ARG)
8519 {
8520 fprintf (stderr, "function_adv: words = %2d, align=%d, size=%d",
8521 cum->words, TYPE_ALIGN (type), size);
8522 fprintf (stderr,
8523 "nargs = %4d, proto = %d, mode = %4s (darwin64 abi)\n",
8524 cum->nargs_prototype, cum->prototype,
8525 GET_MODE_NAME (mode));
8526 }
7185 } 8527 }
7186 else if (DEFAULT_ABI == ABI_V4) 8528 else if (DEFAULT_ABI == ABI_V4)
7187 { 8529 {
7188 if (TARGET_HARD_FLOAT && TARGET_FPRS 8530 if (TARGET_HARD_FLOAT && TARGET_FPRS
7189 && ((TARGET_SINGLE_FLOAT && mode == SFmode) 8531 && ((TARGET_SINGLE_FLOAT && mode == SFmode)
7275 named, align_words - start_words, depth); 8617 named, align_words - start_words, depth);
7276 } 8618 }
7277 } 8619 }
7278 } 8620 }
7279 8621
8622 static void
8623 rs6000_function_arg_advance (CUMULATIVE_ARGS *cum, enum machine_mode mode,
8624 const_tree type, bool named)
8625 {
8626 rs6000_function_arg_advance_1 (cum, mode, type, named, 0);
8627 }
8628
7280 static rtx 8629 static rtx
7281 spe_build_register_parallel (enum machine_mode mode, int gregno) 8630 spe_build_register_parallel (enum machine_mode mode, int gregno)
7282 { 8631 {
7283 rtx r1, r3, r5, r7; 8632 rtx r1, r3, r5, r7;
7284 8633
7313 } 8662 }
7314 } 8663 }
7315 8664
7316 /* Determine where to put a SIMD argument on the SPE. */ 8665 /* Determine where to put a SIMD argument on the SPE. */
7317 static rtx 8666 static rtx
7318 rs6000_spe_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, 8667 rs6000_spe_function_arg (const CUMULATIVE_ARGS *cum, enum machine_mode mode,
7319 tree type) 8668 const_tree type)
7320 { 8669 {
7321 int gregno = cum->sysv_gregno; 8670 int gregno = cum->sysv_gregno;
7322 8671
7323 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but 8672 /* On E500 v2, double arithmetic is done on the full 64-bit GPR, but
7324 are passed and returned in a pair of GPRs for ABI compatibility. */ 8673 are passed and returned in a pair of GPRs for ABI compatibility. */
7444 HOST_WIDE_INT startbitpos, rtx rvec[], 8793 HOST_WIDE_INT startbitpos, rtx rvec[],
7445 int *k) 8794 int *k)
7446 { 8795 {
7447 tree f; 8796 tree f;
7448 8797
7449 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f)) 8798 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
7450 if (TREE_CODE (f) == FIELD_DECL) 8799 if (TREE_CODE (f) == FIELD_DECL)
7451 { 8800 {
7452 HOST_WIDE_INT bitpos = startbitpos; 8801 HOST_WIDE_INT bitpos = startbitpos;
7453 tree ftype = TREE_TYPE (f); 8802 tree ftype = TREE_TYPE (f);
7454 enum machine_mode mode; 8803 enum machine_mode mode;
7464 8813
7465 if (TREE_CODE (ftype) == RECORD_TYPE) 8814 if (TREE_CODE (ftype) == RECORD_TYPE)
7466 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k); 8815 rs6000_darwin64_record_arg_recurse (cum, ftype, bitpos, rvec, k);
7467 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype)) 8816 else if (cum->named && USE_FP_FOR_ARG_P (cum, mode, ftype))
7468 { 8817 {
8818 unsigned n_fpreg = (GET_MODE_SIZE (mode) + 7) >> 3;
7469 #if 0 8819 #if 0
7470 switch (mode) 8820 switch (mode)
7471 { 8821 {
7472 case SCmode: mode = SFmode; break; 8822 case SCmode: mode = SFmode; break;
7473 case DCmode: mode = DFmode; break; 8823 case DCmode: mode = DFmode; break;
7474 case TCmode: mode = TFmode; break; 8824 case TCmode: mode = TFmode; break;
7475 default: break; 8825 default: break;
7476 } 8826 }
7477 #endif 8827 #endif
7478 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k); 8828 rs6000_darwin64_record_arg_flush (cum, bitpos, rvec, k);
8829 if (cum->fregno + n_fpreg > FP_ARG_MAX_REG + 1)
8830 {
8831 gcc_assert (cum->fregno == FP_ARG_MAX_REG
8832 && (mode == TFmode || mode == TDmode));
8833 /* Long double or _Decimal128 split over regs and memory. */
8834 mode = DECIMAL_FLOAT_MODE_P (mode) ? DDmode : DFmode;
8835 cum->use_stack = 1;
8836 }
7479 rvec[(*k)++] 8837 rvec[(*k)++]
7480 = gen_rtx_EXPR_LIST (VOIDmode, 8838 = gen_rtx_EXPR_LIST (VOIDmode,
7481 gen_rtx_REG (mode, cum->fregno++), 8839 gen_rtx_REG (mode, cum->fregno++),
7482 GEN_INT (bitpos / BITS_PER_UNIT)); 8840 GEN_INT (bitpos / BITS_PER_UNIT));
7483 if (mode == TFmode || mode == TDmode) 8841 if (mode == TFmode || mode == TDmode)
7509 Much of this is taken from the SPARC V9 port, which has a similar 8867 Much of this is taken from the SPARC V9 port, which has a similar
7510 calling convention. */ 8868 calling convention. */
7511 8869
7512 static rtx 8870 static rtx
7513 rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, const_tree type, 8871 rs6000_darwin64_record_arg (CUMULATIVE_ARGS *orig_cum, const_tree type,
7514 int named, bool retval) 8872 bool named, bool retval)
7515 { 8873 {
7516 rtx rvec[FIRST_PSEUDO_REGISTER]; 8874 rtx rvec[FIRST_PSEUDO_REGISTER];
7517 int k = 1, kbase = 1; 8875 int k = 1, kbase = 1;
7518 HOST_WIDE_INT typesize = int_size_in_bytes (type); 8876 HOST_WIDE_INT typesize = int_size_in_bytes (type);
7519 /* This is a copy; modifications are not visible to our caller. */ 8877 /* This is a copy; modifications are not visible to our caller. */
7531 8889
7532 /* Put entries into rvec[] for individual FP and vector fields, and 8890 /* Put entries into rvec[] for individual FP and vector fields, and
7533 for the chunks of memory that go in int regs. Note we start at 8891 for the chunks of memory that go in int regs. Note we start at
7534 element 1; 0 is reserved for an indication of using memory, and 8892 element 1; 0 is reserved for an indication of using memory, and
7535 may or may not be filled in below. */ 8893 may or may not be filled in below. */
7536 rs6000_darwin64_record_arg_recurse (cum, type, 0, rvec, &k); 8894 rs6000_darwin64_record_arg_recurse (cum, type, /* startbit pos= */ 0, rvec, &k);
7537 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k); 8895 rs6000_darwin64_record_arg_flush (cum, typesize * BITS_PER_UNIT, rvec, &k);
7538 8896
7539 /* If any part of the struct went on the stack put all of it there. 8897 /* If any part of the struct went on the stack put all of it there.
7540 This hack is because the generic code for 8898 This hack is because the generic code for
7541 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register 8899 FUNCTION_ARG_PARTIAL_NREGS cannot handle cases where the register
7554 } 8912 }
7555 8913
7556 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */ 8914 /* Determine where to place an argument in 64-bit mode with 32-bit ABI. */
7557 8915
7558 static rtx 8916 static rtx
7559 rs6000_mixed_function_arg (enum machine_mode mode, tree type, int align_words) 8917 rs6000_mixed_function_arg (enum machine_mode mode, const_tree type,
8918 int align_words)
7560 { 8919 {
7561 int n_units; 8920 int n_units;
7562 int i, k; 8921 int i, k;
7563 rtx rvec[GP_ARG_NUM_REG + 1]; 8922 rtx rvec[GP_ARG_NUM_REG + 1];
7564 8923
7625 8984
7626 Note that for args passed by reference, function_arg will be called 8985 Note that for args passed by reference, function_arg will be called
7627 with MODE and TYPE set to that of the pointer to the arg, not the arg 8986 with MODE and TYPE set to that of the pointer to the arg, not the arg
7628 itself. */ 8987 itself. */
7629 8988
7630 rtx 8989 static rtx
7631 function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode, 8990 rs6000_function_arg (CUMULATIVE_ARGS *cum, enum machine_mode mode,
7632 tree type, int named) 8991 const_tree type, bool named)
7633 { 8992 {
7634 enum rs6000_abi abi = DEFAULT_ABI; 8993 enum rs6000_abi abi = DEFAULT_ABI;
7635 8994
7636 /* Return a marker to indicate whether CR1 needs to set or clear the 8995 /* Return a marker to indicate whether CR1 needs to set or clear the
7637 bit that V.4 uses to say fp args were passed in registers. 8996 bit that V.4 uses to say fp args were passed in registers.
7656 } 9015 }
7657 9016
7658 return GEN_INT (cum->call_cookie); 9017 return GEN_INT (cum->call_cookie);
7659 } 9018 }
7660 9019
7661 if (rs6000_darwin64_abi && mode == BLKmode 9020 if (TARGET_MACHO && rs6000_darwin64_struct_check_p (mode, type))
7662 && TREE_CODE (type) == RECORD_TYPE) 9021 {
7663 { 9022 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, /*retval= */false);
7664 rtx rslt = rs6000_darwin64_record_arg (cum, type, named, false);
7665 if (rslt != NULL_RTX) 9023 if (rslt != NULL_RTX)
7666 return rslt; 9024 return rslt;
7667 /* Else fall through to usual handling. */ 9025 /* Else fall through to usual handling. */
7668 } 9026 }
7669 9027
7914 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named) 9272 if (USE_ALTIVEC_FOR_ARG_P (cum, mode, type, named)
7915 && cum->nargs_prototype >= 0) 9273 && cum->nargs_prototype >= 0)
7916 return 0; 9274 return 0;
7917 9275
7918 /* In this complicated case we just disable the partial_nregs code. */ 9276 /* In this complicated case we just disable the partial_nregs code. */
7919 if (rs6000_darwin64_abi && mode == BLKmode 9277 if (TARGET_MACHO && rs6000_darwin64_struct_check_p (mode, type))
7920 && TREE_CODE (type) == RECORD_TYPE
7921 && int_size_in_bytes (type) > 0)
7922 return 0; 9278 return 0;
7923 9279
7924 align_words = rs6000_parm_start (mode, type, cum->words); 9280 align_words = rs6000_parm_start (mode, type, cum->words);
7925 9281
7926 if (USE_FP_FOR_ARG_P (cum, mode, type)) 9282 if (USE_FP_FOR_ARG_P (cum, mode, type))
8077 int first_reg_offset; 9433 int first_reg_offset;
8078 alias_set_type set; 9434 alias_set_type set;
8079 9435
8080 /* Skip the last named argument. */ 9436 /* Skip the last named argument. */
8081 next_cum = *cum; 9437 next_cum = *cum;
8082 function_arg_advance (&next_cum, mode, type, 1, 0); 9438 rs6000_function_arg_advance_1 (&next_cum, mode, type, true, 0);
8083 9439
8084 if (DEFAULT_ABI == ABI_V4) 9440 if (DEFAULT_ABI == ABI_V4)
8085 { 9441 {
8086 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG; 9442 first_reg_offset = next_cum.sysv_gregno - GP_ARG_MIN_REG;
8087 9443
8268 DECL_FIELD_CONTEXT (f_fpr) = record; 9624 DECL_FIELD_CONTEXT (f_fpr) = record;
8269 DECL_FIELD_CONTEXT (f_res) = record; 9625 DECL_FIELD_CONTEXT (f_res) = record;
8270 DECL_FIELD_CONTEXT (f_ovf) = record; 9626 DECL_FIELD_CONTEXT (f_ovf) = record;
8271 DECL_FIELD_CONTEXT (f_sav) = record; 9627 DECL_FIELD_CONTEXT (f_sav) = record;
8272 9628
8273 TREE_CHAIN (record) = type_decl; 9629 TYPE_STUB_DECL (record) = type_decl;
8274 TYPE_NAME (record) = type_decl; 9630 TYPE_NAME (record) = type_decl;
8275 TYPE_FIELDS (record) = f_gpr; 9631 TYPE_FIELDS (record) = f_gpr;
8276 TREE_CHAIN (f_gpr) = f_fpr; 9632 DECL_CHAIN (f_gpr) = f_fpr;
8277 TREE_CHAIN (f_fpr) = f_res; 9633 DECL_CHAIN (f_fpr) = f_res;
8278 TREE_CHAIN (f_res) = f_ovf; 9634 DECL_CHAIN (f_res) = f_ovf;
8279 TREE_CHAIN (f_ovf) = f_sav; 9635 DECL_CHAIN (f_ovf) = f_sav;
8280 9636
8281 layout_type (record); 9637 layout_type (record);
8282 9638
8283 /* The correct type is an array type of one element. */ 9639 /* The correct type is an array type of one element. */
8284 return build_array_type (record, build_index_type (size_zero_node)); 9640 return build_array_type (record, build_index_type (size_zero_node));
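/* [Editor's sketch -- an assumption, not part of rs6000.c.]  The record
   built above (fields f_gpr, f_fpr, f_res, f_ovf, f_sav) corresponds
   roughly to the familiar PowerPC V.4 va_list declaration; va_list
   itself is a one-element array of this record: */
typedef struct {
  unsigned char  gpr;               /* GP arg registers consumed so far */
  unsigned char  fpr;               /* FP arg registers consumed so far */
  unsigned short reserved;          /* padding                          */
  void *overflow_arg_area;          /* next stacked argument            */
  void *reg_save_area;              /* start of the register save block */
} sketch_va_list_rec;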
8299 std_expand_builtin_va_start (valist, nextarg); 9655 std_expand_builtin_va_start (valist, nextarg);
8300 return; 9656 return;
8301 } 9657 }
8302 9658
8303 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node)); 9659 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
8304 f_fpr = TREE_CHAIN (f_gpr); 9660 f_fpr = DECL_CHAIN (f_gpr);
8305 f_res = TREE_CHAIN (f_fpr); 9661 f_res = DECL_CHAIN (f_fpr);
8306 f_ovf = TREE_CHAIN (f_res); 9662 f_ovf = DECL_CHAIN (f_res);
8307 f_sav = TREE_CHAIN (f_ovf); 9663 f_sav = DECL_CHAIN (f_ovf);
9664
9665 valist = build_simple_mem_ref (valist);
9666 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
9667 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), unshare_expr (valist),
9668 f_fpr, NULL_TREE);
9669 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist),
9670 f_ovf, NULL_TREE);
9671 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), unshare_expr (valist),
9672 f_sav, NULL_TREE);
9673
9674 /* Count number of gp and fp argument registers used. */
9675 words = crtl->args.info.words;
9676 n_gpr = MIN (crtl->args.info.sysv_gregno - GP_ARG_MIN_REG,
9677 GP_ARG_NUM_REG);
9678 n_fpr = MIN (crtl->args.info.fregno - FP_ARG_MIN_REG,
9679 FP_ARG_NUM_REG);
9680
9681 if (TARGET_DEBUG_ARG)
9682 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
9683 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
9684 words, n_gpr, n_fpr);
9685
9686 if (cfun->va_list_gpr_size)
9687 {
9688 t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
9689 build_int_cst (NULL_TREE, n_gpr));
9690 TREE_SIDE_EFFECTS (t) = 1;
9691 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
9692 }
9693
9694 if (cfun->va_list_fpr_size)
9695 {
9696 t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
9697 build_int_cst (NULL_TREE, n_fpr));
9698 TREE_SIDE_EFFECTS (t) = 1;
9699 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
9700
9701 #ifdef HAVE_AS_GNU_ATTRIBUTE
9702 if (call_ABI_of_interest (cfun->decl))
9703 rs6000_passes_float = true;
9704 #endif
9705 }
9706
9707 /* Find the overflow area. */
9708 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
9709 if (words != 0)
9710 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
9711 size_int (words * UNITS_PER_WORD));
9712 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
9713 TREE_SIDE_EFFECTS (t) = 1;
9714 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
9715
9716 /* If there were no va_arg invocations, don't set up the register
9717 save area. */
9718 if (!cfun->va_list_gpr_size
9719 && !cfun->va_list_fpr_size
9720 && n_gpr < GP_ARG_NUM_REG
9721 && n_fpr < FP_ARG_V4_MAX_REG)
9722 return;
9723
9724 /* Find the register save area. */
9725 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
9726 if (cfun->machine->varargs_save_offset)
9727 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
9728 size_int (cfun->machine->varargs_save_offset));
9729 t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
9730 TREE_SIDE_EFFECTS (t) = 1;
9731 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
9732 }
9733
9734 /* Implement va_arg. */
9735
9736 tree
9737 rs6000_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
9738 gimple_seq *post_p)
9739 {
9740 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
9741 tree gpr, fpr, ovf, sav, reg, t, u;
9742 int size, rsize, n_reg, sav_ofs, sav_scale;
9743 tree lab_false, lab_over, addr;
9744 int align;
9745 tree ptrtype = build_pointer_type_for_mode (type, ptr_mode, true);
9746 int regalign = 0;
9747 gimple stmt;
9748
9749 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
9750 {
9751 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
9752 return build_va_arg_indirect_ref (t);
9753 }
9754
9755 /* We need to deal with the fact that the darwin ppc64 ABI is defined by an
9756 earlier version of gcc, with the property that it always applied alignment
9757 adjustments to the va-args (even for zero-sized types). The cheapest way
9758 to deal with this is to replicate the effect of the part of
9759 std_gimplify_va_arg_expr that carries out the align adjust, for the case
9760 of relevance.
9761 We don't need to check for pass-by-reference because of the test above.
9762 We can return a simplified answer, since we know there's no offset to add. */
9763
9764 if (TARGET_MACHO
9765 && rs6000_darwin64_abi
9766 && integer_zerop (TYPE_SIZE (type)))
9767 {
9768 unsigned HOST_WIDE_INT align, boundary;
9769 tree valist_tmp = get_initialized_tmp_var (valist, pre_p, NULL);
9770 align = PARM_BOUNDARY / BITS_PER_UNIT;
9771 boundary = rs6000_function_arg_boundary (TYPE_MODE (type), type);
9772 if (boundary > MAX_SUPPORTED_STACK_ALIGNMENT)
9773 boundary = MAX_SUPPORTED_STACK_ALIGNMENT;
9774 boundary /= BITS_PER_UNIT;
9775 if (boundary > align)
9776 {
9777 tree t;
9778 /* This updates arg ptr by the amount that would be necessary
9779 to align the zero-sized (but not zero-alignment) item. */
9780 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
9781 fold_build2 (POINTER_PLUS_EXPR,
9782 TREE_TYPE (valist),
9783 valist_tmp, size_int (boundary - 1)));
9784 gimplify_and_add (t, pre_p);
9785
9786 t = fold_convert (sizetype, valist_tmp);
9787 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
9788 fold_convert (TREE_TYPE (valist),
9789 fold_build2 (BIT_AND_EXPR, sizetype, t,
9790 size_int (-boundary))));
9791 t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
9792 gimplify_and_add (t, pre_p);
9793 }
9794 /* Since it is zero-sized there's no increment for the item itself. */
9795 valist_tmp = fold_convert (build_pointer_type (type), valist_tmp);
9796 return build_va_arg_indirect_ref (valist_tmp);
9797 }
9798
9799 if (DEFAULT_ABI != ABI_V4)
9800 {
9801 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
9802 {
9803 tree elem_type = TREE_TYPE (type);
9804 enum machine_mode elem_mode = TYPE_MODE (elem_type);
9805 int elem_size = GET_MODE_SIZE (elem_mode);
9806
9807 if (elem_size < UNITS_PER_WORD)
9808 {
9809 tree real_part, imag_part;
9810 gimple_seq post = NULL;
9811
9812 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
9813 &post);
9814 /* Copy the value into a temporary, lest the formal temporary
9815 be reused out from under us. */
9816 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
9817 gimple_seq_add_seq (pre_p, post);
9818
9819 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
9820 post_p);
9821
9822 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
9823 }
9824 }
9825
9826 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
9827 }
9828
9829 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
9830 f_fpr = DECL_CHAIN (f_gpr);
9831 f_res = DECL_CHAIN (f_fpr);
9832 f_ovf = DECL_CHAIN (f_res);
9833 f_sav = DECL_CHAIN (f_ovf);
8308 9834
8309 valist = build_va_arg_indirect_ref (valist); 9835 valist = build_va_arg_indirect_ref (valist);
8310 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE); 9836 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
8311 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), unshare_expr (valist), 9837 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), unshare_expr (valist),
8312 f_fpr, NULL_TREE); 9838 f_fpr, NULL_TREE);
8313 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist), 9839 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist),
8314 f_ovf, NULL_TREE); 9840 f_ovf, NULL_TREE);
8315 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), unshare_expr (valist), 9841 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), unshare_expr (valist),
8316 f_sav, NULL_TREE); 9842 f_sav, NULL_TREE);
8317 9843
8318 /* Count number of gp and fp argument registers used. */
8319 words = crtl->args.info.words;
8320 n_gpr = MIN (crtl->args.info.sysv_gregno - GP_ARG_MIN_REG,
8321 GP_ARG_NUM_REG);
8322 n_fpr = MIN (crtl->args.info.fregno - FP_ARG_MIN_REG,
8323 FP_ARG_NUM_REG);
8324
8325 if (TARGET_DEBUG_ARG)
8326 fprintf (stderr, "va_start: words = "HOST_WIDE_INT_PRINT_DEC", n_gpr = "
8327 HOST_WIDE_INT_PRINT_DEC", n_fpr = "HOST_WIDE_INT_PRINT_DEC"\n",
8328 words, n_gpr, n_fpr);
8329
8330 if (cfun->va_list_gpr_size)
8331 {
8332 t = build2 (MODIFY_EXPR, TREE_TYPE (gpr), gpr,
8333 build_int_cst (NULL_TREE, n_gpr));
8334 TREE_SIDE_EFFECTS (t) = 1;
8335 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8336 }
8337
8338 if (cfun->va_list_fpr_size)
8339 {
8340 t = build2 (MODIFY_EXPR, TREE_TYPE (fpr), fpr,
8341 build_int_cst (NULL_TREE, n_fpr));
8342 TREE_SIDE_EFFECTS (t) = 1;
8343 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8344 }
8345
8346 /* Find the overflow area. */
8347 t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
8348 if (words != 0)
8349 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
8350 size_int (words * UNITS_PER_WORD));
8351 t = build2 (MODIFY_EXPR, TREE_TYPE (ovf), ovf, t);
8352 TREE_SIDE_EFFECTS (t) = 1;
8353 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8354
8355 /* If there were no va_arg invocations, don't set up the register
8356 save area. */
8357 if (!cfun->va_list_gpr_size
8358 && !cfun->va_list_fpr_size
8359 && n_gpr < GP_ARG_NUM_REG
8360 && n_fpr < FP_ARG_V4_MAX_REG)
8361 return;
8362
8363 /* Find the register save area. */
8364 t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
8365 if (cfun->machine->varargs_save_offset)
8366 t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
8367 size_int (cfun->machine->varargs_save_offset));
8368 t = build2 (MODIFY_EXPR, TREE_TYPE (sav), sav, t);
8369 TREE_SIDE_EFFECTS (t) = 1;
8370 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
8371 }
8372
8373 /* Implement va_arg. */
8374
8375 tree
8376 rs6000_gimplify_va_arg (tree valist, tree type, gimple_seq *pre_p,
8377 gimple_seq *post_p)
8378 {
8379 tree f_gpr, f_fpr, f_res, f_ovf, f_sav;
8380 tree gpr, fpr, ovf, sav, reg, t, u;
8381 int size, rsize, n_reg, sav_ofs, sav_scale;
8382 tree lab_false, lab_over, addr;
8383 int align;
8384 tree ptrtype = build_pointer_type_for_mode (type, ptr_mode, true);
8385 int regalign = 0;
8386 gimple stmt;
8387
8388 if (pass_by_reference (NULL, TYPE_MODE (type), type, false))
8389 {
8390 t = rs6000_gimplify_va_arg (valist, ptrtype, pre_p, post_p);
8391 return build_va_arg_indirect_ref (t);
8392 }
8393
8394 if (DEFAULT_ABI != ABI_V4)
8395 {
8396 if (targetm.calls.split_complex_arg && TREE_CODE (type) == COMPLEX_TYPE)
8397 {
8398 tree elem_type = TREE_TYPE (type);
8399 enum machine_mode elem_mode = TYPE_MODE (elem_type);
8400 int elem_size = GET_MODE_SIZE (elem_mode);
8401
8402 if (elem_size < UNITS_PER_WORD)
8403 {
8404 tree real_part, imag_part;
8405 gimple_seq post = NULL;
8406
8407 real_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
8408 &post);
8409 /* Copy the value into a temporary, lest the formal temporary
8410 be reused out from under us. */
8411 real_part = get_initialized_tmp_var (real_part, pre_p, &post);
8412 gimple_seq_add_seq (pre_p, post);
8413
8414 imag_part = rs6000_gimplify_va_arg (valist, elem_type, pre_p,
8415 post_p);
8416
8417 return build2 (COMPLEX_EXPR, type, real_part, imag_part);
8418 }
8419 }
8420
8421 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
8422 }
8423
8424 f_gpr = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
8425 f_fpr = TREE_CHAIN (f_gpr);
8426 f_res = TREE_CHAIN (f_fpr);
8427 f_ovf = TREE_CHAIN (f_res);
8428 f_sav = TREE_CHAIN (f_ovf);
8429
8430 valist = build_va_arg_indirect_ref (valist);
8431 gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
8432 fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), unshare_expr (valist),
8433 f_fpr, NULL_TREE);
8434 ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist),
8435 f_ovf, NULL_TREE);
8436 sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), unshare_expr (valist),
8437 f_sav, NULL_TREE);
8438
8439 size = int_size_in_bytes (type); 9844 size = int_size_in_bytes (type);
8440 rsize = (size + 3) / 4; 9845 rsize = (size + 3) / 4;
8441 align = 1; 9846 align = 1;
8442 9847
8443 if (TARGET_HARD_FLOAT && TARGET_FPRS 9848 if (TARGET_HARD_FLOAT && TARGET_FPRS
8593 { 9998 {
8594 if ((mask & target_flags) || TARGET_PAIRED_FLOAT) 9999 if ((mask & target_flags) || TARGET_PAIRED_FLOAT)
8595 { 10000 {
8596 tree t; 10001 tree t;
8597 if (rs6000_builtin_decls[code]) 10002 if (rs6000_builtin_decls[code])
8598 fatal_error ("internal error: builtin function to %s already processed.", 10003 fatal_error ("internal error: builtin function to %s already processed",
8599 name); 10004 name);
8600 10005
8601 rs6000_builtin_decls[code] = t = 10006 rs6000_builtin_decls[code] = t =
8602 add_builtin_function (name, type, code, BUILT_IN_MD, 10007 add_builtin_function (name, type, code, BUILT_IN_MD,
8603 NULL, NULL_TREE); 10008 NULL, NULL_TREE);
8646 10051
8647 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */ 10052 /* Simple ternary operations: VECd = foo (VECa, VECb, VECc). */
8648 10053
8649 static const struct builtin_description bdesc_3arg[] = 10054 static const struct builtin_description bdesc_3arg[] =
8650 { 10055 {
8651 { MASK_ALTIVEC, CODE_FOR_altivec_vmaddfp, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP }, 10056 { MASK_ALTIVEC, CODE_FOR_fmav4sf4, "__builtin_altivec_vmaddfp", ALTIVEC_BUILTIN_VMADDFP },
8652 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS }, 10057 { MASK_ALTIVEC, CODE_FOR_altivec_vmhaddshs, "__builtin_altivec_vmhaddshs", ALTIVEC_BUILTIN_VMHADDSHS },
8653 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS }, 10058 { MASK_ALTIVEC, CODE_FOR_altivec_vmhraddshs, "__builtin_altivec_vmhraddshs", ALTIVEC_BUILTIN_VMHRADDSHS },
8654 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM}, 10059 { MASK_ALTIVEC, CODE_FOR_altivec_vmladduhm, "__builtin_altivec_vmladduhm", ALTIVEC_BUILTIN_VMLADDUHM},
8655 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM }, 10060 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumubm, "__builtin_altivec_vmsumubm", ALTIVEC_BUILTIN_VMSUMUBM },
8656 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM }, 10061 { MASK_ALTIVEC, CODE_FOR_altivec_vmsummbm, "__builtin_altivec_vmsummbm", ALTIVEC_BUILTIN_VMSUMMBM },
8657 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM }, 10062 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhm, "__builtin_altivec_vmsumuhm", ALTIVEC_BUILTIN_VMSUMUHM },
8658 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM }, 10063 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshm, "__builtin_altivec_vmsumshm", ALTIVEC_BUILTIN_VMSUMSHM },
8659 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS }, 10064 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumuhs, "__builtin_altivec_vmsumuhs", ALTIVEC_BUILTIN_VMSUMUHS },
8660 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS }, 10065 { MASK_ALTIVEC, CODE_FOR_altivec_vmsumshs, "__builtin_altivec_vmsumshs", ALTIVEC_BUILTIN_VMSUMSHS },
8661 { MASK_ALTIVEC, CODE_FOR_altivec_vnmsubfp, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP }, 10066 { MASK_ALTIVEC, CODE_FOR_nfmsv4sf4, "__builtin_altivec_vnmsubfp", ALTIVEC_BUILTIN_VNMSUBFP },
8662 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v2df, "__builtin_altivec_vperm_2df", ALTIVEC_BUILTIN_VPERM_2DF }, 10067 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v2df, "__builtin_altivec_vperm_2df", ALTIVEC_BUILTIN_VPERM_2DF },
8663 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v2di, "__builtin_altivec_vperm_2di", ALTIVEC_BUILTIN_VPERM_2DI }, 10068 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v2di, "__builtin_altivec_vperm_2di", ALTIVEC_BUILTIN_VPERM_2DI },
8664 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF }, 10069 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4sf, "__builtin_altivec_vperm_4sf", ALTIVEC_BUILTIN_VPERM_4SF },
8665 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI }, 10070 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v4si, "__builtin_altivec_vperm_4si", ALTIVEC_BUILTIN_VPERM_4SI },
8666 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI }, 10071 { MASK_ALTIVEC, CODE_FOR_altivec_vperm_v8hi, "__builtin_altivec_vperm_8hi", ALTIVEC_BUILTIN_VPERM_8HI },
8698 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS }, 10103 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vmsumuhs", ALTIVEC_BUILTIN_VEC_VMSUMUHS },
8699 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB }, 10104 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmsub", ALTIVEC_BUILTIN_VEC_NMSUB },
8700 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM }, 10105 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_perm", ALTIVEC_BUILTIN_VEC_PERM },
8701 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL }, 10106 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sel", ALTIVEC_BUILTIN_VEC_SEL },
8702 10107
8703 { MASK_VSX, CODE_FOR_vsx_fmaddv2df4, "__builtin_vsx_xvmadddp", VSX_BUILTIN_XVMADDDP }, 10108 { MASK_VSX, CODE_FOR_fmav2df4, "__builtin_vsx_xvmadddp", VSX_BUILTIN_XVMADDDP },
8704 { MASK_VSX, CODE_FOR_vsx_fmsubv2df4, "__builtin_vsx_xvmsubdp", VSX_BUILTIN_XVMSUBDP }, 10109 { MASK_VSX, CODE_FOR_fmsv2df4, "__builtin_vsx_xvmsubdp", VSX_BUILTIN_XVMSUBDP },
8705 { MASK_VSX, CODE_FOR_vsx_fnmaddv2df4, "__builtin_vsx_xvnmadddp", VSX_BUILTIN_XVNMADDDP }, 10110 { MASK_VSX, CODE_FOR_nfmav2df4, "__builtin_vsx_xvnmadddp", VSX_BUILTIN_XVNMADDDP },
8706 { MASK_VSX, CODE_FOR_vsx_fnmsubv2df4, "__builtin_vsx_xvnmsubdp", VSX_BUILTIN_XVNMSUBDP }, 10111 { MASK_VSX, CODE_FOR_nfmsv2df4, "__builtin_vsx_xvnmsubdp", VSX_BUILTIN_XVNMSUBDP },
8707 10112
8708 { MASK_VSX, CODE_FOR_vsx_fmaddv4sf4, "__builtin_vsx_xvmaddsp", VSX_BUILTIN_XVMADDSP }, 10113 { MASK_VSX, CODE_FOR_fmav4sf4, "__builtin_vsx_xvmaddsp", VSX_BUILTIN_XVMADDSP },
8709 { MASK_VSX, CODE_FOR_vsx_fmsubv4sf4, "__builtin_vsx_xvmsubsp", VSX_BUILTIN_XVMSUBSP }, 10114 { MASK_VSX, CODE_FOR_fmsv4sf4, "__builtin_vsx_xvmsubsp", VSX_BUILTIN_XVMSUBSP },
8710 { MASK_VSX, CODE_FOR_vsx_fnmaddv4sf4, "__builtin_vsx_xvnmaddsp", VSX_BUILTIN_XVNMADDSP }, 10115 { MASK_VSX, CODE_FOR_nfmav4sf4, "__builtin_vsx_xvnmaddsp", VSX_BUILTIN_XVNMADDSP },
8711 { MASK_VSX, CODE_FOR_vsx_fnmsubv4sf4, "__builtin_vsx_xvnmsubsp", VSX_BUILTIN_XVNMSUBSP }, 10116 { MASK_VSX, CODE_FOR_nfmsv4sf4, "__builtin_vsx_xvnmsubsp", VSX_BUILTIN_XVNMSUBSP },
8712 10117
8713 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msub", VSX_BUILTIN_VEC_MSUB }, 10118 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_msub", VSX_BUILTIN_VEC_MSUB },
8714 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmadd", VSX_BUILTIN_VEC_NMADD }, 10119 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_nmadd", VSX_BUILTIN_VEC_NMADD },
8715 10120
8716 { MASK_VSX, CODE_FOR_vector_select_v2di, "__builtin_vsx_xxsel_2di", VSX_BUILTIN_XXSEL_2DI }, 10121 { MASK_VSX, CODE_FOR_vector_select_v2di, "__builtin_vsx_xxsel_2di", VSX_BUILTIN_XXSEL_2DI },
8751 { MASK_VSX, CODE_FOR_vsx_xxsldwi_v4si, "__builtin_vsx_xxsldwi_4si", VSX_BUILTIN_XXSLDWI_4SI }, 10156 { MASK_VSX, CODE_FOR_vsx_xxsldwi_v4si, "__builtin_vsx_xxsldwi_4si", VSX_BUILTIN_XXSLDWI_4SI },
8752 { MASK_VSX, CODE_FOR_vsx_xxsldwi_v8hi, "__builtin_vsx_xxsldwi_8hi", VSX_BUILTIN_XXSLDWI_8HI }, 10157 { MASK_VSX, CODE_FOR_vsx_xxsldwi_v8hi, "__builtin_vsx_xxsldwi_8hi", VSX_BUILTIN_XXSLDWI_8HI },
8753 { MASK_VSX, CODE_FOR_vsx_xxsldwi_v16qi, "__builtin_vsx_xxsldwi_16qi", VSX_BUILTIN_XXSLDWI_16QI }, 10158 { MASK_VSX, CODE_FOR_vsx_xxsldwi_v16qi, "__builtin_vsx_xxsldwi_16qi", VSX_BUILTIN_XXSLDWI_16QI },
8754 { MASK_VSX, CODE_FOR_nothing, "__builtin_vsx_xxsldwi", VSX_BUILTIN_VEC_XXSLDWI }, 10159 { MASK_VSX, CODE_FOR_nothing, "__builtin_vsx_xxsldwi", VSX_BUILTIN_VEC_XXSLDWI },
8755 10160
8756 { 0, CODE_FOR_paired_msub, "__builtin_paired_msub", PAIRED_BUILTIN_MSUB }, 10161 { 0, CODE_FOR_fmsv2sf4, "__builtin_paired_msub", PAIRED_BUILTIN_MSUB },
8757 { 0, CODE_FOR_paired_madd, "__builtin_paired_madd", PAIRED_BUILTIN_MADD }, 10162 { 0, CODE_FOR_fmav2sf4, "__builtin_paired_madd", PAIRED_BUILTIN_MADD },
8758 { 0, CODE_FOR_paired_madds0, "__builtin_paired_madds0", PAIRED_BUILTIN_MADDS0 }, 10163 { 0, CODE_FOR_paired_madds0, "__builtin_paired_madds0", PAIRED_BUILTIN_MADDS0 },
8759 { 0, CODE_FOR_paired_madds1, "__builtin_paired_madds1", PAIRED_BUILTIN_MADDS1 }, 10164 { 0, CODE_FOR_paired_madds1, "__builtin_paired_madds1", PAIRED_BUILTIN_MADDS1 },
8760 { 0, CODE_FOR_paired_nmsub, "__builtin_paired_nmsub", PAIRED_BUILTIN_NMSUB }, 10165 { 0, CODE_FOR_nfmsv2sf4, "__builtin_paired_nmsub", PAIRED_BUILTIN_NMSUB },
8761 { 0, CODE_FOR_paired_nmadd, "__builtin_paired_nmadd", PAIRED_BUILTIN_NMADD }, 10166 { 0, CODE_FOR_nfmav2sf4, "__builtin_paired_nmadd", PAIRED_BUILTIN_NMADD },
8762 { 0, CODE_FOR_paired_sum0, "__builtin_paired_sum0", PAIRED_BUILTIN_SUM0 }, 10167 { 0, CODE_FOR_paired_sum0, "__builtin_paired_sum0", PAIRED_BUILTIN_SUM0 },
8763 { 0, CODE_FOR_paired_sum1, "__builtin_paired_sum1", PAIRED_BUILTIN_SUM1 }, 10168 { 0, CODE_FOR_paired_sum1, "__builtin_paired_sum1", PAIRED_BUILTIN_SUM1 },
8764 { 0, CODE_FOR_selv2sf4, "__builtin_paired_selv2sf4", PAIRED_BUILTIN_SELV2SF4 }, 10169 { 0, CODE_FOR_selv2sf4, "__builtin_paired_selv2sf4", PAIRED_BUILTIN_SELV2SF4 },
8765 }; 10170 };
8766 10171
8860 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS }, 10265 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswss, "__builtin_altivec_vpkswss", ALTIVEC_BUILTIN_VPKSWSS },
8861 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS }, 10266 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuhus, "__builtin_altivec_vpkuhus", ALTIVEC_BUILTIN_VPKUHUS },
8862 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS }, 10267 { MASK_ALTIVEC, CODE_FOR_altivec_vpkshus, "__builtin_altivec_vpkshus", ALTIVEC_BUILTIN_VPKSHUS },
8863 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS }, 10268 { MASK_ALTIVEC, CODE_FOR_altivec_vpkuwus, "__builtin_altivec_vpkuwus", ALTIVEC_BUILTIN_VPKUWUS },
8864 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS }, 10269 { MASK_ALTIVEC, CODE_FOR_altivec_vpkswus, "__builtin_altivec_vpkswus", ALTIVEC_BUILTIN_VPKSWUS },
10270 { MASK_ALTIVEC, CODE_FOR_recipv4sf3, "__builtin_altivec_vrecipdivfp", ALTIVEC_BUILTIN_VRECIPFP },
8865 { MASK_ALTIVEC, CODE_FOR_vrotlv16qi3, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB }, 10271 { MASK_ALTIVEC, CODE_FOR_vrotlv16qi3, "__builtin_altivec_vrlb", ALTIVEC_BUILTIN_VRLB },
8866 { MASK_ALTIVEC, CODE_FOR_vrotlv8hi3, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH }, 10272 { MASK_ALTIVEC, CODE_FOR_vrotlv8hi3, "__builtin_altivec_vrlh", ALTIVEC_BUILTIN_VRLH },
8867 { MASK_ALTIVEC, CODE_FOR_vrotlv4si3, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW }, 10273 { MASK_ALTIVEC, CODE_FOR_vrotlv4si3, "__builtin_altivec_vrlw", ALTIVEC_BUILTIN_VRLW },
8868 { MASK_ALTIVEC, CODE_FOR_vashlv16qi3, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB }, 10274 { MASK_ALTIVEC, CODE_FOR_vashlv16qi3, "__builtin_altivec_vslb", ALTIVEC_BUILTIN_VSLB },
8869 { MASK_ALTIVEC, CODE_FOR_vashlv8hi3, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH }, 10275 { MASK_ALTIVEC, CODE_FOR_vashlv8hi3, "__builtin_altivec_vslh", ALTIVEC_BUILTIN_VSLH },
8902 10308
8903 { MASK_VSX, CODE_FOR_addv2df3, "__builtin_vsx_xvadddp", VSX_BUILTIN_XVADDDP }, 10309 { MASK_VSX, CODE_FOR_addv2df3, "__builtin_vsx_xvadddp", VSX_BUILTIN_XVADDDP },
8904 { MASK_VSX, CODE_FOR_subv2df3, "__builtin_vsx_xvsubdp", VSX_BUILTIN_XVSUBDP }, 10310 { MASK_VSX, CODE_FOR_subv2df3, "__builtin_vsx_xvsubdp", VSX_BUILTIN_XVSUBDP },
8905 { MASK_VSX, CODE_FOR_mulv2df3, "__builtin_vsx_xvmuldp", VSX_BUILTIN_XVMULDP }, 10311 { MASK_VSX, CODE_FOR_mulv2df3, "__builtin_vsx_xvmuldp", VSX_BUILTIN_XVMULDP },
8906 { MASK_VSX, CODE_FOR_divv2df3, "__builtin_vsx_xvdivdp", VSX_BUILTIN_XVDIVDP }, 10312 { MASK_VSX, CODE_FOR_divv2df3, "__builtin_vsx_xvdivdp", VSX_BUILTIN_XVDIVDP },
10313 { MASK_VSX, CODE_FOR_recipv2df3, "__builtin_vsx_xvrecipdivdp", VSX_BUILTIN_RECIP_V2DF },
8907 { MASK_VSX, CODE_FOR_sminv2df3, "__builtin_vsx_xvmindp", VSX_BUILTIN_XVMINDP }, 10314 { MASK_VSX, CODE_FOR_sminv2df3, "__builtin_vsx_xvmindp", VSX_BUILTIN_XVMINDP },
8908 { MASK_VSX, CODE_FOR_smaxv2df3, "__builtin_vsx_xvmaxdp", VSX_BUILTIN_XVMAXDP }, 10315 { MASK_VSX, CODE_FOR_smaxv2df3, "__builtin_vsx_xvmaxdp", VSX_BUILTIN_XVMAXDP },
8909 { MASK_VSX, CODE_FOR_vsx_tdivv2df3_fe, "__builtin_vsx_xvtdivdp_fe", VSX_BUILTIN_XVTDIVDP_FE }, 10316 { MASK_VSX, CODE_FOR_vsx_tdivv2df3_fe, "__builtin_vsx_xvtdivdp_fe", VSX_BUILTIN_XVTDIVDP_FE },
8910 { MASK_VSX, CODE_FOR_vsx_tdivv2df3_fg, "__builtin_vsx_xvtdivdp_fg", VSX_BUILTIN_XVTDIVDP_FG }, 10317 { MASK_VSX, CODE_FOR_vsx_tdivv2df3_fg, "__builtin_vsx_xvtdivdp_fg", VSX_BUILTIN_XVTDIVDP_FG },
8911 { MASK_VSX, CODE_FOR_vector_eqv2df, "__builtin_vsx_xvcmpeqdp", VSX_BUILTIN_XVCMPEQDP }, 10318 { MASK_VSX, CODE_FOR_vector_eqv2df, "__builtin_vsx_xvcmpeqdp", VSX_BUILTIN_XVCMPEQDP },
8914 10321
8915 { MASK_VSX, CODE_FOR_addv4sf3, "__builtin_vsx_xvaddsp", VSX_BUILTIN_XVADDSP }, 10322 { MASK_VSX, CODE_FOR_addv4sf3, "__builtin_vsx_xvaddsp", VSX_BUILTIN_XVADDSP },
8916 { MASK_VSX, CODE_FOR_subv4sf3, "__builtin_vsx_xvsubsp", VSX_BUILTIN_XVSUBSP }, 10323 { MASK_VSX, CODE_FOR_subv4sf3, "__builtin_vsx_xvsubsp", VSX_BUILTIN_XVSUBSP },
8917 { MASK_VSX, CODE_FOR_mulv4sf3, "__builtin_vsx_xvmulsp", VSX_BUILTIN_XVMULSP }, 10324 { MASK_VSX, CODE_FOR_mulv4sf3, "__builtin_vsx_xvmulsp", VSX_BUILTIN_XVMULSP },
8918 { MASK_VSX, CODE_FOR_divv4sf3, "__builtin_vsx_xvdivsp", VSX_BUILTIN_XVDIVSP }, 10325 { MASK_VSX, CODE_FOR_divv4sf3, "__builtin_vsx_xvdivsp", VSX_BUILTIN_XVDIVSP },
10326 { MASK_VSX, CODE_FOR_recipv4sf3, "__builtin_vsx_xvrecipdivsp", VSX_BUILTIN_RECIP_V4SF },
8919 { MASK_VSX, CODE_FOR_sminv4sf3, "__builtin_vsx_xvminsp", VSX_BUILTIN_XVMINSP }, 10327 { MASK_VSX, CODE_FOR_sminv4sf3, "__builtin_vsx_xvminsp", VSX_BUILTIN_XVMINSP },
8920 { MASK_VSX, CODE_FOR_smaxv4sf3, "__builtin_vsx_xvmaxsp", VSX_BUILTIN_XVMAXSP }, 10328 { MASK_VSX, CODE_FOR_smaxv4sf3, "__builtin_vsx_xvmaxsp", VSX_BUILTIN_XVMAXSP },
8921 { MASK_VSX, CODE_FOR_vsx_tdivv4sf3_fe, "__builtin_vsx_xvtdivsp_fe", VSX_BUILTIN_XVTDIVSP_FE }, 10329 { MASK_VSX, CODE_FOR_vsx_tdivv4sf3_fe, "__builtin_vsx_xvtdivsp_fe", VSX_BUILTIN_XVTDIVSP_FE },
8922 { MASK_VSX, CODE_FOR_vsx_tdivv4sf3_fg, "__builtin_vsx_xvtdivsp_fg", VSX_BUILTIN_XVTDIVSP_FG }, 10330 { MASK_VSX, CODE_FOR_vsx_tdivv4sf3_fg, "__builtin_vsx_xvtdivsp_fg", VSX_BUILTIN_XVTDIVSP_FG },
8923 { MASK_VSX, CODE_FOR_vector_eqv4sf, "__builtin_vsx_xvcmpeqsp", VSX_BUILTIN_XVCMPEQSP }, 10331 { MASK_VSX, CODE_FOR_vector_eqv4sf, "__builtin_vsx_xvcmpeqsp", VSX_BUILTIN_XVCMPEQSP },
9030 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS }, 10438 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshss", ALTIVEC_BUILTIN_VEC_VPKSHSS },
9031 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS }, 10439 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkuhus", ALTIVEC_BUILTIN_VEC_VPKUHUS },
9032 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU }, 10440 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_packsu", ALTIVEC_BUILTIN_VEC_PACKSU },
9033 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS }, 10441 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkswus", ALTIVEC_BUILTIN_VEC_VPKSWUS },
9034 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS }, 10442 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vpkshus", ALTIVEC_BUILTIN_VEC_VPKSHUS },
10443 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_recipdiv", ALTIVEC_BUILTIN_VEC_RECIP },
9035 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL }, 10444 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rl", ALTIVEC_BUILTIN_VEC_RL },
9036 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW }, 10445 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlw", ALTIVEC_BUILTIN_VEC_VRLW },
9037 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH }, 10446 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlh", ALTIVEC_BUILTIN_VEC_VRLH },
9038 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB }, 10447 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vrlb", ALTIVEC_BUILTIN_VEC_VRLB },
9039 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL }, 10448 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_sl", ALTIVEC_BUILTIN_VEC_SL },
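The ALTIVEC_BUILTIN_VEC_RECIP overload added above backs a reciprocal-divide intrinsic. A minimal usage sketch, assuming altivec.h exposes the overload as vec_recipdiv (that spelling is an assumption, not taken from this hunk) and compiling with -maltivec:

    #include <altivec.h>

    /* a/b computed through the reciprocal-estimate expansion selected by
       ALTIVEC_BUILTIN_VEC_RECIP rather than a full-precision divide.  */
    vector float
    frac (vector float a, vector float b)
    {
      return vec_recipdiv (a, b);
    }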
9359 10768
9360 static struct builtin_description bdesc_1arg[] = 10769 static struct builtin_description bdesc_1arg[] =
9361 { 10770 {
9362 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP }, 10771 { MASK_ALTIVEC, CODE_FOR_altivec_vexptefp, "__builtin_altivec_vexptefp", ALTIVEC_BUILTIN_VEXPTEFP },
9363 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP }, 10772 { MASK_ALTIVEC, CODE_FOR_altivec_vlogefp, "__builtin_altivec_vlogefp", ALTIVEC_BUILTIN_VLOGEFP },
9364 { MASK_ALTIVEC, CODE_FOR_altivec_vrefp, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP }, 10773 { MASK_ALTIVEC, CODE_FOR_rev4sf2, "__builtin_altivec_vrefp", ALTIVEC_BUILTIN_VREFP },
9365 { MASK_ALTIVEC, CODE_FOR_vector_floorv4sf2, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM }, 10774 { MASK_ALTIVEC, CODE_FOR_vector_floorv4sf2, "__builtin_altivec_vrfim", ALTIVEC_BUILTIN_VRFIM },
9366 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN }, 10775 { MASK_ALTIVEC, CODE_FOR_altivec_vrfin, "__builtin_altivec_vrfin", ALTIVEC_BUILTIN_VRFIN },
9367 { MASK_ALTIVEC, CODE_FOR_vector_ceilv4sf2, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP }, 10776 { MASK_ALTIVEC, CODE_FOR_vector_ceilv4sf2, "__builtin_altivec_vrfip", ALTIVEC_BUILTIN_VRFIP },
9368 { MASK_ALTIVEC, CODE_FOR_vector_btruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ }, 10777 { MASK_ALTIVEC, CODE_FOR_vector_btruncv4sf2, "__builtin_altivec_vrfiz", ALTIVEC_BUILTIN_VRFIZ },
9369 { MASK_ALTIVEC, CODE_FOR_altivec_vrsqrtefp, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP }, 10778 { MASK_ALTIVEC, CODE_FOR_rsqrtv4sf2, "__builtin_altivec_vrsqrtfp", ALTIVEC_BUILTIN_VRSQRTFP },
10779 { MASK_ALTIVEC, CODE_FOR_rsqrtev4sf2, "__builtin_altivec_vrsqrtefp", ALTIVEC_BUILTIN_VRSQRTEFP },
9370 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB }, 10780 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisb, "__builtin_altivec_vspltisb", ALTIVEC_BUILTIN_VSPLTISB },
9371 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH }, 10781 { MASK_ALTIVEC, CODE_FOR_altivec_vspltish, "__builtin_altivec_vspltish", ALTIVEC_BUILTIN_VSPLTISH },
9372 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW }, 10782 { MASK_ALTIVEC, CODE_FOR_altivec_vspltisw, "__builtin_altivec_vspltisw", ALTIVEC_BUILTIN_VSPLTISW },
9373 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB }, 10783 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhsb, "__builtin_altivec_vupkhsb", ALTIVEC_BUILTIN_VUPKHSB },
9374 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX }, 10784 { MASK_ALTIVEC, CODE_FOR_altivec_vupkhpx, "__builtin_altivec_vupkhpx", ALTIVEC_BUILTIN_VUPKHPX },
9377 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX }, 10787 { MASK_ALTIVEC, CODE_FOR_altivec_vupklpx, "__builtin_altivec_vupklpx", ALTIVEC_BUILTIN_VUPKLPX },
9378 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH }, 10788 { MASK_ALTIVEC, CODE_FOR_altivec_vupklsh, "__builtin_altivec_vupklsh", ALTIVEC_BUILTIN_VUPKLSH },
9379 10789
9380 { MASK_VSX, CODE_FOR_negv2df2, "__builtin_vsx_xvnegdp", VSX_BUILTIN_XVNEGDP }, 10790 { MASK_VSX, CODE_FOR_negv2df2, "__builtin_vsx_xvnegdp", VSX_BUILTIN_XVNEGDP },
9381 { MASK_VSX, CODE_FOR_sqrtv2df2, "__builtin_vsx_xvsqrtdp", VSX_BUILTIN_XVSQRTDP }, 10791 { MASK_VSX, CODE_FOR_sqrtv2df2, "__builtin_vsx_xvsqrtdp", VSX_BUILTIN_XVSQRTDP },
9382 { MASK_VSX, CODE_FOR_vsx_rsqrtev2df2, "__builtin_vsx_xvrsqrtedp", VSX_BUILTIN_XVRSQRTEDP }, 10792 { MASK_VSX, CODE_FOR_rsqrtv2df2, "__builtin_vsx_xvrsqrtdp", VSX_BUILTIN_VEC_RSQRT_V2DF },
10793 { MASK_VSX, CODE_FOR_rsqrtev2df2, "__builtin_vsx_xvrsqrtedp", VSX_BUILTIN_XVRSQRTEDP },
9383 { MASK_VSX, CODE_FOR_vsx_tsqrtv2df2_fe, "__builtin_vsx_xvtsqrtdp_fe", VSX_BUILTIN_XVTSQRTDP_FE }, 10794 { MASK_VSX, CODE_FOR_vsx_tsqrtv2df2_fe, "__builtin_vsx_xvtsqrtdp_fe", VSX_BUILTIN_XVTSQRTDP_FE },
9384 { MASK_VSX, CODE_FOR_vsx_tsqrtv2df2_fg, "__builtin_vsx_xvtsqrtdp_fg", VSX_BUILTIN_XVTSQRTDP_FG }, 10795 { MASK_VSX, CODE_FOR_vsx_tsqrtv2df2_fg, "__builtin_vsx_xvtsqrtdp_fg", VSX_BUILTIN_XVTSQRTDP_FG },
9385 { MASK_VSX, CODE_FOR_vsx_frev2df2, "__builtin_vsx_xvredp", VSX_BUILTIN_XVREDP }, 10796 { MASK_VSX, CODE_FOR_vsx_frev2df2, "__builtin_vsx_xvredp", VSX_BUILTIN_XVREDP },
9386 10797
9387 { MASK_VSX, CODE_FOR_negv4sf2, "__builtin_vsx_xvnegsp", VSX_BUILTIN_XVNEGSP }, 10798 { MASK_VSX, CODE_FOR_negv4sf2, "__builtin_vsx_xvnegsp", VSX_BUILTIN_XVNEGSP },
9388 { MASK_VSX, CODE_FOR_sqrtv4sf2, "__builtin_vsx_xvsqrtsp", VSX_BUILTIN_XVSQRTSP }, 10799 { MASK_VSX, CODE_FOR_sqrtv4sf2, "__builtin_vsx_xvsqrtsp", VSX_BUILTIN_XVSQRTSP },
9389 { MASK_VSX, CODE_FOR_vsx_rsqrtev4sf2, "__builtin_vsx_xvrsqrtesp", VSX_BUILTIN_XVRSQRTESP }, 10800 { MASK_VSX, CODE_FOR_rsqrtv4sf2, "__builtin_vsx_xvrsqrtsp", VSX_BUILTIN_VEC_RSQRT_V4SF },
10801 { MASK_VSX, CODE_FOR_rsqrtev4sf2, "__builtin_vsx_xvrsqrtesp", VSX_BUILTIN_XVRSQRTESP },
9390 { MASK_VSX, CODE_FOR_vsx_tsqrtv4sf2_fe, "__builtin_vsx_xvtsqrtsp_fe", VSX_BUILTIN_XVTSQRTSP_FE }, 10802 { MASK_VSX, CODE_FOR_vsx_tsqrtv4sf2_fe, "__builtin_vsx_xvtsqrtsp_fe", VSX_BUILTIN_XVTSQRTSP_FE },
9391 { MASK_VSX, CODE_FOR_vsx_tsqrtv4sf2_fg, "__builtin_vsx_xvtsqrtsp_fg", VSX_BUILTIN_XVTSQRTSP_FG }, 10803 { MASK_VSX, CODE_FOR_vsx_tsqrtv4sf2_fg, "__builtin_vsx_xvtsqrtsp_fg", VSX_BUILTIN_XVTSQRTSP_FG },
9392 { MASK_VSX, CODE_FOR_vsx_frev4sf2, "__builtin_vsx_xvresp", VSX_BUILTIN_XVRESP }, 10804 { MASK_VSX, CODE_FOR_vsx_frev4sf2, "__builtin_vsx_xvresp", VSX_BUILTIN_XVRESP },
9393 10805
9394 { MASK_VSX, CODE_FOR_vsx_xscvdpsp, "__builtin_vsx_xscvdpsp", VSX_BUILTIN_XSCVDPSP }, 10806 { MASK_VSX, CODE_FOR_vsx_xscvdpsp, "__builtin_vsx_xscvdpsp", VSX_BUILTIN_XSCVDPSP },
9443 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR }, 10855 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_floor", ALTIVEC_BUILTIN_VEC_FLOOR },
9444 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE }, 10856 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_loge", ALTIVEC_BUILTIN_VEC_LOGE },
9445 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR }, 10857 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_mtvscr", ALTIVEC_BUILTIN_VEC_MTVSCR },
9446 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE }, 10858 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_re", ALTIVEC_BUILTIN_VEC_RE },
9447 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND }, 10859 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_round", ALTIVEC_BUILTIN_VEC_ROUND },
10860 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrt", ALTIVEC_BUILTIN_VEC_RSQRT },
9448 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE }, 10861 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_rsqrte", ALTIVEC_BUILTIN_VEC_RSQRTE },
9449 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC }, 10862 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_trunc", ALTIVEC_BUILTIN_VEC_TRUNC },
9450 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH }, 10863 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_unpackh", ALTIVEC_BUILTIN_VEC_UNPACKH },
9451 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH }, 10864 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhsh", ALTIVEC_BUILTIN_VEC_VUPKHSH },
9452 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX }, 10865 { MASK_ALTIVEC, CODE_FOR_nothing, "__builtin_vec_vupkhpx", ALTIVEC_BUILTIN_VEC_VUPKHPX },
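__builtin_vec_rsqrt joins the existing vec_rsqrte estimate in the overload table above. A minimal sketch of the intended difference, assuming altivec.h spells the overloads vec_rsqrt and vec_rsqrte (the former spelling is an assumption) and compiling with -maltivec:

    #include <altivec.h>

    vector float
    inv_sqrt_estimate (vector float x)
    {
      return vec_rsqrte (x);   /* raw vrsqrtefp estimate */
    }

    vector float
    inv_sqrt (vector float x)
    {
      return vec_rsqrt (x);    /* refined result, via the new rsqrtv4sf2 expansion named above */
    }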
9910 rtx op0 = expand_normal (arg0); 11323 rtx op0 = expand_normal (arg0);
9911 rtx op1 = expand_normal (arg1); 11324 rtx op1 = expand_normal (arg1);
9912 rtx op2 = expand_normal (arg2); 11325 rtx op2 = expand_normal (arg2);
9913 rtx pat, addr; 11326 rtx pat, addr;
9914 enum machine_mode tmode = insn_data[icode].operand[0].mode; 11327 enum machine_mode tmode = insn_data[icode].operand[0].mode;
11328 enum machine_mode smode = insn_data[icode].operand[1].mode;
9915 enum machine_mode mode1 = Pmode; 11329 enum machine_mode mode1 = Pmode;
9916 enum machine_mode mode2 = Pmode; 11330 enum machine_mode mode2 = Pmode;
9917 11331
9918 /* Invalid arguments. Bail before doing anything stupid! */ 11332 /* Invalid arguments. Bail before doing anything stupid! */
9919 if (arg0 == error_mark_node 11333 if (arg0 == error_mark_node
9920 || arg1 == error_mark_node 11334 || arg1 == error_mark_node
9921 || arg2 == error_mark_node) 11335 || arg2 == error_mark_node)
9922 return const0_rtx; 11336 return const0_rtx;
9923 11337
9924 if (! (*insn_data[icode].operand[1].predicate) (op0, tmode)) 11338 if (! (*insn_data[icode].operand[1].predicate) (op0, smode))
9925 op0 = copy_to_mode_reg (tmode, op0); 11339 op0 = copy_to_mode_reg (smode, op0);
9926 11340
9927 op2 = copy_to_mode_reg (mode2, op2); 11341 op2 = copy_to_mode_reg (mode2, op2);
9928 11342
9929 if (op1 == const0_rtx) 11343 if (op1 == const0_rtx)
9930 { 11344 {
9965 if (arg0 == error_mark_node 11379 if (arg0 == error_mark_node
9966 || arg1 == error_mark_node 11380 || arg1 == error_mark_node
9967 || arg2 == error_mark_node) 11381 || arg2 == error_mark_node)
9968 return const0_rtx; 11382 return const0_rtx;
9969 11383
9970 switch (icode) 11384 /* Check and prepare argument depending on the instruction code.
9971 { 11385
9972 case CODE_FOR_altivec_vsldoi_v4sf: 11386 Note that a switch statement instead of the sequence of tests
9973 case CODE_FOR_altivec_vsldoi_v4si: 11387 would be incorrect as many of the CODE_FOR values could be
9974 case CODE_FOR_altivec_vsldoi_v8hi: 11388 CODE_FOR_nothing and that would yield multiple alternatives
9975 case CODE_FOR_altivec_vsldoi_v16qi: 11389 with identical values. We'd never reach here at runtime in
11390 this case. */
11391 if (icode == CODE_FOR_altivec_vsldoi_v4sf
11392 || icode == CODE_FOR_altivec_vsldoi_v4si
11393 || icode == CODE_FOR_altivec_vsldoi_v8hi
11394 || icode == CODE_FOR_altivec_vsldoi_v16qi)
11395 {
9976 /* Only allow 4-bit unsigned literals. */ 11396 /* Only allow 4-bit unsigned literals. */
9977 STRIP_NOPS (arg2); 11397 STRIP_NOPS (arg2);
9978 if (TREE_CODE (arg2) != INTEGER_CST 11398 if (TREE_CODE (arg2) != INTEGER_CST
9979 || TREE_INT_CST_LOW (arg2) & ~0xf) 11399 || TREE_INT_CST_LOW (arg2) & ~0xf)
9980 { 11400 {
9981 error ("argument 3 must be a 4-bit unsigned literal"); 11401 error ("argument 3 must be a 4-bit unsigned literal");
9982 return const0_rtx; 11402 return const0_rtx;
9983 } 11403 }
9984 break; 11404 }
9985 11405 else if (icode == CODE_FOR_vsx_xxpermdi_v2df
9986 case CODE_FOR_vsx_xxpermdi_v2df: 11406 || icode == CODE_FOR_vsx_xxpermdi_v2di
9987 case CODE_FOR_vsx_xxpermdi_v2di: 11407 || icode == CODE_FOR_vsx_xxsldwi_v16qi
9988 case CODE_FOR_vsx_xxsldwi_v16qi: 11408 || icode == CODE_FOR_vsx_xxsldwi_v8hi
9989 case CODE_FOR_vsx_xxsldwi_v8hi: 11409 || icode == CODE_FOR_vsx_xxsldwi_v4si
9990 case CODE_FOR_vsx_xxsldwi_v4si: 11410 || icode == CODE_FOR_vsx_xxsldwi_v4sf
9991 case CODE_FOR_vsx_xxsldwi_v4sf: 11411 || icode == CODE_FOR_vsx_xxsldwi_v2di
9992 case CODE_FOR_vsx_xxsldwi_v2di: 11412 || icode == CODE_FOR_vsx_xxsldwi_v2df)
9993 case CODE_FOR_vsx_xxsldwi_v2df: 11413 {
9994 /* Only allow 2-bit unsigned literals. */ 11414 /* Only allow 2-bit unsigned literals. */
9995 STRIP_NOPS (arg2); 11415 STRIP_NOPS (arg2);
9996 if (TREE_CODE (arg2) != INTEGER_CST 11416 if (TREE_CODE (arg2) != INTEGER_CST
9997 || TREE_INT_CST_LOW (arg2) & ~0x3) 11417 || TREE_INT_CST_LOW (arg2) & ~0x3)
9998 { 11418 {
9999 error ("argument 3 must be a 2-bit unsigned literal"); 11419 error ("argument 3 must be a 2-bit unsigned literal");
10000 return const0_rtx; 11420 return const0_rtx;
10001 } 11421 }
10002 break; 11422 }
10003 11423 else if (icode == CODE_FOR_vsx_set_v2df
10004 case CODE_FOR_vsx_set_v2df: 11424 || icode == CODE_FOR_vsx_set_v2di)
10005 case CODE_FOR_vsx_set_v2di: 11425 {
10006 /* Only allow 1-bit unsigned literals. */ 11426 /* Only allow 1-bit unsigned literals. */
10007 STRIP_NOPS (arg2); 11427 STRIP_NOPS (arg2);
10008 if (TREE_CODE (arg2) != INTEGER_CST 11428 if (TREE_CODE (arg2) != INTEGER_CST
10009 || TREE_INT_CST_LOW (arg2) & ~0x1) 11429 || TREE_INT_CST_LOW (arg2) & ~0x1)
10010 { 11430 {
10011 error ("argument 3 must be a 1-bit unsigned literal"); 11431 error ("argument 3 must be a 1-bit unsigned literal");
10012 return const0_rtx; 11432 return const0_rtx;
10013 } 11433 }
10014 break;
10015
10016 default:
10017 break;
10018 } 11434 }
10019 11435
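The rationale in the comment above can be made concrete with a stripped-down, hypothetical example (not part of rs6000.c): once two CODE_FOR_ macros both degrade to CODE_FOR_nothing, a switch would contain duplicate case values, which C rejects at compile time, while the chained comparisons merely become redundant tests.

    enum { CODE_FOR_nothing = 0 };
    #define CODE_FOR_foo CODE_FOR_nothing   /* pattern unavailable ...       */
    #define CODE_FOR_bar CODE_FOR_nothing   /* ... on this particular target */

    int
    classify (int icode)
    {
    #if 0
      switch (icode)          /* error: duplicate case value (both labels are 0) */
        {
        case CODE_FOR_foo:
        case CODE_FOR_bar:
          return 1;
        }
    #endif
      if (icode == CODE_FOR_foo || icode == CODE_FOR_bar)   /* accepted */
        return 1;
      return 0;
    }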
10020 if (target == 0 11436 if (target == 0
10021 || GET_MODE (target) != tmode 11437 || GET_MODE (target) != tmode
10022 || ! (*insn_data[icode].operand[0].predicate) (target, tmode)) 11438 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
10052 enum insn_code icode; 11468 enum insn_code icode;
10053 11469
10054 switch (fcode) 11470 switch (fcode)
10055 { 11471 {
10056 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi: 11472 case ALTIVEC_BUILTIN_LD_INTERNAL_16qi:
10057 icode = CODE_FOR_vector_load_v16qi; 11473 icode = CODE_FOR_vector_altivec_load_v16qi;
10058 break; 11474 break;
10059 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi: 11475 case ALTIVEC_BUILTIN_LD_INTERNAL_8hi:
10060 icode = CODE_FOR_vector_load_v8hi; 11476 icode = CODE_FOR_vector_altivec_load_v8hi;
10061 break; 11477 break;
10062 case ALTIVEC_BUILTIN_LD_INTERNAL_4si: 11478 case ALTIVEC_BUILTIN_LD_INTERNAL_4si:
10063 icode = CODE_FOR_vector_load_v4si; 11479 icode = CODE_FOR_vector_altivec_load_v4si;
10064 break; 11480 break;
10065 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf: 11481 case ALTIVEC_BUILTIN_LD_INTERNAL_4sf:
10066 icode = CODE_FOR_vector_load_v4sf; 11482 icode = CODE_FOR_vector_altivec_load_v4sf;
11483 break;
11484 case ALTIVEC_BUILTIN_LD_INTERNAL_2df:
11485 icode = CODE_FOR_vector_altivec_load_v2df;
11486 break;
11487 case ALTIVEC_BUILTIN_LD_INTERNAL_2di:
11488 icode = CODE_FOR_vector_altivec_load_v2di;
10067 break; 11489 break;
10068 default: 11490 default:
10069 *expandedp = false; 11491 *expandedp = false;
10070 return NULL_RTX; 11492 return NULL_RTX;
10071 } 11493 }
10105 enum insn_code icode; 11527 enum insn_code icode;
10106 11528
10107 switch (fcode) 11529 switch (fcode)
10108 { 11530 {
10109 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi: 11531 case ALTIVEC_BUILTIN_ST_INTERNAL_16qi:
10110 icode = CODE_FOR_vector_store_v16qi; 11532 icode = CODE_FOR_vector_altivec_store_v16qi;
10111 break; 11533 break;
10112 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi: 11534 case ALTIVEC_BUILTIN_ST_INTERNAL_8hi:
10113 icode = CODE_FOR_vector_store_v8hi; 11535 icode = CODE_FOR_vector_altivec_store_v8hi;
10114 break; 11536 break;
10115 case ALTIVEC_BUILTIN_ST_INTERNAL_4si: 11537 case ALTIVEC_BUILTIN_ST_INTERNAL_4si:
10116 icode = CODE_FOR_vector_store_v4si; 11538 icode = CODE_FOR_vector_altivec_store_v4si;
10117 break; 11539 break;
10118 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf: 11540 case ALTIVEC_BUILTIN_ST_INTERNAL_4sf:
10119 icode = CODE_FOR_vector_store_v4sf; 11541 icode = CODE_FOR_vector_altivec_store_v4sf;
11542 break;
11543 case ALTIVEC_BUILTIN_ST_INTERNAL_2df:
11544 icode = CODE_FOR_vector_altivec_store_v2df;
11545 break;
11546 case ALTIVEC_BUILTIN_ST_INTERNAL_2di:
11547 icode = CODE_FOR_vector_altivec_store_v2di;
10120 break; 11548 break;
10121 default: 11549 default:
10122 *expandedp = false; 11550 *expandedp = false;
10123 return NULL_RTX; 11551 return NULL_RTX;
10124 } 11552 }
10346 *expandedp = true; 11774 *expandedp = true;
10347 11775
10348 switch (fcode) 11776 switch (fcode)
10349 { 11777 {
10350 case ALTIVEC_BUILTIN_STVX: 11778 case ALTIVEC_BUILTIN_STVX:
10351 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx, exp); 11779 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v4si, exp);
10352 case ALTIVEC_BUILTIN_STVEBX: 11780 case ALTIVEC_BUILTIN_STVEBX:
10353 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp); 11781 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
10354 case ALTIVEC_BUILTIN_STVEHX: 11782 case ALTIVEC_BUILTIN_STVEHX:
10355 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp); 11783 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvehx, exp);
10356 case ALTIVEC_BUILTIN_STVEWX: 11784 case ALTIVEC_BUILTIN_STVEWX:
10364 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvlxl, exp); 11792 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvlxl, exp);
10365 case ALTIVEC_BUILTIN_STVRX: 11793 case ALTIVEC_BUILTIN_STVRX:
10366 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvrx, exp); 11794 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvrx, exp);
10367 case ALTIVEC_BUILTIN_STVRXL: 11795 case ALTIVEC_BUILTIN_STVRXL:
10368 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvrxl, exp); 11796 return altivec_expand_stv_builtin (CODE_FOR_altivec_stvrxl, exp);
11797
11798 case VSX_BUILTIN_STXVD2X_V2DF:
11799 return altivec_expand_stv_builtin (CODE_FOR_vsx_store_v2df, exp);
11800 case VSX_BUILTIN_STXVD2X_V2DI:
11801 return altivec_expand_stv_builtin (CODE_FOR_vsx_store_v2di, exp);
11802 case VSX_BUILTIN_STXVW4X_V4SF:
11803 return altivec_expand_stv_builtin (CODE_FOR_vsx_store_v4sf, exp);
11804 case VSX_BUILTIN_STXVW4X_V4SI:
11805 return altivec_expand_stv_builtin (CODE_FOR_vsx_store_v4si, exp);
11806 case VSX_BUILTIN_STXVW4X_V8HI:
11807 return altivec_expand_stv_builtin (CODE_FOR_vsx_store_v8hi, exp);
11808 case VSX_BUILTIN_STXVW4X_V16QI:
11809 return altivec_expand_stv_builtin (CODE_FOR_vsx_store_v16qi, exp);
10369 11810
10370 case ALTIVEC_BUILTIN_MFVSCR: 11811 case ALTIVEC_BUILTIN_MFVSCR:
10371 icode = CODE_FOR_altivec_mfvscr; 11812 icode = CODE_FOR_altivec_mfvscr;
10372 tmode = insn_data[icode].operand[0].mode; 11813 tmode = insn_data[icode].operand[0].mode;
10373 11814
10489 exp, target, false); 11930 exp, target, false);
10490 case ALTIVEC_BUILTIN_LVXL: 11931 case ALTIVEC_BUILTIN_LVXL:
10491 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl, 11932 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl,
10492 exp, target, false); 11933 exp, target, false);
10493 case ALTIVEC_BUILTIN_LVX: 11934 case ALTIVEC_BUILTIN_LVX:
10494 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx, 11935 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v4si,
10495 exp, target, false); 11936 exp, target, false);
10496 case ALTIVEC_BUILTIN_LVLX: 11937 case ALTIVEC_BUILTIN_LVLX:
10497 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvlx, 11938 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvlx,
10498 exp, target, true); 11939 exp, target, true);
10499 case ALTIVEC_BUILTIN_LVLXL: 11940 case ALTIVEC_BUILTIN_LVLXL:
10503 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvrx, 11944 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvrx,
10504 exp, target, true); 11945 exp, target, true);
10505 case ALTIVEC_BUILTIN_LVRXL: 11946 case ALTIVEC_BUILTIN_LVRXL:
10506 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvrxl, 11947 return altivec_expand_lv_builtin (CODE_FOR_altivec_lvrxl,
10507 exp, target, true); 11948 exp, target, true);
11949 case VSX_BUILTIN_LXVD2X_V2DF:
11950 return altivec_expand_lv_builtin (CODE_FOR_vsx_load_v2df,
11951 exp, target, false);
11952 case VSX_BUILTIN_LXVD2X_V2DI:
11953 return altivec_expand_lv_builtin (CODE_FOR_vsx_load_v2di,
11954 exp, target, false);
11955 case VSX_BUILTIN_LXVW4X_V4SF:
11956 return altivec_expand_lv_builtin (CODE_FOR_vsx_load_v4sf,
11957 exp, target, false);
11958 case VSX_BUILTIN_LXVW4X_V4SI:
11959 return altivec_expand_lv_builtin (CODE_FOR_vsx_load_v4si,
11960 exp, target, false);
11961 case VSX_BUILTIN_LXVW4X_V8HI:
11962 return altivec_expand_lv_builtin (CODE_FOR_vsx_load_v8hi,
11963 exp, target, false);
11964 case VSX_BUILTIN_LXVW4X_V16QI:
11965 return altivec_expand_lv_builtin (CODE_FOR_vsx_load_v16qi,
11966 exp, target, false);
11967 break;
10508 default: 11968 default:
10509 break; 11969 break;
10510 /* Fall through. */ 11970 /* Fall through. */
10511 } 11971 }
10512 11972
10958 const struct builtin_description *d; 12418 const struct builtin_description *d;
10959 size_t i; 12419 size_t i;
10960 rtx ret; 12420 rtx ret;
10961 bool success; 12421 bool success;
10962 12422
10963 if (fcode == RS6000_BUILTIN_RECIP) 12423 switch (fcode)
12424 {
12425 case RS6000_BUILTIN_RECIP:
10964 return rs6000_expand_binop_builtin (CODE_FOR_recipdf3, exp, target); 12426 return rs6000_expand_binop_builtin (CODE_FOR_recipdf3, exp, target);
10965 12427
10966 if (fcode == RS6000_BUILTIN_RECIPF) 12428 case RS6000_BUILTIN_RECIPF:
10967 return rs6000_expand_binop_builtin (CODE_FOR_recipsf3, exp, target); 12429 return rs6000_expand_binop_builtin (CODE_FOR_recipsf3, exp, target);
10968 12430
10969 if (fcode == RS6000_BUILTIN_RSQRTF) 12431 case RS6000_BUILTIN_RSQRTF:
10970 return rs6000_expand_unop_builtin (CODE_FOR_rsqrtsf2, exp, target); 12432 return rs6000_expand_unop_builtin (CODE_FOR_rsqrtsf2, exp, target);
10971 12433
10972 if (fcode == RS6000_BUILTIN_BSWAP_HI) 12434 case RS6000_BUILTIN_RSQRT:
10973 return rs6000_expand_unop_builtin (CODE_FOR_bswaphi2, exp, target); 12435 return rs6000_expand_unop_builtin (CODE_FOR_rsqrtdf2, exp, target);
10974 12436
10975 if (fcode == POWER7_BUILTIN_BPERMD) 12437 case RS6000_BUILTIN_BSWAP_HI:
10976 return rs6000_expand_binop_builtin (((TARGET_64BIT) 12438 return rs6000_expand_unop_builtin (CODE_FOR_bswaphi2, exp, target);
10977 ? CODE_FOR_bpermd_di 12439
10978 : CODE_FOR_bpermd_si), exp, target); 12440 case POWER7_BUILTIN_BPERMD:
10979 12441 return rs6000_expand_binop_builtin (((TARGET_64BIT)
10980 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_LOAD 12442 ? CODE_FOR_bpermd_di
10981 || fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE) 12443 : CODE_FOR_bpermd_si), exp, target);
10982 { 12444
10983 int icode = (int) CODE_FOR_altivec_lvsr; 12445 case ALTIVEC_BUILTIN_MASK_FOR_LOAD:
10984 enum machine_mode tmode = insn_data[icode].operand[0].mode; 12446 case ALTIVEC_BUILTIN_MASK_FOR_STORE:
10985 enum machine_mode mode = insn_data[icode].operand[1].mode; 12447 {
10986 tree arg; 12448 int icode = (int) CODE_FOR_altivec_lvsr;
10987 rtx op, addr, pat; 12449 enum machine_mode tmode = insn_data[icode].operand[0].mode;
10988 12450 enum machine_mode mode = insn_data[icode].operand[1].mode;
10989 gcc_assert (TARGET_ALTIVEC); 12451 tree arg;
10990 12452 rtx op, addr, pat;
10991 arg = CALL_EXPR_ARG (exp, 0); 12453
10992 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == POINTER_TYPE); 12454 gcc_assert (TARGET_ALTIVEC);
10993 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL); 12455
10994 addr = memory_address (mode, op); 12456 arg = CALL_EXPR_ARG (exp, 0);
10995 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE) 12457 gcc_assert (POINTER_TYPE_P (TREE_TYPE (arg)));
10996 op = addr; 12458 op = expand_expr (arg, NULL_RTX, Pmode, EXPAND_NORMAL);
10997 else 12459 addr = memory_address (mode, op);
10998 { 12460 if (fcode == ALTIVEC_BUILTIN_MASK_FOR_STORE)
10999 /* For the load case need to negate the address. */ 12461 op = addr;
11000 op = gen_reg_rtx (GET_MODE (addr)); 12462 else
11001 emit_insn (gen_rtx_SET (VOIDmode, op, 12463 {
11002 gen_rtx_NEG (GET_MODE (addr), addr))); 12464 /* For the load case need to negate the address. */
11003 } 12465 op = gen_reg_rtx (GET_MODE (addr));
11004 op = gen_rtx_MEM (mode, op); 12466 emit_insn (gen_rtx_SET (VOIDmode, op,
11005 12467 gen_rtx_NEG (GET_MODE (addr), addr)));
11006 if (target == 0 12468 }
11007 || GET_MODE (target) != tmode 12469 op = gen_rtx_MEM (mode, op);
11008 || ! (*insn_data[icode].operand[0].predicate) (target, tmode)) 12470
11009 target = gen_reg_rtx (tmode); 12471 if (target == 0
11010 12472 || GET_MODE (target) != tmode
11011 /*pat = gen_altivec_lvsr (target, op);*/ 12473 || ! (*insn_data[icode].operand[0].predicate) (target, tmode))
11012 pat = GEN_FCN (icode) (target, op); 12474 target = gen_reg_rtx (tmode);
11013 if (!pat) 12475
11014 return 0; 12476 /*pat = gen_altivec_lvsr (target, op);*/
11015 emit_insn (pat); 12477 pat = GEN_FCN (icode) (target, op);
11016 12478 if (!pat)
11017 return target; 12479 return 0;
11018 } 12480 emit_insn (pat);
11019 12481
12482 return target;
12483 }
12484
12485 case ALTIVEC_BUILTIN_VCFUX:
12486 case ALTIVEC_BUILTIN_VCFSX:
12487 case ALTIVEC_BUILTIN_VCTUXS:
12488 case ALTIVEC_BUILTIN_VCTSXS:
11020 /* FIXME: There's got to be a nicer way to handle this case than 12489 /* FIXME: There's got to be a nicer way to handle this case than
11021 constructing a new CALL_EXPR. */ 12490 constructing a new CALL_EXPR. */
11022 if (fcode == ALTIVEC_BUILTIN_VCFUX
11023 || fcode == ALTIVEC_BUILTIN_VCFSX
11024 || fcode == ALTIVEC_BUILTIN_VCTUXS
11025 || fcode == ALTIVEC_BUILTIN_VCTSXS)
11026 {
11027 if (call_expr_nargs (exp) == 1) 12491 if (call_expr_nargs (exp) == 1)
11028 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp), 12492 {
11029 2, CALL_EXPR_ARG (exp, 0), integer_zero_node); 12493 exp = build_call_nary (TREE_TYPE (exp), CALL_EXPR_FN (exp),
12494 2, CALL_EXPR_ARG (exp, 0), integer_zero_node);
12495 }
12496 break;
12497
12498 default:
12499 break;
11030 } 12500 }
11031 12501
11032 if (TARGET_ALTIVEC) 12502 if (TARGET_ALTIVEC)
11033 { 12503 {
11034 ret = altivec_expand_builtin (exp, target, &success); 12504 ret = altivec_expand_builtin (exp, target, &success);
11076 12546
11077 static void 12547 static void
11078 rs6000_init_builtins (void) 12548 rs6000_init_builtins (void)
11079 { 12549 {
11080 tree tdecl; 12550 tree tdecl;
12551 tree ftype;
11081 12552
11082 V2SI_type_node = build_vector_type (intSI_type_node, 2); 12553 V2SI_type_node = build_vector_type (intSI_type_node, 2);
11083 V2SF_type_node = build_vector_type (float_type_node, 2); 12554 V2SF_type_node = build_vector_type (float_type_node, 2);
11084 V2DI_type_node = build_vector_type (intDI_type_node, 2); 12555 V2DI_type_node = build_vector_type (intDI_type_node, 2);
11085 V2DF_type_node = build_vector_type (double_type_node, 2); 12556 V2DF_type_node = build_vector_type (double_type_node, 2);
11109 bool_long_type_node = build_distinct_type_copy (unsigned_intDI_type_node); 12580 bool_long_type_node = build_distinct_type_copy (unsigned_intDI_type_node);
11110 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node); 12581 pixel_type_node = build_distinct_type_copy (unsigned_intHI_type_node);
11111 12582
11112 long_integer_type_internal_node = long_integer_type_node; 12583 long_integer_type_internal_node = long_integer_type_node;
11113 long_unsigned_type_internal_node = long_unsigned_type_node; 12584 long_unsigned_type_internal_node = long_unsigned_type_node;
12585 long_long_integer_type_internal_node = long_long_integer_type_node;
12586 long_long_unsigned_type_internal_node = long_long_unsigned_type_node;
11114 intQI_type_internal_node = intQI_type_node; 12587 intQI_type_internal_node = intQI_type_node;
11115 uintQI_type_internal_node = unsigned_intQI_type_node; 12588 uintQI_type_internal_node = unsigned_intQI_type_node;
11116 intHI_type_internal_node = intHI_type_node; 12589 intHI_type_internal_node = intHI_type_node;
11117 uintHI_type_internal_node = unsigned_intHI_type_node; 12590 uintHI_type_internal_node = unsigned_intHI_type_node;
11118 intSI_type_internal_node = intSI_type_node; 12591 intSI_type_internal_node = intSI_type_node;
11119 uintSI_type_internal_node = unsigned_intSI_type_node; 12592 uintSI_type_internal_node = unsigned_intSI_type_node;
11120 intDI_type_internal_node = intDI_type_node; 12593 intDI_type_internal_node = intDI_type_node;
11121 uintDI_type_internal_node = unsigned_intDI_type_node; 12594 uintDI_type_internal_node = unsigned_intDI_type_node;
11122 float_type_internal_node = float_type_node; 12595 float_type_internal_node = float_type_node;
11123 double_type_internal_node = float_type_node; 12596 double_type_internal_node = double_type_node;
11124 void_type_internal_node = void_type_node; 12597 void_type_internal_node = void_type_node;
11125 12598
11126 /* Initialize the modes for builtin_function_type, mapping a machine mode to 12599 /* Initialize the modes for builtin_function_type, mapping a machine mode to
11127 tree type node. */ 12600 tree type node. */
11128 builtin_mode_to_type[QImode][0] = integer_type_node; 12601 builtin_mode_to_type[QImode][0] = integer_type_node;
11265 spe_init_builtins (); 12738 spe_init_builtins ();
11266 if (TARGET_ALTIVEC) 12739 if (TARGET_ALTIVEC)
11267 altivec_init_builtins (); 12740 altivec_init_builtins ();
11268 if (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT || TARGET_VSX) 12741 if (TARGET_ALTIVEC || TARGET_SPE || TARGET_PAIRED_FLOAT || TARGET_VSX)
11269 rs6000_common_init_builtins (); 12742 rs6000_common_init_builtins ();
11270 if (TARGET_PPC_GFXOPT) 12743 if (TARGET_FRE)
11271 { 12744 {
11272 tree ftype = builtin_function_type (SFmode, SFmode, SFmode, VOIDmode, 12745 ftype = builtin_function_type (DFmode, DFmode, DFmode, VOIDmode,
11273 RS6000_BUILTIN_RECIPF, 12746 RS6000_BUILTIN_RECIP,
11274 "__builtin_recipdivf"); 12747 "__builtin_recipdiv");
12748 def_builtin (MASK_POPCNTB, "__builtin_recipdiv", ftype,
12749 RS6000_BUILTIN_RECIP);
12750 }
12751 if (TARGET_FRES)
12752 {
12753 ftype = builtin_function_type (SFmode, SFmode, SFmode, VOIDmode,
12754 RS6000_BUILTIN_RECIPF,
12755 "__builtin_recipdivf");
11275 def_builtin (MASK_PPC_GFXOPT, "__builtin_recipdivf", ftype, 12756 def_builtin (MASK_PPC_GFXOPT, "__builtin_recipdivf", ftype,
11276 RS6000_BUILTIN_RECIPF); 12757 RS6000_BUILTIN_RECIPF);
11277 12758 }
12759 if (TARGET_FRSQRTE)
12760 {
12761 ftype = builtin_function_type (DFmode, DFmode, VOIDmode, VOIDmode,
12762 RS6000_BUILTIN_RSQRT,
12763 "__builtin_rsqrt");
12764 def_builtin (MASK_PPC_GFXOPT, "__builtin_rsqrt", ftype,
12765 RS6000_BUILTIN_RSQRT);
12766 }
12767 if (TARGET_FRSQRTES)
12768 {
11278 ftype = builtin_function_type (SFmode, SFmode, VOIDmode, VOIDmode, 12769 ftype = builtin_function_type (SFmode, SFmode, VOIDmode, VOIDmode,
11279 RS6000_BUILTIN_RSQRTF, 12770 RS6000_BUILTIN_RSQRTF,
11280 "__builtin_rsqrtf"); 12771 "__builtin_rsqrtf");
11281 def_builtin (MASK_PPC_GFXOPT, "__builtin_rsqrtf", ftype, 12772 def_builtin (MASK_PPC_GFXOPT, "__builtin_rsqrtf", ftype,
11282 RS6000_BUILTIN_RSQRTF); 12773 RS6000_BUILTIN_RSQRTF);
11283 }
11284 if (TARGET_POPCNTB)
11285 {
11286 tree ftype = builtin_function_type (DFmode, DFmode, DFmode, VOIDmode,
11287 RS6000_BUILTIN_RECIP,
11288 "__builtin_recipdiv");
11289 def_builtin (MASK_POPCNTB, "__builtin_recipdiv", ftype,
11290 RS6000_BUILTIN_RECIP);
11291
11292 } 12774 }
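The scalar builtins registered in the blocks above are directly callable from C when the corresponding TARGET_FRE/TARGET_FRSQRTE style conditions hold (for example with -mcpu=power7). A minimal sketch using the names defined here; the comments describe the intended reciprocal-estimate expansions, which are not spelled out in this hunk:

    double
    fast_div (double a, double b)
    {
      return __builtin_recipdiv (a, b);   /* a/b via reciprocal estimate plus refinement */
    }

    double
    fast_inv_sqrt (double x)
    {
      return __builtin_rsqrt (x);         /* 1/sqrt(x) via an frsqrte-style estimate */
    }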
11293 if (TARGET_POPCNTD) 12775 if (TARGET_POPCNTD)
11294 { 12776 {
11295 enum machine_mode mode = (TARGET_64BIT) ? DImode : SImode; 12777 enum machine_mode mode = (TARGET_64BIT) ? DImode : SImode;
11296 tree ftype = builtin_function_type (mode, mode, mode, VOIDmode, 12778 tree ftype = builtin_function_type (mode, mode, mode, VOIDmode,
11641 const struct builtin_description *d; 13123 const struct builtin_description *d;
11642 const struct builtin_description_predicates *dp; 13124 const struct builtin_description_predicates *dp;
11643 size_t i; 13125 size_t i;
11644 tree ftype; 13126 tree ftype;
11645 13127
11646 tree pfloat_type_node = build_pointer_type (float_type_node);
11647 tree pint_type_node = build_pointer_type (integer_type_node);
11648 tree pshort_type_node = build_pointer_type (short_integer_type_node);
11649 tree pchar_type_node = build_pointer_type (char_type_node);
11650
11651 tree pvoid_type_node = build_pointer_type (void_type_node); 13128 tree pvoid_type_node = build_pointer_type (void_type_node);
11652 13129
11653 tree pcfloat_type_node = build_pointer_type (build_qualified_type (float_type_node, TYPE_QUAL_CONST)); 13130 tree pcvoid_type_node
11654 tree pcint_type_node = build_pointer_type (build_qualified_type (integer_type_node, TYPE_QUAL_CONST)); 13131 = build_pointer_type (build_qualified_type (void_type_node,
11655 tree pcshort_type_node = build_pointer_type (build_qualified_type (short_integer_type_node, TYPE_QUAL_CONST)); 13132 TYPE_QUAL_CONST));
11656 tree pcchar_type_node = build_pointer_type (build_qualified_type (char_type_node, TYPE_QUAL_CONST));
11657
11658 tree pcvoid_type_node = build_pointer_type (build_qualified_type (void_type_node, TYPE_QUAL_CONST));
11659 13133
11660 tree int_ftype_opaque 13134 tree int_ftype_opaque
11661 = build_function_type_list (integer_type_node, 13135 = build_function_type_list (integer_type_node,
11662 opaque_V4SI_type_node, NULL_TREE); 13136 opaque_V4SI_type_node, NULL_TREE);
11663 tree opaque_ftype_opaque 13137 tree opaque_ftype_opaque
11676 opaque_V4SI_type_node, NULL_TREE); 13150 opaque_V4SI_type_node, NULL_TREE);
11677 tree int_ftype_int_v4si_v4si 13151 tree int_ftype_int_v4si_v4si
11678 = build_function_type_list (integer_type_node, 13152 = build_function_type_list (integer_type_node,
11679 integer_type_node, V4SI_type_node, 13153 integer_type_node, V4SI_type_node,
11680 V4SI_type_node, NULL_TREE); 13154 V4SI_type_node, NULL_TREE);
11681 tree v4sf_ftype_pcfloat
11682 = build_function_type_list (V4SF_type_node, pcfloat_type_node, NULL_TREE);
11683 tree void_ftype_pfloat_v4sf
11684 = build_function_type_list (void_type_node,
11685 pfloat_type_node, V4SF_type_node, NULL_TREE);
11686 tree v4si_ftype_pcint
11687 = build_function_type_list (V4SI_type_node, pcint_type_node, NULL_TREE);
11688 tree void_ftype_pint_v4si
11689 = build_function_type_list (void_type_node,
11690 pint_type_node, V4SI_type_node, NULL_TREE);
11691 tree v8hi_ftype_pcshort
11692 = build_function_type_list (V8HI_type_node, pcshort_type_node, NULL_TREE);
11693 tree void_ftype_pshort_v8hi
11694 = build_function_type_list (void_type_node,
11695 pshort_type_node, V8HI_type_node, NULL_TREE);
11696 tree v16qi_ftype_pcchar
11697 = build_function_type_list (V16QI_type_node, pcchar_type_node, NULL_TREE);
11698 tree void_ftype_pchar_v16qi
11699 = build_function_type_list (void_type_node,
11700 pchar_type_node, V16QI_type_node, NULL_TREE);
11701 tree void_ftype_v4si 13155 tree void_ftype_v4si
11702 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE); 13156 = build_function_type_list (void_type_node, V4SI_type_node, NULL_TREE);
11703 tree v8hi_ftype_void 13157 tree v8hi_ftype_void
11704 = build_function_type (V8HI_type_node, void_list_node); 13158 = build_function_type (V8HI_type_node, void_list_node);
11705 tree void_ftype_void 13159 tree void_ftype_void
11707 tree void_ftype_int 13161 tree void_ftype_int
11708 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE); 13162 = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
11709 13163
11710 tree opaque_ftype_long_pcvoid 13164 tree opaque_ftype_long_pcvoid
11711 = build_function_type_list (opaque_V4SI_type_node, 13165 = build_function_type_list (opaque_V4SI_type_node,
11712 long_integer_type_node, pcvoid_type_node, NULL_TREE); 13166 long_integer_type_node, pcvoid_type_node,
13167 NULL_TREE);
11713 tree v16qi_ftype_long_pcvoid 13168 tree v16qi_ftype_long_pcvoid
11714 = build_function_type_list (V16QI_type_node, 13169 = build_function_type_list (V16QI_type_node,
11715 long_integer_type_node, pcvoid_type_node, NULL_TREE); 13170 long_integer_type_node, pcvoid_type_node,
13171 NULL_TREE);
11716 tree v8hi_ftype_long_pcvoid 13172 tree v8hi_ftype_long_pcvoid
11717 = build_function_type_list (V8HI_type_node, 13173 = build_function_type_list (V8HI_type_node,
11718 long_integer_type_node, pcvoid_type_node, NULL_TREE); 13174 long_integer_type_node, pcvoid_type_node,
13175 NULL_TREE);
11719 tree v4si_ftype_long_pcvoid 13176 tree v4si_ftype_long_pcvoid
11720 = build_function_type_list (V4SI_type_node, 13177 = build_function_type_list (V4SI_type_node,
11721 long_integer_type_node, pcvoid_type_node, NULL_TREE); 13178 long_integer_type_node, pcvoid_type_node,
13179 NULL_TREE);
13180 tree v4sf_ftype_long_pcvoid
13181 = build_function_type_list (V4SF_type_node,
13182 long_integer_type_node, pcvoid_type_node,
13183 NULL_TREE);
13184 tree v2df_ftype_long_pcvoid
13185 = build_function_type_list (V2DF_type_node,
13186 long_integer_type_node, pcvoid_type_node,
13187 NULL_TREE);
13188 tree v2di_ftype_long_pcvoid
13189 = build_function_type_list (V2DI_type_node,
13190 long_integer_type_node, pcvoid_type_node,
13191 NULL_TREE);
11722 13192
11723 tree void_ftype_opaque_long_pvoid 13193 tree void_ftype_opaque_long_pvoid
11724 = build_function_type_list (void_type_node, 13194 = build_function_type_list (void_type_node,
11725 opaque_V4SI_type_node, long_integer_type_node, 13195 opaque_V4SI_type_node, long_integer_type_node,
11726 pvoid_type_node, NULL_TREE); 13196 pvoid_type_node, NULL_TREE);
11733 V16QI_type_node, long_integer_type_node, 13203 V16QI_type_node, long_integer_type_node,
11734 pvoid_type_node, NULL_TREE); 13204 pvoid_type_node, NULL_TREE);
11735 tree void_ftype_v8hi_long_pvoid 13205 tree void_ftype_v8hi_long_pvoid
11736 = build_function_type_list (void_type_node, 13206 = build_function_type_list (void_type_node,
11737 V8HI_type_node, long_integer_type_node, 13207 V8HI_type_node, long_integer_type_node,
13208 pvoid_type_node, NULL_TREE);
13209 tree void_ftype_v4sf_long_pvoid
13210 = build_function_type_list (void_type_node,
13211 V4SF_type_node, long_integer_type_node,
13212 pvoid_type_node, NULL_TREE);
13213 tree void_ftype_v2df_long_pvoid
13214 = build_function_type_list (void_type_node,
13215 V2DF_type_node, long_integer_type_node,
13216 pvoid_type_node, NULL_TREE);
13217 tree void_ftype_v2di_long_pvoid
13218 = build_function_type_list (void_type_node,
13219 V2DI_type_node, long_integer_type_node,
11738 pvoid_type_node, NULL_TREE); 13220 pvoid_type_node, NULL_TREE);
11739 tree int_ftype_int_v8hi_v8hi 13221 tree int_ftype_int_v8hi_v8hi
11740 = build_function_type_list (integer_type_node, 13222 = build_function_type_list (integer_type_node,
11741 integer_type_node, V8HI_type_node, 13223 integer_type_node, V8HI_type_node,
11742 V8HI_type_node, NULL_TREE); 13224 V8HI_type_node, NULL_TREE);
11765 tree void_ftype_pcvoid_int_int 13247 tree void_ftype_pcvoid_int_int
11766 = build_function_type_list (void_type_node, 13248 = build_function_type_list (void_type_node,
11767 pcvoid_type_node, integer_type_node, 13249 pcvoid_type_node, integer_type_node,
11768 integer_type_node, NULL_TREE); 13250 integer_type_node, NULL_TREE);
11769 13251
11770 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4sf", v4sf_ftype_pcfloat,
11771 ALTIVEC_BUILTIN_LD_INTERNAL_4sf);
11772 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4sf", void_ftype_pfloat_v4sf,
11773 ALTIVEC_BUILTIN_ST_INTERNAL_4sf);
11774 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_4si", v4si_ftype_pcint,
11775 ALTIVEC_BUILTIN_LD_INTERNAL_4si);
11776 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_4si", void_ftype_pint_v4si,
11777 ALTIVEC_BUILTIN_ST_INTERNAL_4si);
11778 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_8hi", v8hi_ftype_pcshort,
11779 ALTIVEC_BUILTIN_LD_INTERNAL_8hi);
11780 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_8hi", void_ftype_pshort_v8hi,
11781 ALTIVEC_BUILTIN_ST_INTERNAL_8hi);
11782 def_builtin (MASK_ALTIVEC, "__builtin_altivec_ld_internal_16qi", v16qi_ftype_pcchar,
11783 ALTIVEC_BUILTIN_LD_INTERNAL_16qi);
11784 def_builtin (MASK_ALTIVEC, "__builtin_altivec_st_internal_16qi", void_ftype_pchar_v16qi,
11785 ALTIVEC_BUILTIN_ST_INTERNAL_16qi);
11786 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR); 13252 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mtvscr", void_ftype_v4si, ALTIVEC_BUILTIN_MTVSCR);
11787 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR); 13253 def_builtin (MASK_ALTIVEC, "__builtin_altivec_mfvscr", v8hi_ftype_void, ALTIVEC_BUILTIN_MFVSCR);
11788 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL); 13254 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dssall", void_ftype_void, ALTIVEC_BUILTIN_DSSALL);
11789 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS); 13255 def_builtin (MASK_ALTIVEC, "__builtin_altivec_dss", void_ftype_int, ALTIVEC_BUILTIN_DSS);
11790 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL); 13256 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvsl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVSL);
11812 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL); 13278 def_builtin (MASK_ALTIVEC, "__builtin_vec_stl", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STL);
11813 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX); 13279 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvewx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEWX);
11814 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX); 13280 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvebx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEBX);
11815 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX); 13281 def_builtin (MASK_ALTIVEC, "__builtin_vec_stvehx", void_ftype_opaque_long_pvoid, ALTIVEC_BUILTIN_VEC_STVEHX);
11816 13282
13283 def_builtin (MASK_VSX, "__builtin_vsx_lxvd2x_v2df", v2df_ftype_long_pcvoid,
13284 VSX_BUILTIN_LXVD2X_V2DF);
13285 def_builtin (MASK_VSX, "__builtin_vsx_lxvd2x_v2di", v2di_ftype_long_pcvoid,
13286 VSX_BUILTIN_LXVD2X_V2DI);
13287 def_builtin (MASK_VSX, "__builtin_vsx_lxvw4x_v4sf", v4sf_ftype_long_pcvoid,
13288 VSX_BUILTIN_LXVW4X_V4SF);
13289 def_builtin (MASK_VSX, "__builtin_vsx_lxvw4x_v4si", v4si_ftype_long_pcvoid,
13290 VSX_BUILTIN_LXVW4X_V4SI);
13291 def_builtin (MASK_VSX, "__builtin_vsx_lxvw4x_v8hi",
13292 v8hi_ftype_long_pcvoid, VSX_BUILTIN_LXVW4X_V8HI);
13293 def_builtin (MASK_VSX, "__builtin_vsx_lxvw4x_v16qi",
13294 v16qi_ftype_long_pcvoid, VSX_BUILTIN_LXVW4X_V16QI);
13295 def_builtin (MASK_VSX, "__builtin_vsx_stxvd2x_v2df",
13296 void_ftype_v2df_long_pvoid, VSX_BUILTIN_STXVD2X_V2DF);
13297 def_builtin (MASK_VSX, "__builtin_vsx_stxvd2x_v2di",
13298 void_ftype_v2di_long_pvoid, VSX_BUILTIN_STXVD2X_V2DI);
13299 def_builtin (MASK_VSX, "__builtin_vsx_stxvw4x_v4sf",
13300 void_ftype_v4sf_long_pvoid, VSX_BUILTIN_STXVW4X_V4SF);
13301 def_builtin (MASK_VSX, "__builtin_vsx_stxvw4x_v4si",
13302 void_ftype_v4si_long_pvoid, VSX_BUILTIN_STXVW4X_V4SI);
13303 def_builtin (MASK_VSX, "__builtin_vsx_stxvw4x_v8hi",
13304 void_ftype_v8hi_long_pvoid, VSX_BUILTIN_STXVW4X_V8HI);
13305 def_builtin (MASK_VSX, "__builtin_vsx_stxvw4x_v16qi",
13306 void_ftype_v16qi_long_pvoid, VSX_BUILTIN_STXVW4X_V16QI);
13307 def_builtin (MASK_VSX, "__builtin_vec_vsx_ld", opaque_ftype_long_pcvoid,
13308 VSX_BUILTIN_VEC_LD);
13309 def_builtin (MASK_VSX, "__builtin_vec_vsx_st", void_ftype_opaque_long_pvoid,
13310 VSX_BUILTIN_VEC_ST);
13311
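The new LXV*/STXV* entries give the front end something for the VSX load/store intrinsics to resolve to. A minimal usage sketch, assuming altivec.h maps vec_vsx_ld/vec_vsx_st onto these builtins and compiling with -mvsx:

    #include <altivec.h>

    /* Load two doubles (lxvd2x), scale them, store them back (stxvd2x).  */
    void
    scale2 (double *p, vector double s)
    {
      vector double v = vec_vsx_ld (0, p);
      vec_vsx_st (v * s, 0, p);
    }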
11817 if (rs6000_cpu == PROCESSOR_CELL) 13312 if (rs6000_cpu == PROCESSOR_CELL)
11818 { 13313 {
11819 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvlx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVLX); 13314 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvlx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVLX);
11820 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvlxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVLXL); 13315 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvlxl", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVLXL);
11821 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvrx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVRX); 13316 def_builtin (MASK_ALTIVEC, "__builtin_altivec_lvrx", v16qi_ftype_long_pcvoid, ALTIVEC_BUILTIN_LVRX);
12244 } 13739 }
12245 13740
12246 found = htab_find_slot (builtin_hash_table, &h, INSERT); 13741 found = htab_find_slot (builtin_hash_table, &h, INSERT);
12247 if (*found == NULL) 13742 if (*found == NULL)
12248 { 13743 {
12249 h2 = GGC_NEW (struct builtin_hash_struct); 13744 h2 = ggc_alloc_builtin_hash_struct ();
12250 *h2 = h; 13745 *h2 = h;
12251 *found = (void *)h2; 13746 *found = (void *)h2;
12252 args = void_list_node; 13747 args = void_list_node;
12253 13748
12254 for (i = num_args - 1; i >= 0; i--) 13749 for (i = num_args - 1; i >= 0; i--)
12650 /* Anything to move? */ 14145 /* Anything to move? */
12651 bytes = INTVAL (bytes_rtx); 14146 bytes = INTVAL (bytes_rtx);
12652 if (bytes <= 0) 14147 if (bytes <= 0)
12653 return 1; 14148 return 1;
12654 14149
12655 /* store_one_arg depends on expand_block_move to handle at least the size of 14150 if (bytes > rs6000_block_move_inline_limit)
12656 reg_parm_stack_space. */
12657 if (bytes > (TARGET_POWERPC64 ? 64 : 32))
12658 return 0; 14151 return 0;
12659 14152
12660 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes) 14153 for (offset = 0; bytes > 0; offset += move_bytes, bytes -= move_bytes)
12661 { 14154 {
12662 union { 14155 union {
13254 case PARM_DECL: 14747 case PARM_DECL:
13255 case FIELD_DECL: 14748 case FIELD_DECL:
13256 case RESULT_DECL: 14749 case RESULT_DECL:
13257 case SSA_NAME: 14750 case SSA_NAME:
13258 case REAL_CST: 14751 case REAL_CST:
13259 case INDIRECT_REF: 14752 case MEM_REF:
13260 case ALIGN_INDIRECT_REF:
13261 case MISALIGNED_INDIRECT_REF:
13262 case VIEW_CONVERT_EXPR: 14753 case VIEW_CONVERT_EXPR:
13263 if (TYPE_MODE (TREE_TYPE (*tp)) == SDmode) 14754 if (TYPE_MODE (TREE_TYPE (*tp)) == SDmode)
13264 return *tp; 14755 return *tp;
13265 break; 14756 break;
13266 default: 14757 default:
13300 needed for the immediate register. 14791 needed for the immediate register.
13301 14792
13302 For VSX and Altivec, we may need a register to convert sp+offset into 14793 For VSX and Altivec, we may need a register to convert sp+offset into
13303 reg+sp. */ 14794 reg+sp. */
13304 14795
13305 static enum reg_class 14796 static reg_class_t
13306 rs6000_secondary_reload (bool in_p, 14797 rs6000_secondary_reload (bool in_p,
13307 rtx x, 14798 rtx x,
13308 enum reg_class rclass, 14799 reg_class_t rclass_i,
13309 enum machine_mode mode, 14800 enum machine_mode mode,
13310 secondary_reload_info *sri) 14801 secondary_reload_info *sri)
13311 { 14802 {
13312 enum reg_class ret = ALL_REGS; 14803 enum reg_class rclass = (enum reg_class) rclass_i;
14804 reg_class_t ret = ALL_REGS;
13313 enum insn_code icode; 14805 enum insn_code icode;
13314 bool default_p = false; 14806 bool default_p = false;
13315 14807
13316 sri->icode = CODE_FOR_nothing; 14808 sri->icode = CODE_FOR_nothing;
13317 14809
13699 15191
13700 We need two IRA_COVER_CLASSES, one for pre-VSX, and the other for VSX to 15192 We need two IRA_COVER_CLASSES, one for pre-VSX, and the other for VSX to
13701 account for the Altivec and Floating registers being subsets of the VSX 15193 account for the Altivec and Floating registers being subsets of the VSX
13702 register set under VSX, but distinct register sets on pre-VSX machines. */ 15194 register set under VSX, but distinct register sets on pre-VSX machines. */
13703 15195
13704 static const enum reg_class * 15196 static const reg_class_t *
13705 rs6000_ira_cover_classes (void) 15197 rs6000_ira_cover_classes (void)
13706 { 15198 {
13707 static const enum reg_class cover_pre_vsx[] = IRA_COVER_CLASSES_PRE_VSX; 15199 static const reg_class_t cover_pre_vsx[] = IRA_COVER_CLASSES_PRE_VSX;
13708 static const enum reg_class cover_vsx[] = IRA_COVER_CLASSES_VSX; 15200 static const reg_class_t cover_vsx[] = IRA_COVER_CLASSES_VSX;
13709 15201
13710 return (TARGET_VSX) ? cover_vsx : cover_pre_vsx; 15202 return (TARGET_VSX) ? cover_vsx : cover_pre_vsx;
13711 } 15203 }
13712 15204
13713 /* Allocate a 64-bit stack slot to be used for copying SDmode 15205 /* Allocate a 64-bit stack slot to be used for copying SDmode
13734 return; 15226 return;
13735 } 15227 }
13736 } 15228 }
13737 15229
13738 /* Check for any SDmode parameters of the function. */ 15230 /* Check for any SDmode parameters of the function. */
13739 for (t = DECL_ARGUMENTS (cfun->decl); t; t = TREE_CHAIN (t)) 15231 for (t = DECL_ARGUMENTS (cfun->decl); t; t = DECL_CHAIN (t))
13740 { 15232 {
13741 if (TREE_TYPE (t) == error_mark_node) 15233 if (TREE_TYPE (t) == error_mark_node)
13742 continue; 15234 continue;
13743 15235
13744 if (TYPE_MODE (TREE_TYPE (t)) == SDmode 15236 if (TYPE_MODE (TREE_TYPE (t)) == SDmode
13795 return NO_REGS; 15287 return NO_REGS;
13796 15288
13797 if (GET_MODE_CLASS (mode) == MODE_INT && rclass == NON_SPECIAL_REGS) 15289 if (GET_MODE_CLASS (mode) == MODE_INT && rclass == NON_SPECIAL_REGS)
13798 return GENERAL_REGS; 15290 return GENERAL_REGS;
13799 15291
13800 /* For VSX, prefer the traditional registers for DF if the address is of the 15292 /* For VSX, prefer the traditional registers for 64-bit values because we can
13801 form reg+offset because we can use the non-VSX loads. Prefer the Altivec 15293 use the non-VSX loads. Prefer the Altivec registers if Altivec is
13802 registers if Altivec is handling the vector operations (i.e. V16QI, V8HI, 15294 handling the vector operations (i.e. V16QI, V8HI, and V4SI), or if we
13803 and V4SI). */ 15295 prefer Altivec loads. */
13804 if (rclass == VSX_REGS && VECTOR_MEM_VSX_P (mode)) 15296 if (rclass == VSX_REGS)
13805 { 15297 {
13806 if (mode == DFmode && GET_CODE (x) == MEM) 15298 if (GET_MODE_SIZE (mode) <= 8)
13807 { 15299 return FLOAT_REGS;
13808 rtx addr = XEXP (x, 0); 15300
13809 15301 if (VECTOR_UNIT_ALTIVEC_P (mode) || VECTOR_MEM_ALTIVEC_P (mode))
13810 if (legitimate_indirect_address_p (addr, false)) /* reg */
13811 return VSX_REGS;
13812
13813 if (legitimate_indexed_address_p (addr, false)) /* reg+reg */
13814 return VSX_REGS;
13815
13816 if (GET_CODE (addr) == PRE_MODIFY
13817 && legitimate_indexed_address_p (XEXP (addr, 0), false))
13818 return VSX_REGS;
13819
13820 return FLOAT_REGS;
13821 }
13822
13823 if (VECTOR_UNIT_ALTIVEC_P (mode))
13824 return ALTIVEC_REGS; 15302 return ALTIVEC_REGS;
13825 15303
13826 return rclass; 15304 return rclass;
13827 } 15305 }
13828 15306
14147 crtl->uses_pic_offset_table = 1; 15625 crtl->uses_pic_offset_table = 1;
14148 15626
14149 return pic_offset_table_rtx; 15627 return pic_offset_table_rtx;
14150 } 15628 }
14151 15629
15630 static rs6000_stack_t stack_info;
15631
14152 /* Function to init struct machine_function. 15632 /* Function to init struct machine_function.
14153 This will be called, via a pointer variable, 15633 This will be called, via a pointer variable,
14154 from push_function_context. */ 15634 from push_function_context. */
14155 15635
14156 static struct machine_function * 15636 static struct machine_function *
14157 rs6000_init_machine_status (void) 15637 rs6000_init_machine_status (void)
14158 { 15638 {
14159 return GGC_CNEW (machine_function); 15639 stack_info.reload_completed = 0;
15640 return ggc_alloc_cleared_machine_function ();
14160 } 15641 }
14161 15642
14162 /* These macros test for integers and extract the low-order bits. */ 15643 /* These macros test for integers and extract the low-order bits. */
14163 #define INT_P(X) \ 15644 #define INT_P(X) \
14164 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \ 15645 ((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
14294 case ABI_V4: 15775 case ABI_V4:
14295 case ABI_DARWIN: 15776 case ABI_DARWIN:
14296 break; 15777 break;
14297 } 15778 }
14298 } 15779 }
14299 if (TARGET_AIX) 15780
14300 RS6000_OUTPUT_BASENAME (file, fname); 15781 RS6000_OUTPUT_BASENAME (file, fname);
14301 else
14302 assemble_name (file, fname);
14303 } 15782 }
14304 15783
14305 /* Print an operand. Recognize special options, documented below. */ 15784 /* Print an operand. Recognize special options, documented below. */
14306 15785
14307 #if TARGET_ELF 15786 #if TARGET_ELF
14469 15948
14470 case 'K': 15949 case 'K':
14471 /* X must be a symbolic constant on ELF. Write an 15950 /* X must be a symbolic constant on ELF. Write an
14472 expression suitable for an 'addi' that adds in the low 16 15951 expression suitable for an 'addi' that adds in the low 16
14473 bits of the MEM. */ 15952 bits of the MEM. */
14474 if (GET_CODE (x) != CONST) 15953 if (GET_CODE (x) == CONST)
14475 {
14476 print_operand_address (file, x);
14477 fputs ("@l", file);
14478 }
14479 else
14480 { 15954 {
14481 if (GET_CODE (XEXP (x, 0)) != PLUS 15955 if (GET_CODE (XEXP (x, 0)) != PLUS
14482 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF 15956 || (GET_CODE (XEXP (XEXP (x, 0), 0)) != SYMBOL_REF
14483 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF) 15957 && GET_CODE (XEXP (XEXP (x, 0), 0)) != LABEL_REF)
14484 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT) 15958 || GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
14485 output_operand_lossage ("invalid %%K value"); 15959 output_operand_lossage ("invalid %%K value");
14486 print_operand_address (file, XEXP (XEXP (x, 0), 0)); 15960 }
14487 fputs ("@l", file); 15961 print_operand_address (file, x);
14488 /* For GNU as, there must be a non-alphanumeric character 15962 fputs ("@l", file);
14489 between 'l' and the number. The '-' is added by
14490 print_operand() already. */
14491 if (INTVAL (XEXP (XEXP (x, 0), 1)) >= 0)
14492 fputs ("+", file);
14493 print_operand (file, XEXP (XEXP (x, 0), 1), 0);
14494 }
14495 return; 15963 return;
14496 15964
14497 /* %l is output_asm_label. */ 15965 /* %l is output_asm_label. */
14498 15966
14499 case 'L': 15967 case 'L':
14861 /* For macho, check to see if we need a stub. */ 16329 /* For macho, check to see if we need a stub. */
14862 if (TARGET_MACHO) 16330 if (TARGET_MACHO)
14863 { 16331 {
14864 const char *name = XSTR (x, 0); 16332 const char *name = XSTR (x, 0);
14865 #if TARGET_MACHO 16333 #if TARGET_MACHO
14866 if (MACHOPIC_INDIRECT 16334 if (darwin_emit_branch_islands
16335 && MACHOPIC_INDIRECT
14867 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION) 16336 && machopic_classify_symbol (x) == MACHOPIC_UNDEFINED_FUNCTION)
14868 name = machopic_indirection_name (x, /*stub_p=*/true); 16337 name = machopic_indirection_name (x, /*stub_p=*/true);
14869 #endif 16338 #endif
14870 assemble_name (file, name); 16339 assemble_name (file, name);
14871 } 16340 }
14977 output_address (XEXP (XEXP (x, 0), 1)); 16446 output_address (XEXP (XEXP (x, 0), 1));
14978 else 16447 else
14979 output_address (XEXP (x, 0)); 16448 output_address (XEXP (x, 0));
14980 } 16449 }
14981 else 16450 else
14982 output_addr_const (file, x); 16451 {
16452 if (toc_relative_expr_p (x))
16453 /* This hack along with a corresponding hack in
16454 rs6000_output_addr_const_extra arranges to output addends
16455 where the assembler expects to find them. eg.
16456 (const (plus (unspec [symbol_ref ("x") tocrel]) 4))
16457 without this hack would be output as "x@toc+4". We
16458 want "x+4@toc". */
16459 output_addr_const (file, tocrel_base);
16460 else
16461 output_addr_const (file, x);
16462 }
14983 return; 16463 return;
14984 16464
14985 case '&': 16465 case '&':
14986 assemble_name (file, rs6000_get_some_local_dynamic_name ()); 16466 assemble_name (file, rs6000_get_some_local_dynamic_name ());
14987 return; 16467 return;
15019 reg_names[ REGNO (XEXP (x, 1)) ]); 16499 reg_names[ REGNO (XEXP (x, 1)) ]);
15020 } 16500 }
15021 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT) 16501 else if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
15022 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)", 16502 fprintf (file, HOST_WIDE_INT_PRINT_DEC "(%s)",
15023 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]); 16503 INTVAL (XEXP (x, 1)), reg_names[ REGNO (XEXP (x, 0)) ]);
16504 #if TARGET_MACHO
16505 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
16506 && CONSTANT_P (XEXP (x, 1)))
16507 {
16508 fprintf (file, "lo16(");
16509 output_addr_const (file, XEXP (x, 1));
16510 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
16511 }
16512 #endif
16513 else if (legitimate_constant_pool_address_p (x, QImode, true))
16514 {
16515 /* This hack along with a corresponding hack in
16516 rs6000_output_addr_const_extra arranges to output addends
16517 where the assembler expects to find them. eg.
16518 (lo_sum (reg 9)
16519 . (const (plus (unspec [symbol_ref ("x") tocrel]) 8)))
16520 without this hack would be output as "x@toc+8@l(9)". We
16521 want "x+8@toc@l(9)". */
16522 output_addr_const (file, tocrel_base);
16523 if (GET_CODE (x) == LO_SUM)
16524 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
16525 else
16526 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
16527 }
15024 #if TARGET_ELF 16528 #if TARGET_ELF
15025 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG 16529 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
15026 && CONSTANT_P (XEXP (x, 1))) 16530 && CONSTANT_P (XEXP (x, 1)))
15027 { 16531 {
15028 output_addr_const (file, XEXP (x, 1)); 16532 output_addr_const (file, XEXP (x, 1));
15029 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]); 16533 fprintf (file, "@l(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
15030 } 16534 }
15031 #endif 16535 #endif
15032 #if TARGET_MACHO
15033 else if (GET_CODE (x) == LO_SUM && GET_CODE (XEXP (x, 0)) == REG
15034 && CONSTANT_P (XEXP (x, 1)))
15035 {
15036 fprintf (file, "lo16(");
15037 output_addr_const (file, XEXP (x, 1));
15038 fprintf (file, ")(%s)", reg_names[ REGNO (XEXP (x, 0)) ]);
15039 }
15040 #endif
15041 else if (legitimate_constant_pool_address_p (x))
15042 {
15043 output_addr_const (file, XEXP (x, 1));
15044 fprintf (file, "(%s)", reg_names[REGNO (XEXP (x, 0))]);
15045 }
15046 else 16536 else
15047 gcc_unreachable (); 16537 gcc_unreachable ();
15048 } 16538 }
15049 16539
15050 /* Implement OUTPUT_ADDR_CONST_EXTRA for address X. */ 16540 /* Implement TARGET_OUTPUT_ADDR_CONST_EXTRA. */
15051 16541
15052 bool 16542 static bool
15053 rs6000_output_addr_const_extra (FILE *file, rtx x) 16543 rs6000_output_addr_const_extra (FILE *file, rtx x)
15054 { 16544 {
15055 if (GET_CODE (x) == UNSPEC) 16545 if (GET_CODE (x) == UNSPEC)
15056 switch (XINT (x, 1)) 16546 switch (XINT (x, 1))
15057 { 16547 {
15058 case UNSPEC_TOCREL: 16548 case UNSPEC_TOCREL:
15059 x = XVECEXP (x, 0, 0); 16549 gcc_assert (GET_CODE (XVECEXP (x, 0, 0)) == SYMBOL_REF);
15060 gcc_assert (GET_CODE (x) == SYMBOL_REF); 16550 output_addr_const (file, XVECEXP (x, 0, 0));
15061 output_addr_const (file, x); 16551 if (x == tocrel_base && tocrel_offset != const0_rtx)
16552 {
16553 if (INTVAL (tocrel_offset) >= 0)
16554 fprintf (file, "+");
16555 output_addr_const (file, tocrel_offset);
16556 }
15062 if (!TARGET_AIX || (TARGET_ELF && TARGET_MINIMAL_TOC)) 16557 if (!TARGET_AIX || (TARGET_ELF && TARGET_MINIMAL_TOC))
15063 { 16558 {
15064 putc ('-', file); 16559 putc ('-', file);
15065 assemble_name (file, toc_label_name); 16560 assemble_name (file, toc_label_name);
15066 } 16561 }
15380 if (comp_mode == CCFPmode && TARGET_XL_COMPAT 16875 if (comp_mode == CCFPmode && TARGET_XL_COMPAT
15381 && GET_MODE (op0) == TFmode 16876 && GET_MODE (op0) == TFmode
15382 && !TARGET_IEEEQUAD 16877 && !TARGET_IEEEQUAD
15383 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128) 16878 && TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_LONG_DOUBLE_128)
15384 emit_insn (gen_rtx_PARALLEL (VOIDmode, 16879 emit_insn (gen_rtx_PARALLEL (VOIDmode,
15385 gen_rtvec (9, 16880 gen_rtvec (10,
15386 gen_rtx_SET (VOIDmode, 16881 gen_rtx_SET (VOIDmode,
15387 compare_result, 16882 compare_result,
15388 gen_rtx_COMPARE (comp_mode, op0, op1)), 16883 gen_rtx_COMPARE (comp_mode, op0, op1)),
15389 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)), 16884 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
15390 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)), 16885 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
15391 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)), 16886 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
15392 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)), 16887 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
15393 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)), 16888 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
15394 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)), 16889 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
15395 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)), 16890 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
15396 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode))))); 16891 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (DFmode)),
16892 gen_rtx_CLOBBER (VOIDmode, gen_rtx_SCRATCH (Pmode)))));
15397 else if (GET_CODE (op1) == UNSPEC 16893 else if (GET_CODE (op1) == UNSPEC
15398 && XINT (op1, 1) == UNSPEC_SP_TEST) 16894 && XINT (op1, 1) == UNSPEC_SP_TEST)
15399 { 16895 {
15400 rtx op1b = XVECEXP (op1, 0, 0); 16896 rtx op1b = XVECEXP (op1, 0, 0);
15401 comp_mode = CCEQmode; 16897 comp_mode = CCEQmode;
15450 16946
15451 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx); 16947 return gen_rtx_fmt_ee (code, VOIDmode, compare_result, const0_rtx);
15452 } 16948 }
15453 16949
15454 16950
15455 /* Emit the RTL for an sCOND pattern. */ 16951 /* Emit the RTL for an sISEL pattern. */
15456 16952
15457 void 16953 void
15458 rs6000_emit_sISEL (enum machine_mode mode, rtx operands[]) 16954 rs6000_emit_sISEL (enum machine_mode mode ATTRIBUTE_UNUSED, rtx operands[])
15459 { 16955 {
15460 rtx condition_rtx; 16956 rs6000_emit_int_cmove (operands[0], operands[1], const1_rtx, const0_rtx);
15461 enum machine_mode op_mode;
15462 enum rtx_code cond_code;
15463 rtx result = operands[0];
15464
15465 condition_rtx = rs6000_generate_compare (operands[1], mode);
15466 cond_code = GET_CODE (condition_rtx);
15467
15468 op_mode = GET_MODE (XEXP (operands[1], 0));
15469 if (op_mode == VOIDmode)
15470 op_mode = GET_MODE (XEXP (operands[1], 1));
15471
15472 if (TARGET_POWERPC64 && GET_MODE (result) == DImode)
15473 {
15474 PUT_MODE (condition_rtx, DImode);
15475 if (cond_code == GEU || cond_code == GTU || cond_code == LEU
15476 || cond_code == LTU)
15477 emit_insn (gen_isel_unsigned_di (result, condition_rtx,
15478 force_reg (DImode, const1_rtx),
15479 force_reg (DImode, const0_rtx),
15480 XEXP (condition_rtx, 0)));
15481 else
15482 emit_insn (gen_isel_signed_di (result, condition_rtx,
15483 force_reg (DImode, const1_rtx),
15484 force_reg (DImode, const0_rtx),
15485 XEXP (condition_rtx, 0)));
15486 }
15487 else
15488 {
15489 PUT_MODE (condition_rtx, SImode);
15490 if (cond_code == GEU || cond_code == GTU || cond_code == LEU
15491 || cond_code == LTU)
15492 emit_insn (gen_isel_unsigned_si (result, condition_rtx,
15493 force_reg (SImode, const1_rtx),
15494 force_reg (SImode, const0_rtx),
15495 XEXP (condition_rtx, 0)));
15496 else
15497 emit_insn (gen_isel_signed_si (result, condition_rtx,
15498 force_reg (SImode, const1_rtx),
15499 force_reg (SImode, const0_rtx),
15500 XEXP (condition_rtx, 0)));
15501 }
15502 } 16957 }
15503 16958
15504 void 16959 void
15505 rs6000_emit_sCOND (enum machine_mode mode, rtx operands[]) 16960 rs6000_emit_sCOND (enum machine_mode mode, rtx operands[])
15506 { 16961 {
15814 17269
15815 rev_code = reverse_condition_maybe_unordered (rcode); 17270 rev_code = reverse_condition_maybe_unordered (rcode);
15816 if (rev_code == UNKNOWN) 17271 if (rev_code == UNKNOWN)
15817 return NULL_RTX; 17272 return NULL_RTX;
15818 17273
15819 nor_code = optab_handler (one_cmpl_optab, (int)dmode)->insn_code; 17274 nor_code = optab_handler (one_cmpl_optab, dmode);
15820 if (nor_code == CODE_FOR_nothing) 17275 if (nor_code == CODE_FOR_nothing)
15821 return NULL_RTX; 17276 return NULL_RTX;
15822 17277
15823 mask2 = rs6000_emit_vector_compare (rev_code, op0, op1, dmode); 17278 mask2 = rs6000_emit_vector_compare (rev_code, op0, op1, dmode);
15824 if (!mask2) 17279 if (!mask2)
15859 17314
15860 default: 17315 default:
15861 gcc_unreachable (); 17316 gcc_unreachable ();
15862 } 17317 }
15863 17318
15864 ior_code = optab_handler (ior_optab, (int)dmode)->insn_code; 17319 ior_code = optab_handler (ior_optab, dmode);
15865 if (ior_code == CODE_FOR_nothing) 17320 if (ior_code == CODE_FOR_nothing)
15866 return NULL_RTX; 17321 return NULL_RTX;
15867 17322
15868 c_rtx = rs6000_emit_vector_compare (new_code, op0, op1, dmode); 17323 c_rtx = rs6000_emit_vector_compare (new_code, op0, op1, dmode);
15869 if (!c_rtx) 17324 if (!c_rtx)
16163 static int 17618 static int
16164 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond) 17619 rs6000_emit_int_cmove (rtx dest, rtx op, rtx true_cond, rtx false_cond)
16165 { 17620 {
16166 rtx condition_rtx, cr; 17621 rtx condition_rtx, cr;
16167 enum machine_mode mode = GET_MODE (dest); 17622 enum machine_mode mode = GET_MODE (dest);
17623 enum rtx_code cond_code;
17624 rtx (*isel_func) (rtx, rtx, rtx, rtx, rtx);
17625 bool signedp;
16168 17626
16169 if (mode != SImode && (!TARGET_POWERPC64 || mode != DImode)) 17627 if (mode != SImode && (!TARGET_POWERPC64 || mode != DImode))
16170 return 0; 17628 return 0;
16171 17629
16172 /* We still have to do the compare, because isel doesn't do a 17630 /* We still have to do the compare, because isel doesn't do a
16173 compare, it just looks at the CRx bits set by a previous compare 17631 compare, it just looks at the CRx bits set by a previous compare
16174 instruction. */ 17632 instruction. */
16175 condition_rtx = rs6000_generate_compare (op, mode); 17633 condition_rtx = rs6000_generate_compare (op, mode);
17634 cond_code = GET_CODE (condition_rtx);
16176 cr = XEXP (condition_rtx, 0); 17635 cr = XEXP (condition_rtx, 0);
16177 17636 signedp = GET_MODE (cr) == CCmode;
16178 if (mode == SImode) 17637
16179 { 17638 isel_func = (mode == SImode
16180 if (GET_MODE (cr) == CCmode) 17639 ? (signedp ? gen_isel_signed_si : gen_isel_unsigned_si)
16181 emit_insn (gen_isel_signed_si (dest, condition_rtx, 17640 : (signedp ? gen_isel_signed_di : gen_isel_unsigned_di));
16182 true_cond, false_cond, cr)); 17641
16183 else 17642 switch (cond_code)
16184 emit_insn (gen_isel_unsigned_si (dest, condition_rtx, 17643 {
16185 true_cond, false_cond, cr)); 17644 case LT: case GT: case LTU: case GTU: case EQ:
16186 } 17645 /* isel handles these directly. */
16187 else 17646 break;
16188 { 17647
16189 if (GET_MODE (cr) == CCmode) 17648 default:
16190 emit_insn (gen_isel_signed_di (dest, condition_rtx, 17649 /* We need to swap the sense of the comparison. */
16191 true_cond, false_cond, cr)); 17650 {
16192 else 17651 rtx t = true_cond;
16193 emit_insn (gen_isel_unsigned_di (dest, condition_rtx, 17652 true_cond = false_cond;
16194 true_cond, false_cond, cr)); 17653 false_cond = t;
16195 } 17654 PUT_CODE (condition_rtx, reverse_condition (cond_code));
17655 }
17656 break;
17657 }
17658
17659 false_cond = force_reg (mode, false_cond);
17660 if (true_cond != const0_rtx)
17661 true_cond = force_reg (mode, true_cond);
17662
17663 emit_insn (isel_func (dest, condition_rtx, true_cond, false_cond, cr));
16196 17664
16197 return 1; 17665 return 1;
16198 } 17666 }
16199 17667
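One way to read the condition handling in rs6000_emit_int_cmove: isel copies its first source operand when a given condition-register bit is set, so LT, GT, LTU, GTU and EQ map onto it directly, while GE, LE, NE and the like (a bit being clear) are emitted by reversing the comparison and exchanging the two arms. Purely as an illustration, a signed dest = (a >= b) ? t : f ends up generated as the LT form dest = (a < b) ? f : t.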
16200 const char * 17668 const char *
16201 output_isel (rtx *operands) 17669 output_isel (rtx *operands)
16202 { 17670 {
16203 enum rtx_code code; 17671 enum rtx_code code;
16204 17672
16205 code = GET_CODE (operands[1]); 17673 code = GET_CODE (operands[1]);
17674
16206 if (code == GE || code == GEU || code == LE || code == LEU || code == NE) 17675 if (code == GE || code == GEU || code == LE || code == LEU || code == NE)
16207 { 17676 {
17677 gcc_assert (GET_CODE (operands[2]) == REG
17678 && GET_CODE (operands[3]) == REG);
16208 PUT_CODE (operands[1], reverse_condition (code)); 17679 PUT_CODE (operands[1], reverse_condition (code));
16209 return "isel %0,%3,%2,%j1"; 17680 return "isel %0,%3,%2,%j1";
16210 } 17681 }
16211 else 17682
16212 return "isel %0,%2,%3,%j1"; 17683 return "isel %0,%2,%3,%j1";
16213 } 17684 }
16214 17685
16215 void 17686 void
16216 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1) 17687 rs6000_emit_minmax (rtx dest, enum rtx_code code, rtx op0, rtx op1)
16217 { 17688 {
16218 enum machine_mode mode = GET_MODE (op0); 17689 enum machine_mode mode = GET_MODE (op0);
16219 enum rtx_code c; 17690 enum rtx_code c;
16220 rtx target; 17691 rtx target;
16221 17692
16222 /* VSX/altivec have direct min/max insns. */ 17693 /* VSX/altivec have direct min/max insns. */
16223 if ((code == SMAX || code == SMIN) && VECTOR_UNIT_ALTIVEC_OR_VSX_P (mode)) 17694 if ((code == SMAX || code == SMIN)
17695 && (VECTOR_UNIT_ALTIVEC_OR_VSX_P (mode)
17696 || (mode == SFmode && VECTOR_UNIT_VSX_P (DFmode))))
16224 { 17697 {
16225 emit_insn (gen_rtx_SET (VOIDmode, 17698 emit_insn (gen_rtx_SET (VOIDmode,
16226 dest, 17699 dest,
16227 gen_rtx_fmt_ee (code, mode, op0, op1))); 17700 gen_rtx_fmt_ee (code, mode, op0, op1)));
16228 return; 17701 return;
17057 if (REGNO (reg) == ALTIVEC_ARG_RETURN) 18530 if (REGNO (reg) == ALTIVEC_ARG_RETURN)
17058 *yes = true; 18531 *yes = true;
17059 } 18532 }
17060 18533
17061 18534
 18535 /* Determine the strategy for saving/restoring registers. */
18536
18537 enum {
18538 SAVRES_MULTIPLE = 0x1,
18539 SAVE_INLINE_FPRS = 0x2,
18540 SAVE_INLINE_GPRS = 0x4,
18541 REST_INLINE_FPRS = 0x8,
18542 REST_INLINE_GPRS = 0x10,
18543 SAVE_NOINLINE_GPRS_SAVES_LR = 0x20,
18544 SAVE_NOINLINE_FPRS_SAVES_LR = 0x40,
18545 REST_NOINLINE_FPRS_DOESNT_RESTORE_LR = 0x80
18546 };
18547
18548 static int
18549 rs6000_savres_strategy (rs6000_stack_t *info,
18550 bool using_static_chain_p)
18551 {
18552 int strategy = 0;
18553
18554 if (TARGET_MULTIPLE
18555 && !TARGET_POWERPC64
18556 && !(TARGET_SPE_ABI && info->spe_64bit_regs_used)
18557 && info->first_gp_reg_save < 31
18558 && no_global_regs_above (info->first_gp_reg_save, /*gpr=*/true))
18559 strategy |= SAVRES_MULTIPLE;
18560
18561 if (crtl->calls_eh_return
18562 || cfun->machine->ra_need_lr
18563 || info->total_size > 32767)
18564 strategy |= (SAVE_INLINE_FPRS | REST_INLINE_FPRS
18565 | SAVE_INLINE_GPRS | REST_INLINE_GPRS);
18566
18567 if (info->first_fp_reg_save == 64
18568 || FP_SAVE_INLINE (info->first_fp_reg_save)
18569 /* The out-of-line FP routines use double-precision stores;
18570 we can't use those routines if we don't have such stores. */
18571 || (TARGET_HARD_FLOAT && !TARGET_DOUBLE_FLOAT)
18572 || !no_global_regs_above (info->first_fp_reg_save, /*gpr=*/false))
18573 strategy |= SAVE_INLINE_FPRS | REST_INLINE_FPRS;
18574
18575 if (info->first_gp_reg_save == 32
18576 || GP_SAVE_INLINE (info->first_gp_reg_save)
18577 || !((strategy & SAVRES_MULTIPLE)
18578 || no_global_regs_above (info->first_gp_reg_save, /*gpr=*/true)))
18579 strategy |= SAVE_INLINE_GPRS | REST_INLINE_GPRS;
18580
18581 /* Don't bother to try to save things out-of-line if r11 is occupied
18582 by the static chain. It would require too much fiddling and the
18583 static chain is rarely used anyway. */
18584 if (using_static_chain_p)
18585 strategy |= SAVE_INLINE_FPRS | SAVE_INLINE_GPRS;
18586
18587 /* If we are going to use store multiple, then don't even bother
18588 with the out-of-line routines, since the store-multiple
18589 instruction will always be smaller. */
18590 if ((strategy & SAVRES_MULTIPLE))
18591 strategy |= SAVE_INLINE_GPRS;
18592
18593 /* The situation is more complicated with load multiple. We'd
18594 prefer to use the out-of-line routines for restores, since the
18595 "exit" out-of-line routines can handle the restore of LR and the
 18596 frame teardown. However it doesn't make sense to use the
18597 out-of-line routine if that is the only reason we'd need to save
18598 LR, and we can't use the "exit" out-of-line gpr restore if we
 18599 have saved some fprs; in those cases it is advantageous to use
18600 load multiple when available. */
18601 if ((strategy & SAVRES_MULTIPLE)
18602 && (!info->lr_save_p
18603 || info->first_fp_reg_save != 64))
18604 strategy |= REST_INLINE_GPRS;
18605
18606 /* We can only use load multiple or the out-of-line routines to
18607 restore if we've used store multiple or out-of-line routines
18608 in the prologue, i.e. if we've saved all the registers from
18609 first_gp_reg_save. Otherwise, we risk loading garbage. */
18610 if ((strategy & (SAVE_INLINE_GPRS | SAVRES_MULTIPLE)) == SAVE_INLINE_GPRS)
18611 strategy |= REST_INLINE_GPRS;
18612
18613 /* Saving CR interferes with the exit routines used on the SPE, so
18614 just punt here. */
18615 if (TARGET_SPE_ABI
18616 && info->spe_64bit_regs_used
18617 && info->cr_save_p)
18618 strategy |= REST_INLINE_GPRS;
18619
18620 #ifdef POWERPC_LINUX
18621 if (TARGET_64BIT)
18622 {
18623 if (!(strategy & SAVE_INLINE_FPRS))
18624 strategy |= SAVE_NOINLINE_FPRS_SAVES_LR;
18625 else if (!(strategy & SAVE_INLINE_GPRS)
18626 && info->first_fp_reg_save == 64)
18627 strategy |= SAVE_NOINLINE_GPRS_SAVES_LR;
18628 }
18629 #else
18630 if (TARGET_AIX && !(strategy & REST_INLINE_FPRS))
18631 strategy |= REST_NOINLINE_FPRS_DOESNT_RESTORE_LR;
18632 #endif
18633 return strategy;
18634 }
18635
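In the mask returned by rs6000_savres_strategy, a set SAVE_/REST_ bit means that class of registers is handled inline by the prologue or epilogue, a clear one means the out-of-line save/restore routines are called instead, and SAVRES_MULTIPLE selects the store/load-multiple instructions. A minimal standalone sketch of how such a mask is decoded (the flag values are copied from the enum above; the chosen combination is invented for the example and is not taken from this file):

    #include <stdbool.h>
    #include <stdio.h>

    #define SAVRES_MULTIPLE   0x1   /* values mirror the enum, example only */
    #define SAVE_INLINE_FPRS  0x2
    #define SAVE_INLINE_GPRS  0x4
    #define REST_INLINE_FPRS  0x8
    #define REST_INLINE_GPRS  0x10

    int
    main (void)
    {
      int strategy = SAVE_INLINE_FPRS | SAVE_INLINE_GPRS
                     | REST_INLINE_FPRS | REST_INLINE_GPRS;

      bool using_store_multiple  = (strategy & SAVRES_MULTIPLE) != 0;
      bool saving_FPRs_inline    = (strategy & SAVE_INLINE_FPRS) != 0;
      bool restoring_GPRs_inline = (strategy & REST_INLINE_GPRS) != 0;

      /* With this mask everything is handled inline: no store/load-multiple
         and no calls to the out-of-line save/restore routines.  */
      printf ("%d %d %d\n", using_store_multiple, saving_FPRs_inline,
              restoring_GPRs_inline);
      return 0;
    }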
17062 /* Calculate the stack information for the current function. This is 18636 /* Calculate the stack information for the current function. This is
17063 complicated by having two separate calling sequences, the AIX calling 18637 complicated by having two separate calling sequences, the AIX calling
17064 sequence and the V.4 calling sequence. 18638 sequence and the V.4 calling sequence.
17065 18639
17066 AIX (and Darwin/Mac OS X) stack frames look like: 18640 AIX (and Darwin/Mac OS X) stack frames look like:
17157 #endif 18731 #endif
17158 18732
17159 static rs6000_stack_t * 18733 static rs6000_stack_t *
17160 rs6000_stack_info (void) 18734 rs6000_stack_info (void)
17161 { 18735 {
17162 static rs6000_stack_t info; 18736 #ifdef ENABLE_CHECKING
17163 rs6000_stack_t *info_ptr = &info; 18737 static rs6000_stack_t info_save;
18738 #endif
18739 rs6000_stack_t *info_ptr = &stack_info;
17164 int reg_size = TARGET_32BIT ? 4 : 8; 18740 int reg_size = TARGET_32BIT ? 4 : 8;
17165 int ehrd_size; 18741 int ehrd_size;
17166 int save_align; 18742 int save_align;
17167 int first_gp; 18743 int first_gp;
17168 HOST_WIDE_INT non_fixed_size; 18744 HOST_WIDE_INT non_fixed_size;
17169 18745 bool using_static_chain_p;
17170 memset (&info, 0, sizeof (info)); 18746
18747 #ifdef ENABLE_CHECKING
18748 memcpy (&info_save, &stack_info, sizeof stack_info);
18749 #else
18750 if (reload_completed && info_ptr->reload_completed)
18751 return info_ptr;
18752 #endif
18753
18754 memset (&stack_info, 0, sizeof (stack_info));
18755 info_ptr->reload_completed = reload_completed;
17171 18756
17172 if (TARGET_SPE) 18757 if (TARGET_SPE)
17173 { 18758 {
17174 /* Cache value so we don't rescan instruction chain over and over. */ 18759 /* Cache value so we don't rescan instruction chain over and over. */
17175 if (cfun->machine->insn_chain_scanned_p == 0) 18760 if (cfun->machine->insn_chain_scanned_p == 0)
17222 18807
17223 /* Does this function call anything? */ 18808 /* Does this function call anything? */
17224 info_ptr->calls_p = (! current_function_is_leaf 18809 info_ptr->calls_p = (! current_function_is_leaf
17225 || cfun->machine->ra_needs_full_frame); 18810 || cfun->machine->ra_needs_full_frame);
17226 18811
17227 /* Determine if we need to save the link register. */
17228 if ((DEFAULT_ABI == ABI_AIX
17229 && crtl->profile
17230 && !TARGET_PROFILE_KERNEL)
17231 #ifdef TARGET_RELOCATABLE
17232 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
17233 #endif
17234 || (info_ptr->first_fp_reg_save != 64
17235 && !FP_SAVE_INLINE (info_ptr->first_fp_reg_save))
17236 || (DEFAULT_ABI == ABI_V4 && cfun->calls_alloca)
17237 || info_ptr->calls_p
17238 || rs6000_ra_ever_killed ())
17239 {
17240 info_ptr->lr_save_p = 1;
17241 df_set_regs_ever_live (LR_REGNO, true);
17242 }
17243
17244 /* Determine if we need to save the condition code registers. */ 18812 /* Determine if we need to save the condition code registers. */
17245 if (df_regs_ever_live_p (CR2_REGNO) 18813 if (df_regs_ever_live_p (CR2_REGNO)
17246 || df_regs_ever_live_p (CR3_REGNO) 18814 || df_regs_ever_live_p (CR3_REGNO)
17247 || df_regs_ever_live_p (CR4_REGNO)) 18815 || df_regs_ever_live_p (CR4_REGNO))
17248 { 18816 {
17407 + info_ptr->save_size); 18975 + info_ptr->save_size);
17408 18976
17409 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size, 18977 info_ptr->total_size = RS6000_ALIGN (non_fixed_size + info_ptr->fixed_size,
17410 ABI_STACK_BOUNDARY / BITS_PER_UNIT); 18978 ABI_STACK_BOUNDARY / BITS_PER_UNIT);
17411 18979
18980 /* Determine if we need to save the link register. */
18981 if (info_ptr->calls_p
18982 || (DEFAULT_ABI == ABI_AIX
18983 && crtl->profile
18984 && !TARGET_PROFILE_KERNEL)
18985 || (DEFAULT_ABI == ABI_V4 && cfun->calls_alloca)
18986 #ifdef TARGET_RELOCATABLE
18987 || (TARGET_RELOCATABLE && (get_pool_size () != 0))
18988 #endif
18989 || rs6000_ra_ever_killed ())
18990 info_ptr->lr_save_p = 1;
18991
18992 using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
18993 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
18994 && call_used_regs[STATIC_CHAIN_REGNUM]);
18995 info_ptr->savres_strategy = rs6000_savres_strategy (info_ptr,
18996 using_static_chain_p);
18997
18998 if (!(info_ptr->savres_strategy & SAVE_INLINE_GPRS)
18999 || !(info_ptr->savres_strategy & SAVE_INLINE_FPRS)
19000 || !(info_ptr->savres_strategy & REST_INLINE_GPRS)
19001 || !(info_ptr->savres_strategy & REST_INLINE_FPRS))
19002 info_ptr->lr_save_p = 1;
19003
19004 if (info_ptr->lr_save_p)
19005 df_set_regs_ever_live (LR_REGNO, true);
19006
17412 /* Determine if we need to allocate any stack frame: 19007 /* Determine if we need to allocate any stack frame:
17413 19008
17414 For AIX we need to push the stack if a frame pointer is needed 19009 For AIX we need to push the stack if a frame pointer is needed
17415 (because the stack might be dynamically adjusted), if we are 19010 (because the stack might be dynamically adjusted), if we are
17416 debugging, if we make calls, or if the sum of fp_save, gp_save, 19011 debugging, if we make calls, or if the sum of fp_save, gp_save,
17458 info_ptr->lr_save_offset = 0; 19053 info_ptr->lr_save_offset = 0;
17459 19054
17460 if (! info_ptr->cr_save_p) 19055 if (! info_ptr->cr_save_p)
17461 info_ptr->cr_save_offset = 0; 19056 info_ptr->cr_save_offset = 0;
17462 19057
19058 #ifdef ENABLE_CHECKING
19059 gcc_assert (!(reload_completed && info_save.reload_completed)
19060 || memcmp (&info_save, &stack_info, sizeof stack_info) == 0);
19061 #endif
17463 return info_ptr; 19062 return info_ptr;
17464 } 19063 }
17465 19064
17466 /* Return true if the current function uses any GPRs in 64-bit SIMD 19065 /* Return true if the current function uses any GPRs in 64-bit SIMD
17467 mode. */ 19066 mode. */
17785 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic) 19384 if (TARGET_ELF && TARGET_SECURE_PLT && DEFAULT_ABI != ABI_AIX && flag_pic)
17786 { 19385 {
17787 char buf[30]; 19386 char buf[30];
17788 rtx lab, tmp1, tmp2, got; 19387 rtx lab, tmp1, tmp2, got;
17789 19388
17790 ASM_GENERATE_INTERNAL_LABEL (buf, "LCF", rs6000_pic_labelno); 19389 lab = gen_label_rtx ();
19390 ASM_GENERATE_INTERNAL_LABEL (buf, "L", CODE_LABEL_NUMBER (lab));
17791 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)); 19391 lab = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
17792 if (flag_pic == 2) 19392 if (flag_pic == 2)
17793 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name); 19393 got = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
17794 else 19394 else
17795 got = rs6000_got_sym (); 19395 got = rs6000_got_sym ();
17798 { 19398 {
17799 tmp1 = gen_reg_rtx (Pmode); 19399 tmp1 = gen_reg_rtx (Pmode);
17800 tmp2 = gen_reg_rtx (Pmode); 19400 tmp2 = gen_reg_rtx (Pmode);
17801 } 19401 }
17802 emit_insn (gen_load_toc_v4_PIC_1 (lab)); 19402 emit_insn (gen_load_toc_v4_PIC_1 (lab));
17803 emit_move_insn (tmp1, 19403 emit_move_insn (tmp1, gen_rtx_REG (Pmode, LR_REGNO));
17804 gen_rtx_REG (Pmode, LR_REGNO));
17805 emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab)); 19404 emit_insn (gen_load_toc_v4_PIC_3b (tmp2, tmp1, got, lab));
17806 emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab)); 19405 emit_insn (gen_load_toc_v4_PIC_3c (dest, tmp2, got, lab));
17807 } 19406 }
17808 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1) 19407 else if (TARGET_ELF && DEFAULT_ABI == ABI_V4 && flag_pic == 1)
17809 { 19408 {
17826 19425
17827 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno); 19426 ASM_GENERATE_INTERNAL_LABEL (buf, "LCL", rs6000_pic_labelno);
17828 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf)); 19427 symL = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
17829 19428
17830 emit_insn (gen_load_toc_v4_PIC_1 (symF)); 19429 emit_insn (gen_load_toc_v4_PIC_1 (symF));
17831 emit_move_insn (dest, 19430 emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
17832 gen_rtx_REG (Pmode, LR_REGNO));
17833 emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF)); 19431 emit_insn (gen_load_toc_v4_PIC_2 (temp0, dest, symL, symF));
17834 } 19432 }
17835 else 19433 else
17836 { 19434 {
17837 rtx tocsym; 19435 rtx tocsym, lab;
17838 19436
17839 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name); 19437 tocsym = gen_rtx_SYMBOL_REF (Pmode, toc_label_name);
17840 emit_insn (gen_load_toc_v4_PIC_1b (tocsym)); 19438 lab = gen_label_rtx ();
17841 emit_move_insn (dest, 19439 emit_insn (gen_load_toc_v4_PIC_1b (tocsym, lab));
17842 gen_rtx_REG (Pmode, LR_REGNO)); 19440 emit_move_insn (dest, gen_rtx_REG (Pmode, LR_REGNO));
17843 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest)); 19441 emit_move_insn (temp0, gen_rtx_MEM (Pmode, dest));
17844 } 19442 }
17845 emit_insn (gen_addsi3 (dest, temp0, dest)); 19443 emit_insn (gen_addsi3 (dest, temp0, dest));
17846 } 19444 }
17847 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC) 19445 else if (TARGET_ELF && !TARGET_AIX && flag_pic == 0 && TARGET_MINIMAL_TOC)
17950 return 0; 19548 return 0;
17951 } 19549 }
17952 #endif 19550 #endif
17953 19551
17954 rtx 19552 rtx
17955 create_TOC_reference (rtx symbol) 19553 create_TOC_reference (rtx symbol, rtx largetoc_reg)
17956 { 19554 {
19555 rtx tocrel, tocreg;
19556
17957 if (TARGET_DEBUG_ADDR) 19557 if (TARGET_DEBUG_ADDR)
17958 { 19558 {
17959 if (GET_CODE (symbol) == SYMBOL_REF) 19559 if (GET_CODE (symbol) == SYMBOL_REF)
17960 fprintf (stderr, "\ncreate_TOC_reference, (symbol_ref %s)\n", 19560 fprintf (stderr, "\ncreate_TOC_reference, (symbol_ref %s)\n",
17961 XSTR (symbol, 0)); 19561 XSTR (symbol, 0));
17967 } 19567 }
17968 } 19568 }
17969 19569
17970 if (!can_create_pseudo_p ()) 19570 if (!can_create_pseudo_p ())
17971 df_set_regs_ever_live (TOC_REGISTER, true); 19571 df_set_regs_ever_live (TOC_REGISTER, true);
17972 return gen_rtx_PLUS (Pmode, 19572
17973 gen_rtx_REG (Pmode, TOC_REGISTER), 19573 tocrel = gen_rtx_CONST (Pmode,
17974 gen_rtx_CONST (Pmode, 19574 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, symbol),
17975 gen_rtx_UNSPEC (Pmode, gen_rtvec (1, symbol), UNSPEC_TOCREL))); 19575 UNSPEC_TOCREL));
19576 tocreg = gen_rtx_REG (Pmode, TOC_REGISTER);
19577 if (TARGET_CMODEL != CMODEL_SMALL)
19578 {
19579 rtx hi = gen_rtx_PLUS (Pmode, tocreg, gen_rtx_HIGH (Pmode, tocrel));
19580 if (largetoc_reg != NULL)
19581 {
19582 emit_move_insn (largetoc_reg, hi);
19583 hi = largetoc_reg;
19584 }
19585 return gen_rtx_LO_SUM (Pmode, hi, copy_rtx (tocrel));
19586 }
19587 else
19588 return gen_rtx_PLUS (Pmode, tocreg, tocrel);
17976 } 19589 }
17977 19590
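In RTL terms (using the same notation as the nearby comments, with "toc" standing for the TOC register, typically r2): when TARGET_CMODEL is CMODEL_SMALL the reference keeps its traditional shape
    (plus toc (const (unspec [sym] tocrel)))
whereas for the medium and large code models it becomes
    (lo_sum (plus toc (high (const (unspec [sym] tocrel))))
            (const (unspec [sym] tocrel)))
and, when a largetoc_reg is supplied, the (plus toc (high ...)) part is first moved into that register so the lo_sum simply uses it.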
17978 /* Issue assembly directives that create a reference to the given DWARF 19591 /* Issue assembly directives that create a reference to the given DWARF
17979 FRAME_TABLE_LABEL from the current function section. */ 19592 FRAME_TABLE_LABEL from the current function section. */
17980 void 19593 void
17981 rs6000_aix_asm_output_dwarf_table_ref (char * frame_table_label) 19594 rs6000_aix_asm_output_dwarf_table_ref (char * frame_table_label)
17982 { 19595 {
17983 fprintf (asm_out_file, "\t.ref %s\n", 19596 fprintf (asm_out_file, "\t.ref %s\n",
17984 TARGET_STRIP_NAME_ENCODING (frame_table_label)); 19597 TARGET_STRIP_NAME_ENCODING (frame_table_label));
17985 }
17986
17987 /* If _Unwind_* has been called from within the same module,
17988 toc register is not guaranteed to be saved to 40(1) on function
17989 entry. Save it there in that case. */
17990
17991 void
17992 rs6000_aix_emit_builtin_unwind_init (void)
17993 {
17994 rtx mem;
17995 rtx stack_top = gen_reg_rtx (Pmode);
17996 rtx opcode_addr = gen_reg_rtx (Pmode);
17997 rtx opcode = gen_reg_rtx (SImode);
17998 rtx tocompare = gen_reg_rtx (SImode);
17999 rtx no_toc_save_needed = gen_label_rtx ();
18000
18001 mem = gen_frame_mem (Pmode, hard_frame_pointer_rtx);
18002 emit_move_insn (stack_top, mem);
18003
18004 mem = gen_frame_mem (Pmode,
18005 gen_rtx_PLUS (Pmode, stack_top,
18006 GEN_INT (2 * GET_MODE_SIZE (Pmode))));
18007 emit_move_insn (opcode_addr, mem);
18008 emit_move_insn (opcode, gen_rtx_MEM (SImode, opcode_addr));
18009 emit_move_insn (tocompare, gen_int_mode (TARGET_32BIT ? 0x80410014
18010 : 0xE8410028, SImode));
18011
18012 do_compare_rtx_and_jump (opcode, tocompare, EQ, 1,
18013 SImode, NULL_RTX, NULL_RTX,
18014 no_toc_save_needed, -1);
18015
18016 mem = gen_frame_mem (Pmode,
18017 gen_rtx_PLUS (Pmode, stack_top,
18018 GEN_INT (5 * GET_MODE_SIZE (Pmode))));
18019 emit_move_insn (mem, gen_rtx_REG (Pmode, 2));
18020 emit_label (no_toc_save_needed);
18021 } 19598 }
18022 19599
18023 /* This ties together stack memory (MEM with an alias set of frame_alias_set) 19600 /* This ties together stack memory (MEM with an alias set of frame_alias_set)
18024 and the change to the stack pointer. */ 19601 and the change to the stack pointer. */
18025 19602
18116 gen_rtx_SET (VOIDmode, stack_reg, 19693 gen_rtx_SET (VOIDmode, stack_reg,
18117 gen_rtx_PLUS (Pmode, stack_reg, 19694 gen_rtx_PLUS (Pmode, stack_reg,
18118 GEN_INT (-size)))); 19695 GEN_INT (-size))));
18119 } 19696 }
18120 19697
19698 #define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)
19699
19700 #if PROBE_INTERVAL > 32768
19701 #error Cannot use indexed addressing mode for stack probing
19702 #endif
19703
19704 /* Emit code to probe a range of stack addresses from FIRST to FIRST+SIZE,
19705 inclusive. These are offsets from the current stack pointer. */
19706
19707 static void
19708 rs6000_emit_probe_stack_range (HOST_WIDE_INT first, HOST_WIDE_INT size)
19709 {
19710 /* See if we have a constant small number of probes to generate. If so,
19711 that's the easy case. */
19712 if (first + size <= 32768)
19713 {
19714 HOST_WIDE_INT i;
19715
19716 /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
19717 it exceeds SIZE. If only one probe is needed, this will not
19718 generate any code. Then probe at FIRST + SIZE. */
19719 for (i = PROBE_INTERVAL; i < size; i += PROBE_INTERVAL)
19720 emit_stack_probe (plus_constant (stack_pointer_rtx, -(first + i)));
19721
19722 emit_stack_probe (plus_constant (stack_pointer_rtx, -(first + size)));
19723 }
19724
19725 /* Otherwise, do the same as above, but in a loop. Note that we must be
19726 extra careful with variables wrapping around because we might be at
19727 the very top (or the very bottom) of the address space and we have
19728 to be able to handle this case properly; in particular, we use an
19729 equality test for the loop condition. */
19730 else
19731 {
19732 HOST_WIDE_INT rounded_size;
19733 rtx r12 = gen_rtx_REG (Pmode, 12);
19734 rtx r0 = gen_rtx_REG (Pmode, 0);
19735
19736 /* Sanity check for the addressing mode we're going to use. */
19737 gcc_assert (first <= 32768);
19738
19739 /* Step 1: round SIZE to the previous multiple of the interval. */
19740
19741 rounded_size = size & -PROBE_INTERVAL;
19742
19743
19744 /* Step 2: compute initial and final value of the loop counter. */
19745
19746 /* TEST_ADDR = SP + FIRST. */
19747 emit_insn (gen_rtx_SET (VOIDmode, r12,
19748 plus_constant (stack_pointer_rtx, -first)));
19749
19750 /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE. */
19751 if (rounded_size > 32768)
19752 {
19753 emit_move_insn (r0, GEN_INT (-rounded_size));
19754 emit_insn (gen_rtx_SET (VOIDmode, r0,
19755 gen_rtx_PLUS (Pmode, r12, r0)));
19756 }
19757 else
19758 emit_insn (gen_rtx_SET (VOIDmode, r0,
19759 plus_constant (r12, -rounded_size)));
19760
19761
19762 /* Step 3: the loop
19763
19764 while (TEST_ADDR != LAST_ADDR)
19765 {
19766 TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
19767 probe at TEST_ADDR
19768 }
19769
19770 probes at FIRST + N * PROBE_INTERVAL for values of N from 1
19771 until it is equal to ROUNDED_SIZE. */
19772
19773 if (TARGET_64BIT)
19774 emit_insn (gen_probe_stack_rangedi (r12, r12, r0));
19775 else
19776 emit_insn (gen_probe_stack_rangesi (r12, r12, r0));
19777
19778
19779 /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
19780 that SIZE is equal to ROUNDED_SIZE. */
19781
19782 if (size != rounded_size)
19783 emit_stack_probe (plus_constant (r12, rounded_size - size));
19784 }
19785 }
19786
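As a rough standalone sketch of the offsets the constant-size branch of rs6000_emit_probe_stack_range touches (PROBE_INTERVAL is taken as 4096 here only for the example, and the FIRST/SIZE arguments are invented; nothing below is compiler code):

    #include <stdio.h>

    #define PROBE_INTERVAL 4096        /* example value, not the real macro */

    /* Mimic the small-size case: one probe every PROBE_INTERVAL below the
       stack pointer, then a final probe at FIRST + SIZE.  */
    static void
    probe_offsets (long first, long size)
    {
      long i;
      for (i = PROBE_INTERVAL; i < size; i += PROBE_INTERVAL)
        printf ("probe at sp%+ld\n", -(first + i));
      printf ("probe at sp%+ld\n", -(first + size));
    }

    int
    main (void)
    {
      probe_offsets (16384, 10000);    /* probes at sp-20480, sp-24576, sp-26384 */
      return 0;
    }

When FIRST + SIZE exceeds 32768 the same offsets are produced by the r12/r0 loop instead, with a trailing probe only when SIZE is not a multiple of the interval.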
19787 /* Probe a range of stack addresses from REG1 to REG2 inclusive. These are
19788 absolute addresses. */
19789
19790 const char *
19791 output_probe_stack_range (rtx reg1, rtx reg2)
19792 {
19793 static int labelno = 0;
19794 char loop_lab[32], end_lab[32];
19795 rtx xops[2];
19796
19797 ASM_GENERATE_INTERNAL_LABEL (loop_lab, "LPSRL", labelno);
19798 ASM_GENERATE_INTERNAL_LABEL (end_lab, "LPSRE", labelno++);
19799
19800 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, loop_lab);
19801
19802 /* Jump to END_LAB if TEST_ADDR == LAST_ADDR. */
19803 xops[0] = reg1;
19804 xops[1] = reg2;
19805 if (TARGET_64BIT)
19806 output_asm_insn ("{cmp|cmpd} 0,%0,%1", xops);
19807 else
19808 output_asm_insn ("{cmp|cmpw} 0,%0,%1", xops);
19809
19810 fputs ("\tbeq 0,", asm_out_file);
19811 assemble_name_raw (asm_out_file, end_lab);
19812 fputc ('\n', asm_out_file);
19813
19814 /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL. */
19815 xops[1] = GEN_INT (-PROBE_INTERVAL);
19816 output_asm_insn ("{cal %0,%1(%0)|addi %0,%0,%1}", xops);
19817
19818 /* Probe at TEST_ADDR and branch. */
19819 output_asm_insn ("{st|stw} 0,0(%0)", xops);
19820 fprintf (asm_out_file, "\tb ");
19821 assemble_name_raw (asm_out_file, loop_lab);
19822 fputc ('\n', asm_out_file);
19823
19824 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, end_lab);
19825
19826 return "";
19827 }
19828
18121 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced 19829 /* Add to 'insn' a note which is PATTERN (INSN) but with REG replaced
18122 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2 19830 with (plus:P (reg 1) VAL), and with REG2 replaced with RREG if REG2
18123 is not NULL. It would be nice if dwarf2out_frame_debug_expr could 19831 is not NULL. It would be nice if dwarf2out_frame_debug_expr could
18124 deduce these equivalences by itself so it wasn't necessary to hold 19832 deduce these equivalences by itself so it wasn't necessary to hold
18125 its hand so much. */ 19833 its hand so much. */
18438 prefix = savep ? SAVE_FP_PREFIX : RESTORE_FP_PREFIX; 20146 prefix = savep ? SAVE_FP_PREFIX : RESTORE_FP_PREFIX;
18439 suffix = savep ? SAVE_FP_SUFFIX : RESTORE_FP_SUFFIX; 20147 suffix = savep ? SAVE_FP_SUFFIX : RESTORE_FP_SUFFIX;
18440 } 20148 }
18441 } 20149 }
18442 else if (DEFAULT_ABI == ABI_DARWIN) 20150 else if (DEFAULT_ABI == ABI_DARWIN)
18443 sorry ("Out-of-line save/restore routines not supported on Darwin"); 20151 sorry ("out-of-line save/restore routines not supported on Darwin");
18444 20152
18445 sprintf (savres_routine_name, "%s%d%s", prefix, regno, suffix); 20153 sprintf (savres_routine_name, "%s%d%s", prefix, regno, suffix);
18446 20154
18447 return savres_routine_name; 20155 return savres_routine_name;
18448 } 20156 }
18597 /* Determine whether the gp REG is really used. */ 20305 /* Determine whether the gp REG is really used. */
18598 20306
18599 static bool 20307 static bool
18600 rs6000_reg_live_or_pic_offset_p (int reg) 20308 rs6000_reg_live_or_pic_offset_p (int reg)
18601 { 20309 {
 18602 return ((df_regs_ever_live_p (reg) 20310 /* If the function calls eh_return, claim as used all the registers that would
20311 be checked for liveness otherwise. This is required for the PIC offset
20312 register with -mminimal-toc on AIX, as it is advertised as "fixed" for
20313 register allocation purposes in this case. */
20314
20315 return (((crtl->calls_eh_return || df_regs_ever_live_p (reg))
18603 && (!call_used_regs[reg] 20316 && (!call_used_regs[reg]
18604 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM 20317 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
20318 && !TARGET_SINGLE_PIC_BASE
18605 && TARGET_TOC && TARGET_MINIMAL_TOC))) 20319 && TARGET_TOC && TARGET_MINIMAL_TOC)))
18606 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM 20320 || (reg == RS6000_PIC_OFFSET_TABLE_REGNUM
20321 && !TARGET_SINGLE_PIC_BASE
18607 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0) 20322 && ((DEFAULT_ABI == ABI_V4 && flag_pic != 0)
18608 || (DEFAULT_ABI == ABI_DARWIN && flag_pic)))); 20323 || (DEFAULT_ABI == ABI_DARWIN && flag_pic))));
18609 }
18610
18611 enum {
18612 SAVRES_MULTIPLE = 0x1,
18613 SAVRES_INLINE_FPRS = 0x2,
18614 SAVRES_INLINE_GPRS = 0x4,
18615 SAVRES_NOINLINE_GPRS_SAVES_LR = 0x8,
18616 SAVRES_NOINLINE_FPRS_SAVES_LR = 0x10,
18617 SAVRES_NOINLINE_FPRS_DOESNT_RESTORE_LR = 0x20
18618 };
18619
18620 /* Determine the strategy for savings/restoring registers. */
18621
18622 static int
18623 rs6000_savres_strategy (rs6000_stack_t *info, bool savep,
18624 int using_static_chain_p, int sibcall)
18625 {
18626 bool using_multiple_p;
18627 bool common;
18628 bool savres_fprs_inline;
18629 bool savres_gprs_inline;
18630 bool noclobber_global_gprs
18631 = no_global_regs_above (info->first_gp_reg_save, /*gpr=*/true);
18632 int strategy;
18633
18634 using_multiple_p = (TARGET_MULTIPLE && ! TARGET_POWERPC64
18635 && (!TARGET_SPE_ABI
18636 || info->spe_64bit_regs_used == 0)
18637 && info->first_gp_reg_save < 31
18638 && noclobber_global_gprs);
18639 /* Don't bother to try to save things out-of-line if r11 is occupied
18640 by the static chain. It would require too much fiddling and the
18641 static chain is rarely used anyway. */
18642 common = (using_static_chain_p
18643 || sibcall
18644 || crtl->calls_eh_return
18645 || !info->lr_save_p
18646 || cfun->machine->ra_need_lr
18647 || info->total_size > 32767);
18648 savres_fprs_inline = (common
18649 || info->first_fp_reg_save == 64
18650 || !no_global_regs_above (info->first_fp_reg_save,
18651 /*gpr=*/false)
18652 /* The out-of-line FP routines use
18653 double-precision stores; we can't use those
18654 routines if we don't have such stores. */
18655 || (TARGET_HARD_FLOAT && !TARGET_DOUBLE_FLOAT)
18656 || FP_SAVE_INLINE (info->first_fp_reg_save));
18657 savres_gprs_inline = (common
18658 /* Saving CR interferes with the exit routines
18659 used on the SPE, so just punt here. */
18660 || (!savep
18661 && TARGET_SPE_ABI
18662 && info->spe_64bit_regs_used != 0
18663 && info->cr_save_p != 0)
18664 || info->first_gp_reg_save == 32
18665 || !noclobber_global_gprs
18666 || GP_SAVE_INLINE (info->first_gp_reg_save));
18667
18668 if (savep)
18669 /* If we are going to use store multiple, then don't even bother
18670 with the out-of-line routines, since the store-multiple instruction
18671 will always be smaller. */
18672 savres_gprs_inline = savres_gprs_inline || using_multiple_p;
18673 else
18674 {
18675 /* The situation is more complicated with load multiple. We'd
18676 prefer to use the out-of-line routines for restores, since the
18677 "exit" out-of-line routines can handle the restore of LR and
18678 the frame teardown. But we can only use the out-of-line
18679 routines if we know that we've used store multiple or
18680 out-of-line routines in the prologue, i.e. if we've saved all
18681 the registers from first_gp_reg_save. Otherwise, we risk
18682 loading garbage from the stack. Furthermore, we can only use
18683 the "exit" out-of-line gpr restore if we haven't saved any
18684 fprs. */
18685 bool saved_all = !savres_gprs_inline || using_multiple_p;
18686
18687 if (saved_all && info->first_fp_reg_save != 64)
18688 /* We can't use the exit routine; use load multiple if it's
18689 available. */
18690 savres_gprs_inline = savres_gprs_inline || using_multiple_p;
18691 }
18692
18693 strategy = (using_multiple_p
18694 | (savres_fprs_inline << 1)
18695 | (savres_gprs_inline << 2));
18696 #ifdef POWERPC_LINUX
18697 if (TARGET_64BIT)
18698 {
18699 if (!savres_fprs_inline)
18700 strategy |= SAVRES_NOINLINE_FPRS_SAVES_LR;
18701 else if (!savres_gprs_inline && info->first_fp_reg_save == 64)
18702 strategy |= SAVRES_NOINLINE_GPRS_SAVES_LR;
18703 }
18704 #else
18705 if (TARGET_AIX && !savres_fprs_inline)
18706 strategy |= SAVRES_NOINLINE_FPRS_DOESNT_RESTORE_LR;
18707 #endif
18708 return strategy;
18709 } 20324 }
18710 20325
18711 /* Emit function prologue as insns. */ 20326 /* Emit function prologue as insns. */
18712 20327
18713 void 20328 void
18728 int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE 20343 int using_static_chain_p = (cfun->static_chain_decl != NULL_TREE
18729 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM) 20344 && df_regs_ever_live_p (STATIC_CHAIN_REGNUM)
18730 && call_used_regs[STATIC_CHAIN_REGNUM]); 20345 && call_used_regs[STATIC_CHAIN_REGNUM]);
18731 HOST_WIDE_INT sp_offset = 0; 20346 HOST_WIDE_INT sp_offset = 0;
18732 20347
20348 if (flag_stack_usage)
20349 current_function_static_stack_size = info->total_size;
20350
20351 if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK && info->total_size)
20352 rs6000_emit_probe_stack_range (STACK_CHECK_PROTECT, info->total_size);
20353
18733 if (TARGET_FIX_AND_CONTINUE) 20354 if (TARGET_FIX_AND_CONTINUE)
18734 { 20355 {
18735 /* gdb on darwin arranges to forward a function from the old 20356 /* gdb on darwin arranges to forward a function from the old
18736 address by modifying the first 5 instructions of the function 20357 address by modifying the first 5 instructions of the function
18737 to branch to the overriding function. This is necessary to 20358 to branch to the overriding function. This is necessary to
18748 { 20369 {
18749 reg_mode = V2SImode; 20370 reg_mode = V2SImode;
18750 reg_size = 8; 20371 reg_size = 8;
18751 } 20372 }
18752 20373
18753 strategy = rs6000_savres_strategy (info, /*savep=*/true, 20374 strategy = info->savres_strategy;
18754 /*static_chain_p=*/using_static_chain_p,
18755 /*sibcall=*/0);
18756 using_store_multiple = strategy & SAVRES_MULTIPLE; 20375 using_store_multiple = strategy & SAVRES_MULTIPLE;
18757 saving_FPRs_inline = strategy & SAVRES_INLINE_FPRS; 20376 saving_FPRs_inline = strategy & SAVE_INLINE_FPRS;
18758 saving_GPRs_inline = strategy & SAVRES_INLINE_GPRS; 20377 saving_GPRs_inline = strategy & SAVE_INLINE_GPRS;
18759 20378
18760 /* For V.4, update stack before we do any saving and set back pointer. */ 20379 /* For V.4, update stack before we do any saving and set back pointer. */
18761 if (! WORLD_SAVE_P (info) 20380 if (! WORLD_SAVE_P (info)
18762 && info->push_p 20381 && info->push_p
18763 && (DEFAULT_ABI == ABI_V4 20382 && (DEFAULT_ABI == ABI_V4
18923 20542
18924 insn = emit_move_insn (gen_rtx_REG (Pmode, 0), 20543 insn = emit_move_insn (gen_rtx_REG (Pmode, 0),
18925 gen_rtx_REG (Pmode, LR_REGNO)); 20544 gen_rtx_REG (Pmode, LR_REGNO));
18926 RTX_FRAME_RELATED_P (insn) = 1; 20545 RTX_FRAME_RELATED_P (insn) = 1;
18927 20546
18928 if (!(strategy & (SAVRES_NOINLINE_GPRS_SAVES_LR 20547 if (!(strategy & (SAVE_NOINLINE_GPRS_SAVES_LR
18929 | SAVRES_NOINLINE_FPRS_SAVES_LR))) 20548 | SAVE_NOINLINE_FPRS_SAVES_LR)))
18930 { 20549 {
18931 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx, 20550 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
18932 GEN_INT (info->lr_save_offset + sp_offset)); 20551 GEN_INT (info->lr_save_offset + sp_offset));
18933 reg = gen_rtx_REG (Pmode, 0); 20552 reg = gen_rtx_REG (Pmode, 0);
18934 mem = gen_rtx_MEM (Pmode, addr); 20553 mem = gen_rtx_MEM (Pmode, addr);
18984 par = rs6000_make_savres_rtx (info, frame_reg_rtx, 20603 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
18985 info->fp_save_offset + sp_offset, 20604 info->fp_save_offset + sp_offset,
18986 DFmode, 20605 DFmode,
18987 /*savep=*/true, /*gpr=*/false, 20606 /*savep=*/true, /*gpr=*/false,
18988 /*lr=*/(strategy 20607 /*lr=*/(strategy
18989 & SAVRES_NOINLINE_FPRS_SAVES_LR) 20608 & SAVE_NOINLINE_FPRS_SAVES_LR)
18990 != 0); 20609 != 0);
18991 insn = emit_insn (par); 20610 insn = emit_insn (par);
18992 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size, 20611 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
18993 NULL_RTX, NULL_RTX); 20612 NULL_RTX, NULL_RTX);
18994 } 20613 }
19111 par = rs6000_make_savres_rtx (info, frame_reg_rtx, 20730 par = rs6000_make_savres_rtx (info, frame_reg_rtx,
19112 info->gp_save_offset + sp_offset, 20731 info->gp_save_offset + sp_offset,
19113 reg_mode, 20732 reg_mode,
19114 /*savep=*/true, /*gpr=*/true, 20733 /*savep=*/true, /*gpr=*/true,
19115 /*lr=*/(strategy 20734 /*lr=*/(strategy
19116 & SAVRES_NOINLINE_GPRS_SAVES_LR) 20735 & SAVE_NOINLINE_GPRS_SAVES_LR)
19117 != 0); 20736 != 0);
19118 insn = emit_insn (par); 20737 insn = emit_insn (par);
19119 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size, 20738 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
19120 NULL_RTX, NULL_RTX); 20739 NULL_RTX, NULL_RTX);
19121 } 20740 }
19165 easiest way to get the frame unwind information emitted. */ 20784 easiest way to get the frame unwind information emitted. */
19166 if (crtl->calls_eh_return) 20785 if (crtl->calls_eh_return)
19167 { 20786 {
19168 unsigned int i, regno; 20787 unsigned int i, regno;
19169 20788
19170 /* In AIX ABI we need to pretend we save r2 here. */
19171 if (TARGET_AIX)
19172 {
19173 rtx addr, reg, mem;
19174
19175 reg = gen_rtx_REG (reg_mode, 2);
19176 addr = gen_rtx_PLUS (Pmode, frame_reg_rtx,
19177 GEN_INT (sp_offset + 5 * reg_size));
19178 mem = gen_frame_mem (reg_mode, addr);
19179
19180 insn = emit_move_insn (mem, reg);
19181 rs6000_frame_related (insn, frame_ptr_rtx, info->total_size,
19182 NULL_RTX, NULL_RTX);
19183 PATTERN (insn) = gen_blockage ();
19184 }
19185
19186 for (i = 0; ; ++i) 20789 for (i = 0; ; ++i)
19187 { 20790 {
19188 regno = EH_RETURN_DATA_REGNO (i); 20791 regno = EH_RETURN_DATA_REGNO (i);
19189 if (regno == INVALID_REGNUM) 20792 if (regno == INVALID_REGNUM)
19190 break; 20793 break;
19192 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno, 20795 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, regno,
19193 info->ehrd_offset + sp_offset 20796 info->ehrd_offset + sp_offset
19194 + reg_size * (int) i, 20797 + reg_size * (int) i,
19195 info->total_size); 20798 info->total_size);
19196 } 20799 }
20800 }
20801
20802 /* In AIX ABI we need to make sure r2 is really saved. */
20803 if (TARGET_AIX && crtl->calls_eh_return)
20804 {
20805 rtx tmp_reg, tmp_reg_si, hi, lo, compare_result, toc_save_done, jump;
20806 long toc_restore_insn;
20807
20808 gcc_assert (frame_reg_rtx == frame_ptr_rtx
20809 || frame_reg_rtx == sp_reg_rtx);
20810 tmp_reg = gen_rtx_REG (Pmode, 11);
20811 tmp_reg_si = gen_rtx_REG (SImode, 11);
20812 if (using_static_chain_p)
20813 emit_move_insn (gen_rtx_REG (Pmode, 0), tmp_reg);
20814 gcc_assert (saving_GPRs_inline && saving_FPRs_inline);
20815 emit_move_insn (tmp_reg, gen_rtx_REG (Pmode, LR_REGNO));
20816 /* Peek at instruction to which this function returns. If it's
20817 restoring r2, then we know we've already saved r2. We can't
20818 unconditionally save r2 because the value we have will already
20819 be updated if we arrived at this function via a plt call or
20820 toc adjusting stub. */
20821 emit_move_insn (tmp_reg_si, gen_rtx_MEM (SImode, tmp_reg));
20822 toc_restore_insn = TARGET_32BIT ? 0x80410014 : 0xE8410028;
20823 hi = gen_int_mode (toc_restore_insn & ~0xffff, SImode);
20824 emit_insn (gen_xorsi3 (tmp_reg_si, tmp_reg_si, hi));
20825 compare_result = gen_rtx_REG (CCUNSmode, CR0_REGNO);
20826 validate_condition_mode (EQ, CCUNSmode);
20827 lo = gen_int_mode (toc_restore_insn & 0xffff, SImode);
20828 emit_insn (gen_rtx_SET (VOIDmode, compare_result,
20829 gen_rtx_COMPARE (CCUNSmode, tmp_reg_si, lo)));
20830 toc_save_done = gen_label_rtx ();
20831 jump = gen_rtx_IF_THEN_ELSE (VOIDmode,
20832 gen_rtx_EQ (VOIDmode, compare_result,
20833 const0_rtx),
20834 gen_rtx_LABEL_REF (VOIDmode, toc_save_done),
20835 pc_rtx);
20836 jump = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx, jump));
20837 JUMP_LABEL (jump) = toc_save_done;
20838 LABEL_NUSES (toc_save_done) += 1;
20839
20840 emit_frame_save (frame_reg_rtx, frame_ptr_rtx, reg_mode, 2,
20841 sp_offset + 5 * reg_size, info->total_size);
20842 emit_label (toc_save_done);
20843 if (using_static_chain_p)
20844 emit_move_insn (tmp_reg, gen_rtx_REG (Pmode, 0));
19197 } 20845 }
19198 20846
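A note on the two magic numbers in this r2-saving sequence: they are the encodings of the TOC-restore instruction the function expects to return to, 0x80410014 being lwz r2,20(r1) and 0xE8410028 being ld r2,40(r1). If the instruction at the return address is not that restore, r2 is saved at its dedicated slot (offset 5 * reg_size, i.e. 20(r1) or 40(r1)); if it is, the PLT call or TOC-adjusting stub path has already saved r2 there, so the store is skipped.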
19199 /* Save CR if we use any that must be preserved. */ 20847 /* Save CR if we use any that must be preserved. */
19200 if (!WORLD_SAVE_P (info) && info->cr_save_p) 20848 if (!WORLD_SAVE_P (info) && info->cr_save_p)
19201 { 20849 {
19322 /* Include the registers in the mask. */ 20970 /* Include the registers in the mask. */
19323 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask))); 20971 emit_insn (gen_iorsi3 (reg, reg, GEN_INT ((int) info->vrsave_mask)));
19324 20972
19325 insn = emit_insn (generate_set_vrsave (reg, info, 0)); 20973 insn = emit_insn (generate_set_vrsave (reg, info, 0));
19326 } 20974 }
20975
20976 if (TARGET_SINGLE_PIC_BASE)
20977 return; /* Do not set PIC register */
19327 20978
19328 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */ 20979 /* If we are using RS6000_PIC_OFFSET_TABLE_REGNUM, we need to set it up. */
19329 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0) 20980 if ((TARGET_TOC && TARGET_MINIMAL_TOC && get_pool_size () != 0)
19330 || (DEFAULT_ABI == ABI_V4 20981 || (DEFAULT_ABI == ABI_V4
19331 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT)) 20982 && (flag_pic == 1 || (flag_pic && TARGET_SECURE_PLT))
19390 if (TARGET_DEBUG_STACK) 21041 if (TARGET_DEBUG_STACK)
19391 debug_stack_info (info); 21042 debug_stack_info (info);
19392 21043
19393 /* Write .extern for any function we will call to save and restore 21044 /* Write .extern for any function we will call to save and restore
19394 fp values. */ 21045 fp values. */
19395 if (info->first_fp_reg_save < 64 21046 if (info->first_fp_reg_save < 64)
19396 && !FP_SAVE_INLINE (info->first_fp_reg_save))
19397 { 21047 {
19398 char *name; 21048 char *name;
19399 int regno = info->first_fp_reg_save - 32; 21049 int regno = info->first_fp_reg_save - 32;
19400 21050
19401 name = rs6000_savres_routine_name (info, regno, /*savep=*/true, 21051 if ((info->savres_strategy & SAVE_INLINE_FPRS) == 0)
19402 /*gpr=*/false, /*lr=*/false); 21052 {
19403 fprintf (file, "\t.extern %s\n", name); 21053 name = rs6000_savres_routine_name (info, regno, /*savep=*/true,
19404 21054 /*gpr=*/false, /*lr=*/false);
19405 name = rs6000_savres_routine_name (info, regno, /*savep=*/false, 21055 fprintf (file, "\t.extern %s\n", name);
19406 /*gpr=*/false, /*lr=*/true); 21056 }
19407 fprintf (file, "\t.extern %s\n", name); 21057 if ((info->savres_strategy & REST_INLINE_FPRS) == 0)
21058 {
21059 name = rs6000_savres_routine_name (info, regno, /*savep=*/false,
21060 /*gpr=*/false, /*lr=*/true);
21061 fprintf (file, "\t.extern %s\n", name);
21062 }
19408 } 21063 }
19409 21064
19410 /* Write .extern for AIX common mode routines, if needed. */ 21065 /* Write .extern for AIX common mode routines, if needed. */
19411 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined) 21066 if (! TARGET_POWER && ! TARGET_POWERPC && ! common_mode_defined)
19412 { 21067 {
19548 { 21203 {
19549 reg_mode = V2SImode; 21204 reg_mode = V2SImode;
19550 reg_size = 8; 21205 reg_size = 8;
19551 } 21206 }
19552 21207
19553 strategy = rs6000_savres_strategy (info, /*savep=*/false, 21208 strategy = info->savres_strategy;
19554 /*static_chain_p=*/0, sibcall);
19555 using_load_multiple = strategy & SAVRES_MULTIPLE; 21209 using_load_multiple = strategy & SAVRES_MULTIPLE;
19556 restoring_FPRs_inline = strategy & SAVRES_INLINE_FPRS; 21210 restoring_FPRs_inline = sibcall || (strategy & REST_INLINE_FPRS);
19557 restoring_GPRs_inline = strategy & SAVRES_INLINE_GPRS; 21211 restoring_GPRs_inline = sibcall || (strategy & REST_INLINE_GPRS);
19558 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601 21212 using_mtcr_multiple = (rs6000_cpu == PROCESSOR_PPC601
19559 || rs6000_cpu == PROCESSOR_PPC603 21213 || rs6000_cpu == PROCESSOR_PPC603
19560 || rs6000_cpu == PROCESSOR_PPC750 21214 || rs6000_cpu == PROCESSOR_PPC750
19561 || optimize_size); 21215 || optimize_size);
19562 /* Restore via the backchain when we have a large frame, since this 21216 /* Restore via the backchain when we have a large frame, since this
19570 > 32767 21224 > 32767
19571 || (cfun->calls_alloca 21225 || (cfun->calls_alloca
19572 && !frame_pointer_needed)); 21226 && !frame_pointer_needed));
19573 restore_lr = (info->lr_save_p 21227 restore_lr = (info->lr_save_p
19574 && (restoring_FPRs_inline 21228 && (restoring_FPRs_inline
19575 || (strategy & SAVRES_NOINLINE_FPRS_DOESNT_RESTORE_LR)) 21229 || (strategy & REST_NOINLINE_FPRS_DOESNT_RESTORE_LR))
19576 && (restoring_GPRs_inline 21230 && (restoring_GPRs_inline
19577 || info->first_fp_reg_save < 64)); 21231 || info->first_fp_reg_save < 64));
19578 21232
19579 if (WORLD_SAVE_P (info)) 21233 if (WORLD_SAVE_P (info))
19580 { 21234 {
19782 else if (frame_pointer_needed) 21436 else if (frame_pointer_needed)
19783 { 21437 {
19784 frame_reg_rtx = sp_reg_rtx; 21438 frame_reg_rtx = sp_reg_rtx;
19785 if (DEFAULT_ABI == ABI_V4) 21439 if (DEFAULT_ABI == ABI_V4)
19786 frame_reg_rtx = gen_rtx_REG (Pmode, 11); 21440 frame_reg_rtx = gen_rtx_REG (Pmode, 11);
21441 /* Prevent reordering memory accesses against stack pointer restore. */
21442 else if (cfun->calls_alloca
21443 || offset_below_red_zone_p (-info->total_size))
21444 {
21445 rtx mem1 = gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx);
21446 rtx mem2 = gen_rtx_MEM (BLKmode, sp_reg_rtx);
21447 MEM_NOTRAP_P (mem1) = 1;
21448 MEM_NOTRAP_P (mem2) = 1;
21449 emit_insn (gen_frame_tie (mem1, mem2));
21450 }
19787 21451
19788 insn = emit_insn (gen_add3_insn (frame_reg_rtx, hard_frame_pointer_rtx, 21452 insn = emit_insn (gen_add3_insn (frame_reg_rtx, hard_frame_pointer_rtx,
19789 GEN_INT (info->total_size))); 21453 GEN_INT (info->total_size)));
19790 sp_offset = 0; 21454 sp_offset = 0;
19791 } 21455 }
19792 else if (info->push_p 21456 else if (info->push_p
19793 && DEFAULT_ABI != ABI_V4 21457 && DEFAULT_ABI != ABI_V4
19794 && !crtl->calls_eh_return) 21458 && !crtl->calls_eh_return)
19795 { 21459 {
21460 /* Prevent reordering memory accesses against stack pointer restore. */
21461 if (cfun->calls_alloca
21462 || offset_below_red_zone_p (-info->total_size))
21463 {
21464 rtx mem = gen_rtx_MEM (BLKmode, sp_reg_rtx);
21465 MEM_NOTRAP_P (mem) = 1;
21466 emit_insn (gen_stack_tie (mem));
21467 }
19796 insn = emit_insn (gen_add3_insn (sp_reg_rtx, sp_reg_rtx, 21468 insn = emit_insn (gen_add3_insn (sp_reg_rtx, sp_reg_rtx,
19797 GEN_INT (info->total_size))); 21469 GEN_INT (info->total_size)));
19798 sp_offset = 0; 21470 sp_offset = 0;
19799 } 21471 }
19800 if (insn && frame_reg_rtx == sp_reg_rtx) 21472 if (insn && frame_reg_rtx == sp_reg_rtx)
20191 } 21863 }
20192 21864
20193 if (!sibcall) 21865 if (!sibcall)
20194 { 21866 {
20195 rtvec p; 21867 rtvec p;
20196 bool lr = (strategy & SAVRES_NOINLINE_FPRS_DOESNT_RESTORE_LR) == 0; 21868 bool lr = (strategy & REST_NOINLINE_FPRS_DOESNT_RESTORE_LR) == 0;
20197 if (! restoring_FPRs_inline) 21869 if (! restoring_FPRs_inline)
20198 p = rtvec_alloc (4 + 64 - info->first_fp_reg_save); 21870 p = rtvec_alloc (4 + 64 - info->first_fp_reg_save);
20199 else 21871 else
20200 p = rtvec_alloc (2); 21872 p = rtvec_alloc (2);
20201 21873
20422 list. */ 22094 list. */
20423 tree decl; 22095 tree decl;
20424 int next_parm_info_bit = 31; 22096 int next_parm_info_bit = 31;
20425 22097
20426 for (decl = DECL_ARGUMENTS (current_function_decl); 22098 for (decl = DECL_ARGUMENTS (current_function_decl);
20427 decl; decl = TREE_CHAIN (decl)) 22099 decl; decl = DECL_CHAIN (decl))
20428 { 22100 {
20429 rtx parameter = DECL_INCOMING_RTL (decl); 22101 rtx parameter = DECL_INCOMING_RTL (decl);
20430 enum machine_mode mode = GET_MODE (parameter); 22102 enum machine_mode mode = GET_MODE (parameter);
20431 22103
20432 if (GET_CODE (parameter) == REG) 22104 if (GET_CODE (parameter) == REG)
20501 fprintf (file, "\t.long %d\n", parm_info); 22173 fprintf (file, "\t.long %d\n", parm_info);
20502 22174
20503 /* Offset from start of code to tb table. */ 22175 /* Offset from start of code to tb table. */
20504 fputs ("\t.long ", file); 22176 fputs ("\t.long ", file);
20505 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT"); 22177 ASM_OUTPUT_INTERNAL_LABEL_PREFIX (file, "LT");
20506 if (TARGET_AIX) 22178 RS6000_OUTPUT_BASENAME (file, fname);
20507 RS6000_OUTPUT_BASENAME (file, fname);
20508 else
20509 assemble_name (file, fname);
20510 putc ('-', file); 22179 putc ('-', file);
20511 rs6000_output_function_entry (file, fname); 22180 rs6000_output_function_entry (file, fname);
20512 putc ('\n', file); 22181 putc ('\n', file);
20513 22182
20514 /* Interrupt handler mask. */ 22183 /* Interrupt handler mask. */
20864 if (TARGET_TOC && GET_CODE (x) != LABEL_REF) 22533 if (TARGET_TOC && GET_CODE (x) != LABEL_REF)
20865 { 22534 {
20866 struct toc_hash_struct *h; 22535 struct toc_hash_struct *h;
20867 void * * found; 22536 void * * found;
20868 22537
20869 /* Create toc_hash_table. This can't be done at OVERRIDE_OPTIONS 22538 /* Create toc_hash_table. This can't be done at TARGET_OPTION_OVERRIDE
20870 time because GGC is not initialized at that point. */ 22539 time because GGC is not initialized at that point. */
20871 if (toc_hash_table == NULL) 22540 if (toc_hash_table == NULL)
20872 toc_hash_table = htab_create_ggc (1021, toc_hash_function, 22541 toc_hash_table = htab_create_ggc (1021, toc_hash_function,
20873 toc_hash_eq, NULL); 22542 toc_hash_eq, NULL);
20874 22543
20875 h = GGC_NEW (struct toc_hash_struct); 22544 h = ggc_alloc_toc_hash_struct ();
20876 h->key = x; 22545 h->key = x;
20877 h->key_mode = mode; 22546 h->key_mode = mode;
20878 h->labelno = labelno; 22547 h->labelno = labelno;
20879 22548
20880 found = htab_find_slot (toc_hash_table, h, INSERT); 22549 found = htab_find_slot (toc_hash_table, h, INSERT);
22100 case CPU_CELL: 23769 case CPU_CELL:
22101 case CPU_PPCE300C2: 23770 case CPU_PPCE300C2:
22102 case CPU_PPCE300C3: 23771 case CPU_PPCE300C3:
22103 case CPU_PPCE500MC: 23772 case CPU_PPCE500MC:
22104 case CPU_PPCE500MC64: 23773 case CPU_PPCE500MC64:
23774 case CPU_TITAN:
22105 return 2; 23775 return 2;
22106 case CPU_RIOS2: 23776 case CPU_RIOS2:
22107 case CPU_PPC476: 23777 case CPU_PPC476:
22108 case CPU_PPC604: 23778 case CPU_PPC604:
22109 case CPU_PPC604E: 23779 case CPU_PPC604E:
23751 else 25421 else
23752 { 25422 {
23753 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl)); 25423 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (decl));
23754 25424
23755 if (size > 0 25425 if (size > 0
23756 && (unsigned HOST_WIDE_INT) size <= g_switch_value 25426 && size <= g_switch_value
23757 /* If it's not public, and we're not going to reference it there, 25427 /* If it's not public, and we're not going to reference it there,
23758 there's no need to put it in the small data section. */ 25428 there's no need to put it in the small data section. */
23759 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl))) 25429 && (rs6000_sdata != SDATA_DATA || TREE_PUBLIC (decl)))
23760 return true; 25430 return true;
23761 } 25431 }
23808 fatal_insn ("bad address", op); 25478 fatal_insn ("bad address", op);
23809 } 25479 }
23810 25480
23811 #if TARGET_MACHO 25481 #if TARGET_MACHO
23812 25482
23813 static tree branch_island_list = 0; 25483 typedef struct branch_island_d {
25484 tree function_name;
25485 tree label_name;
25486 int line_number;
25487 } branch_island;
25488
25489 DEF_VEC_O(branch_island);
25490 DEF_VEC_ALLOC_O(branch_island,gc);
25491
25492 static VEC(branch_island,gc) *branch_islands;
23814 25493
23815 /* Remember to generate a branch island for far calls to the given 25494 /* Remember to generate a branch island for far calls to the given
23816 function. */ 25495 function. */
23817 25496
23818 static void 25497 static void
23819 add_compiler_branch_island (tree label_name, tree function_name, 25498 add_compiler_branch_island (tree label_name, tree function_name,
23820 int line_number) 25499 int line_number)
23821 { 25500 {
23822 tree branch_island = build_tree_list (function_name, label_name); 25501 branch_island *bi = VEC_safe_push (branch_island, gc, branch_islands, NULL);
23823 TREE_TYPE (branch_island) = build_int_cst (NULL_TREE, line_number); 25502
23824 TREE_CHAIN (branch_island) = branch_island_list; 25503 bi->function_name = function_name;
23825 branch_island_list = branch_island; 25504 bi->label_name = label_name;
23826 } 25505 bi->line_number = line_number;
23827 25506 }
23828 #define BRANCH_ISLAND_LABEL_NAME(BRANCH_ISLAND) TREE_VALUE (BRANCH_ISLAND) 25507
23829 #define BRANCH_ISLAND_FUNCTION_NAME(BRANCH_ISLAND) TREE_PURPOSE (BRANCH_ISLAND) 25508 /* Generate far-jump branch islands for everything recorded in
23830 #define BRANCH_ISLAND_LINE_NUMBER(BRANCH_ISLAND) \ 25509 branch_islands. Invoked immediately after the last instruction of
23831 TREE_INT_CST_LOW (TREE_TYPE (BRANCH_ISLAND)) 25510 the epilogue has been emitted; the branch islands must be appended
23832 25511 to, and contiguous with, the function body. Mach-O stubs are
23833 /* Generate far-jump branch islands for everything on the 25512 generated in machopic_output_stub(). */
23834 branch_island_list. Invoked immediately after the last instruction
23835 of the epilogue has been emitted; the branch-islands must be
23836 appended to, and contiguous with, the function body. Mach-O stubs
23837 are generated in machopic_output_stub(). */
23838 25513
23839 static void 25514 static void
23840 macho_branch_islands (void) 25515 macho_branch_islands (void)
23841 { 25516 {
23842 char tmp_buf[512]; 25517 char tmp_buf[512];
23843 tree branch_island; 25518
23844 25519 while (!VEC_empty (branch_island, branch_islands))
23845 for (branch_island = branch_island_list; 25520 {
23846 branch_island; 25521 branch_island *bi = VEC_last (branch_island, branch_islands);
23847 branch_island = TREE_CHAIN (branch_island)) 25522 const char *label = IDENTIFIER_POINTER (bi->label_name);
23848 { 25523 const char *name = IDENTIFIER_POINTER (bi->function_name);
23849 const char *label =
23850 IDENTIFIER_POINTER (BRANCH_ISLAND_LABEL_NAME (branch_island));
23851 const char *name =
23852 IDENTIFIER_POINTER (BRANCH_ISLAND_FUNCTION_NAME (branch_island));
23853 char name_buf[512]; 25524 char name_buf[512];
23854 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */ 25525 /* Cheap copy of the details from the Darwin ASM_OUTPUT_LABELREF(). */
23855 if (name[0] == '*' || name[0] == '&') 25526 if (name[0] == '*' || name[0] == '&')
23856 strcpy (name_buf, name+1); 25527 strcpy (name_buf, name+1);
23857 else 25528 else
23861 } 25532 }
23862 strcpy (tmp_buf, "\n"); 25533 strcpy (tmp_buf, "\n");
23863 strcat (tmp_buf, label); 25534 strcat (tmp_buf, label);
23864 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO) 25535 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
23865 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG) 25536 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
23866 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island)); 25537 dbxout_stabd (N_SLINE, bi->line_number);
23867 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */ 25538 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
23868 if (flag_pic) 25539 if (flag_pic)
23869 { 25540 {
23870 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,"); 25541 strcat (tmp_buf, ":\n\tmflr r0\n\tbcl 20,31,");
23871 strcat (tmp_buf, label); 25542 strcat (tmp_buf, label);
23898 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr"); 25569 strcat (tmp_buf, ")\n\tmtctr r12\n\tbctr");
23899 } 25570 }
23900 output_asm_insn (tmp_buf, 0); 25571 output_asm_insn (tmp_buf, 0);
23901 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO) 25572 #if defined (DBX_DEBUGGING_INFO) || defined (XCOFF_DEBUGGING_INFO)
23902 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG) 25573 if (write_symbols == DBX_DEBUG || write_symbols == XCOFF_DEBUG)
23903 dbxout_stabd (N_SLINE, BRANCH_ISLAND_LINE_NUMBER (branch_island)); 25574 dbxout_stabd (N_SLINE, bi->line_number);
23904 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */ 25575 #endif /* DBX_DEBUGGING_INFO || XCOFF_DEBUGGING_INFO */
23905 } 25576 VEC_pop (branch_island, branch_islands);
23906 25577 }
23907 branch_island_list = 0;
23908 } 25578 }
23909 25579
23910 /* NO_PREVIOUS_DEF checks in the list of branch islands whether the function name is 25580
23911 already there or not. */ 25581 already there or not. */
23912 25582
23913 static int 25583 static int
23914 no_previous_def (tree function_name) 25584 no_previous_def (tree function_name)
23915 { 25585 {
23916 tree branch_island; 25586 branch_island *bi;
23917 for (branch_island = branch_island_list; 25587 unsigned ix;
23918 branch_island; 25588
23919 branch_island = TREE_CHAIN (branch_island)) 25589 FOR_EACH_VEC_ELT (branch_island, branch_islands, ix, bi)
23920 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island)) 25590 if (function_name == bi->function_name)
23921 return 0; 25591 return 0;
23922 return 1; 25592 return 1;
23923 } 25593 }
23924 25594
23925 /* GET_PREV_LABEL gets the label name from the previous definition of 25595 /* GET_PREV_LABEL gets the label name from the previous definition of
23926 the function. */ 25596 the function. */
23927 25597
23928 static tree 25598 static tree
23929 get_prev_label (tree function_name) 25599 get_prev_label (tree function_name)
23930 { 25600 {
23931 tree branch_island; 25601 branch_island *bi;
23932 for (branch_island = branch_island_list; 25602 unsigned ix;
23933 branch_island; 25603
23934 branch_island = TREE_CHAIN (branch_island)) 25604 FOR_EACH_VEC_ELT (branch_island, branch_islands, ix, bi)
23935 if (function_name == BRANCH_ISLAND_FUNCTION_NAME (branch_island)) 25605 if (function_name == bi->function_name)
23936 return BRANCH_ISLAND_LABEL_NAME (branch_island); 25606 return bi->label_name;
23937 return 0; 25607 return NULL_TREE;
23938 } 25608 }
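The branch-island bookkeeping now uses GCC's typed object vectors from vec.h instead of a TREE_LIST chain, which removes the per-entry tree allocations and the accessor macros. For reference, the idiom is the one used above; a minimal sketch with made-up field names (assuming the GCC 4.6 vec.h macros):

    typedef struct item_d { tree name; int value; } item;
    DEF_VEC_O (item);                 /* vector of objects, stored by value */
    DEF_VEC_ALLOC_O (item, gc);       /* vector itself lives in GC memory   */

    static VEC(item,gc) *items;

    static void
    push_item (tree name, int value)
    {
      item *slot = VEC_safe_push (item, gc, items, NULL);  /* grow, get new slot */
      slot->name = name;
      slot->value = value;
    }

    /* Walk it:   FOR_EACH_VEC_ELT (item, items, ix, p) ...
       Drain it:  while (!VEC_empty (item, items))
                    { p = VEC_last (item, items); ...; VEC_pop (item, items); }  */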
23939
23940 #ifndef DARWIN_LINKER_GENERATES_ISLANDS
23941 #define DARWIN_LINKER_GENERATES_ISLANDS 0
23942 #endif
23943
23944 /* KEXTs still need branch islands. */
23945 #define DARWIN_GENERATE_ISLANDS (!DARWIN_LINKER_GENERATES_ISLANDS \
23946 || flag_mkernel || flag_apple_kext)
23947 25609
23948 /* INSN is either a function call or a millicode call. It may have an 25610 /* INSN is either a function call or a millicode call. It may have an
23949 unconditional jump in its delay slot. 25611 unconditional jump in its delay slot.
23950 25612
23951 CALL_DEST is the routine we are calling. */ 25613 CALL_DEST is the routine we are calling. */
23953 char * 25615 char *
23954 output_call (rtx insn, rtx *operands, int dest_operand_number, 25616 output_call (rtx insn, rtx *operands, int dest_operand_number,
23955 int cookie_operand_number) 25617 int cookie_operand_number)
23956 { 25618 {
23957 static char buf[256]; 25619 static char buf[256];
23958 if (DARWIN_GENERATE_ISLANDS 25620 if (darwin_emit_branch_islands
23959 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF 25621 && GET_CODE (operands[dest_operand_number]) == SYMBOL_REF
23960 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG)) 25622 && (INTVAL (operands[cookie_operand_number]) & CALL_LONG))
23961 { 25623 {
23962 tree labelname; 25624 tree labelname;
23963 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0)); 25625 tree funname = get_identifier (XSTR (operands[dest_operand_number], 0));
24320 } 25982 }
24321 ASM_OUTPUT_LABEL (file, name); 25983 ASM_OUTPUT_LABEL (file, name);
24322 } 25984 }
24323 25985
24324 static void 25986 static void
24325 rs6000_elf_end_indicate_exec_stack (void) 25987 rs6000_elf_file_end (void)
24326 { 25988 {
25989 #ifdef HAVE_AS_GNU_ATTRIBUTE
25990 if (TARGET_32BIT && DEFAULT_ABI == ABI_V4)
25991 {
25992 if (rs6000_passes_float)
25993 fprintf (asm_out_file, "\t.gnu_attribute 4, %d\n",
25994 ((TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_DOUBLE_FLOAT) ? 1
25995 : (TARGET_HARD_FLOAT && TARGET_FPRS && TARGET_SINGLE_FLOAT) ? 3
25996 : 2));
25997 if (rs6000_passes_vector)
25998 fprintf (asm_out_file, "\t.gnu_attribute 8, %d\n",
25999 (TARGET_ALTIVEC_ABI ? 2
26000 : TARGET_SPE_ABI ? 3
26001 : 1));
26002 if (rs6000_returns_struct)
26003 fprintf (asm_out_file, "\t.gnu_attribute 12, %d\n",
26004 aix_struct_return ? 2 : 1);
26005 }
26006 #endif
26007 #ifdef POWERPC_LINUX
24327 if (TARGET_32BIT) 26008 if (TARGET_32BIT)
24328 file_end_indicate_exec_stack (); 26009 file_end_indicate_exec_stack ();
26010 #endif
24329 } 26011 }
24330 #endif 26012 #endif
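The ELF file-end hook now also records which ABI variants the object actually uses, via .gnu_attribute tags 4 (scalar float passing), 8 (vector passing) and 12 (struct return), so the linker can flag incompatible mixes. A rough illustration (not from the patch; exactly when rs6000_passes_float gets set is an assumption here):

    /* Compiled -m32 on a V4/ELF target with hard double-precision float,
       passing a double makes rs6000_passes_float true, so the file-end hook
       appends roughly:  .gnu_attribute 4, 1   to the generated assembly.  */
    double
    scale (double x)
    {
      return 2.0 * x;
    }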
24331 26013
24332 #if TARGET_XCOFF 26014 #if TARGET_XCOFF
24333 static void 26015 static void
24624 || outer_code == MOD || outer_code == UMOD) 26306 || outer_code == MOD || outer_code == UMOD)
24625 && exact_log2 (INTVAL (x)) >= 0) 26307 && exact_log2 (INTVAL (x)) >= 0)
24626 || (outer_code == COMPARE 26308 || (outer_code == COMPARE
24627 && (satisfies_constraint_I (x) 26309 && (satisfies_constraint_I (x)
24628 || satisfies_constraint_K (x))) 26310 || satisfies_constraint_K (x)))
24629 || (outer_code == EQ 26311 || ((outer_code == EQ || outer_code == NE)
24630 && (satisfies_constraint_I (x) 26312 && (satisfies_constraint_I (x)
24631 || satisfies_constraint_K (x) 26313 || satisfies_constraint_K (x)
24632 || (mode == SImode 26314 || (mode == SImode
24633 ? satisfies_constraint_L (x) 26315 ? satisfies_constraint_L (x)
24634 : satisfies_constraint_J (x)))) 26316 : satisfies_constraint_J (x))))
24691 case LABEL_REF: 26373 case LABEL_REF:
24692 *total = 0; 26374 *total = 0;
24693 return true; 26375 return true;
24694 26376
24695 case PLUS: 26377 case PLUS:
24696 if (mode == DFmode)
24697 {
24698 if (GET_CODE (XEXP (x, 0)) == MULT)
24699 {
24700 /* FNMA accounted in outer NEG. */
24701 if (outer_code == NEG)
24702 *total = rs6000_cost->dmul - rs6000_cost->fp;
24703 else
24704 *total = rs6000_cost->dmul;
24705 }
24706 else
24707 *total = rs6000_cost->fp;
24708 }
24709 else if (mode == SFmode)
24710 {
24711 /* FNMA accounted in outer NEG. */
24712 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
24713 *total = 0;
24714 else
24715 *total = rs6000_cost->fp;
24716 }
24717 else
24718 *total = COSTS_N_INSNS (1);
24719 return false;
24720
24721 case MINUS: 26378 case MINUS:
24722 if (mode == DFmode) 26379 if (FLOAT_MODE_P (mode))
24723 { 26380 *total = rs6000_cost->fp;
24724 if (GET_CODE (XEXP (x, 0)) == MULT
24725 || GET_CODE (XEXP (x, 1)) == MULT)
24726 {
24727 /* FNMA accounted in outer NEG. */
24728 if (outer_code == NEG)
24729 *total = rs6000_cost->dmul - rs6000_cost->fp;
24730 else
24731 *total = rs6000_cost->dmul;
24732 }
24733 else
24734 *total = rs6000_cost->fp;
24735 }
24736 else if (mode == SFmode)
24737 {
24738 /* FNMA accounted in outer NEG. */
24739 if (outer_code == NEG && GET_CODE (XEXP (x, 0)) == MULT)
24740 *total = 0;
24741 else
24742 *total = rs6000_cost->fp;
24743 }
24744 else 26381 else
24745 *total = COSTS_N_INSNS (1); 26382 *total = COSTS_N_INSNS (1);
24746 return false; 26383 return false;
24747 26384
24748 case MULT: 26385 case MULT:
24753 && INTVAL (XEXP (x, 1)) <= 255) 26390 && INTVAL (XEXP (x, 1)) <= 255)
24754 *total = rs6000_cost->mulsi_const9; 26391 *total = rs6000_cost->mulsi_const9;
24755 else 26392 else
24756 *total = rs6000_cost->mulsi_const; 26393 *total = rs6000_cost->mulsi_const;
24757 } 26394 }
24758 /* FMA accounted in outer PLUS/MINUS. */
24759 else if ((mode == DFmode || mode == SFmode)
24760 && (outer_code == PLUS || outer_code == MINUS))
24761 *total = 0;
24762 else if (mode == DFmode)
24763 *total = rs6000_cost->dmul;
24764 else if (mode == SFmode) 26395 else if (mode == SFmode)
24765 *total = rs6000_cost->fp; 26396 *total = rs6000_cost->fp;
26397 else if (FLOAT_MODE_P (mode))
26398 *total = rs6000_cost->dmul;
24766 else if (mode == DImode) 26399 else if (mode == DImode)
24767 *total = rs6000_cost->muldi; 26400 *total = rs6000_cost->muldi;
24768 else 26401 else
24769 *total = rs6000_cost->mulsi; 26402 *total = rs6000_cost->mulsi;
24770 return false; 26403 return false;
26404
26405 case FMA:
26406 if (mode == SFmode)
26407 *total = rs6000_cost->fp;
26408 else
26409 *total = rs6000_cost->dmul;
26410 break;
24771 26411
24772 case DIV: 26412 case DIV:
24773 case MOD: 26413 case MOD:
24774 if (FLOAT_MODE_P (mode)) 26414 if (FLOAT_MODE_P (mode))
24775 { 26415 {
24807 case FFS: 26447 case FFS:
24808 *total = COSTS_N_INSNS (4); 26448 *total = COSTS_N_INSNS (4);
24809 return false; 26449 return false;
24810 26450
24811 case POPCOUNT: 26451 case POPCOUNT:
24812 *total = COSTS_N_INSNS (6); 26452 *total = COSTS_N_INSNS (TARGET_POPCNTD ? 1 : 6);
26453 return false;
26454
26455 case PARITY:
26456 *total = COSTS_N_INSNS (TARGET_CMPB ? 2 : 6);
24813 return false; 26457 return false;
24814 26458
24815 case NOT: 26459 case NOT:
24816 if (outer_code == AND || outer_code == IOR || outer_code == XOR) 26460 if (outer_code == AND || outer_code == IOR || outer_code == XOR)
24817 { 26461 {
25002 26646
25003 26647
25004 /* A C expression returning the cost of moving data from a register of class 26648 /* A C expression returning the cost of moving data from a register of class
25005 CLASS1 to one of CLASS2. */ 26649 CLASS1 to one of CLASS2. */
25006 26650
25007 int 26651 static int
25008 rs6000_register_move_cost (enum machine_mode mode, 26652 rs6000_register_move_cost (enum machine_mode mode,
25009 enum reg_class from, enum reg_class to) 26653 reg_class_t from, reg_class_t to)
25010 { 26654 {
25011 int ret; 26655 int ret;
25012 26656
25013 /* Moves from/to GENERAL_REGS. */ 26657 /* Moves from/to GENERAL_REGS. */
25014 if (reg_classes_intersect_p (to, GENERAL_REGS) 26658 if (reg_classes_intersect_p (to, GENERAL_REGS)
25016 { 26660 {
25017 if (! reg_classes_intersect_p (to, GENERAL_REGS)) 26661 if (! reg_classes_intersect_p (to, GENERAL_REGS))
25018 from = to; 26662 from = to;
25019 26663
25020 if (from == FLOAT_REGS || from == ALTIVEC_REGS || from == VSX_REGS) 26664 if (from == FLOAT_REGS || from == ALTIVEC_REGS || from == VSX_REGS)
25021 ret = (rs6000_memory_move_cost (mode, from, 0) 26665 ret = (rs6000_memory_move_cost (mode, from, false)
25022 + rs6000_memory_move_cost (mode, GENERAL_REGS, 0)); 26666 + rs6000_memory_move_cost (mode, GENERAL_REGS, false));
25023 26667
25024 /* It's more expensive to move CR_REGS than CR0_REGS because of the 26668 /* It's more expensive to move CR_REGS than CR0_REGS because of the
25025 shift. */ 26669 shift. */
25026 else if (from == CR_REGS) 26670 else if (from == CR_REGS)
25027 ret = 4; 26671 ret = 4;
25062 } 26706 }
25063 26707
25064 /* A C expression returning the cost of moving data of MODE from a register to 26708
25065 or from memory. */ 26709 or from memory. */
25066 26710
25067 int 26711 static int
25068 rs6000_memory_move_cost (enum machine_mode mode, enum reg_class rclass, 26712 rs6000_memory_move_cost (enum machine_mode mode, reg_class_t rclass,
25069 int in ATTRIBUTE_UNUSED) 26713 bool in ATTRIBUTE_UNUSED)
25070 { 26714 {
25071 int ret; 26715 int ret;
25072 26716
25073 if (reg_classes_intersect_p (rclass, GENERAL_REGS)) 26717 if (reg_classes_intersect_p (rclass, GENERAL_REGS))
25074 ret = 4 * hard_regno_nregs[0][mode]; 26718 ret = 4 * hard_regno_nregs[0][mode];
25092 26736
25093 static tree 26737 static tree
25094 rs6000_builtin_reciprocal (unsigned int fn, bool md_fn, 26738 rs6000_builtin_reciprocal (unsigned int fn, bool md_fn,
25095 bool sqrt ATTRIBUTE_UNUSED) 26739 bool sqrt ATTRIBUTE_UNUSED)
25096 { 26740 {
25097 if (! (TARGET_RECIP && TARGET_PPC_GFXOPT && !optimize_size 26741 if (optimize_insn_for_size_p ())
25098 && flag_finite_math_only && !flag_trapping_math
25099 && flag_unsafe_math_optimizations))
25100 return NULL_TREE; 26742 return NULL_TREE;
25101 26743
25102 if (md_fn) 26744 if (md_fn)
25103 return NULL_TREE; 26745 switch (fn)
26746 {
26747 case VSX_BUILTIN_XVSQRTDP:
26748 if (!RS6000_RECIP_AUTO_RSQRTE_P (V2DFmode))
26749 return NULL_TREE;
26750
26751 return rs6000_builtin_decls[VSX_BUILTIN_VEC_RSQRT_V2DF];
26752
26753 case VSX_BUILTIN_XVSQRTSP:
26754 if (!RS6000_RECIP_AUTO_RSQRTE_P (V4SFmode))
26755 return NULL_TREE;
26756
26757 return rs6000_builtin_decls[VSX_BUILTIN_VEC_RSQRT_V4SF];
26758
26759 default:
26760 return NULL_TREE;
26761 }
26762
25104 else 26763 else
25105 switch (fn) 26764 switch (fn)
25106 { 26765 {
26766 case BUILT_IN_SQRT:
26767 if (!RS6000_RECIP_AUTO_RSQRTE_P (DFmode))
26768 return NULL_TREE;
26769
26770 return rs6000_builtin_decls[RS6000_BUILTIN_RSQRT];
26771
25107 case BUILT_IN_SQRTF: 26772 case BUILT_IN_SQRTF:
26773 if (!RS6000_RECIP_AUTO_RSQRTE_P (SFmode))
26774 return NULL_TREE;
26775
25108 return rs6000_builtin_decls[RS6000_BUILTIN_RSQRTF]; 26776 return rs6000_builtin_decls[RS6000_BUILTIN_RSQRTF];
25109 26777
25110 default: 26778 default:
25111 return NULL_TREE; 26779 return NULL_TREE;
25112 } 26780 }
25113 } 26781 }
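This hook is what lets the middle end rewrite a square root whose result is only used as a divisor into the reciprocal-square-root builtin; that builtin is then expanded by the software Newton-Raphson code below, so both the hardware sqrt and the divide disappear. A sketch of the effect at source level (an illustration; the precise set of fast-math and -mrecip options required is an assumption):

    /* a / sqrtf (x) becomes, in effect, a * rsqrt_estimate (x) refined by
       rs6000_emit_swrsqrt, with no fsqrts and no fdivs in the result.  */
    float
    inv_norm (float a, float x)
    {
      return a / __builtin_sqrtf (x);
    }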
25114 26782
25115 /* Newton-Raphson approximation of single-precision floating point divide n/d. 26783 /* Load up a constant. If the mode is a vector mode, splat the value across
25116 Assumes no trapping math and finite arguments. */ 26784 all of the vector elements. */
26785
26786 static rtx
26787 rs6000_load_constant_and_splat (enum machine_mode mode, REAL_VALUE_TYPE dconst)
26788 {
26789 rtx reg;
26790
26791 if (mode == SFmode || mode == DFmode)
26792 {
26793 rtx d = CONST_DOUBLE_FROM_REAL_VALUE (dconst, mode);
26794 reg = force_reg (mode, d);
26795 }
26796 else if (mode == V4SFmode)
26797 {
26798 rtx d = CONST_DOUBLE_FROM_REAL_VALUE (dconst, SFmode);
26799 rtvec v = gen_rtvec (4, d, d, d, d);
26800 reg = gen_reg_rtx (mode);
26801 rs6000_expand_vector_init (reg, gen_rtx_PARALLEL (mode, v));
26802 }
26803 else if (mode == V2DFmode)
26804 {
26805 rtx d = CONST_DOUBLE_FROM_REAL_VALUE (dconst, DFmode);
26806 rtvec v = gen_rtvec (2, d, d);
26807 reg = gen_reg_rtx (mode);
26808 rs6000_expand_vector_init (reg, gen_rtx_PARALLEL (mode, v));
26809 }
26810 else
26811 gcc_unreachable ();
26812
26813 return reg;
26814 }
26815
26816 /* Generate an FMA instruction. */
26817
26818 static void
26819 rs6000_emit_madd (rtx target, rtx m1, rtx m2, rtx a)
26820 {
26821 enum machine_mode mode = GET_MODE (target);
26822 rtx dst;
26823
26824 dst = expand_ternary_op (mode, fma_optab, m1, m2, a, target, 0);
26825 gcc_assert (dst != NULL);
26826
26827 if (dst != target)
26828 emit_move_insn (target, dst);
26829 }
26830
26831 /* Generate a FMSUB instruction: dst = fma(m1, m2, -a). */
26832
26833 static void
26834 rs6000_emit_msub (rtx target, rtx m1, rtx m2, rtx a)
26835 {
26836 enum machine_mode mode = GET_MODE (target);
26837 rtx dst;
26838
26839 /* Altivec does not support fms directly;
26840 generate in terms of fma in that case. */
26841 if (optab_handler (fms_optab, mode) != CODE_FOR_nothing)
26842 dst = expand_ternary_op (mode, fms_optab, m1, m2, a, target, 0);
26843 else
26844 {
26845 a = expand_unop (mode, neg_optab, a, NULL_RTX, 0);
26846 dst = expand_ternary_op (mode, fma_optab, m1, m2, a, target, 0);
26847 }
26848 gcc_assert (dst != NULL);
26849
26850 if (dst != target)
26851 emit_move_insn (target, dst);
26852 }
26853
26854 /* Generate a FNMSUB instruction: dst = -fma(m1, m2, -a). */
26855
26856 static void
26857 rs6000_emit_nmsub (rtx dst, rtx m1, rtx m2, rtx a)
26858 {
26859 enum machine_mode mode = GET_MODE (dst);
26860 rtx r;
26861
26862 /* This is a tad more complicated, since the fnma_optab is for
26863 a different expression: fma(-m1, m2, a), which is the same
26864 thing except in the case of signed zeros.
26865
26866 Fortunately we know that if FMA is supported that FNMSUB is
26867 also supported in the ISA. Just expand it directly. */
26868
26869 gcc_assert (optab_handler (fma_optab, mode) != CODE_FOR_nothing);
26870
26871 r = gen_rtx_NEG (mode, a);
26872 r = gen_rtx_FMA (mode, m1, m2, r);
26873 r = gen_rtx_NEG (mode, r);
26874 emit_insn (gen_rtx_SET (VOIDmode, dst, r));
26875 }
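Worked out, the distinction the comment describes is:

    fnmsub (m1, m2, a) = -(m1*m2 - a)
    fnma   (m1, m2, a) = (-m1)*m2 + a  =  a - m1*m2

With round-to-nearest and m1*m2 == a == x, x - x yields +0.0 while -(x - x) yields -0.0, so the two forms agree everywhere except in the sign of a zero result; that is why the FNMSUB shape is built explicitly as NEG (FMA (m1, m2, NEG (a))) rather than through fnma_optab.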
26876
26877 /* Newton-Raphson approximation of floating point divide with just 2 passes
26878 (either single precision floating point, or newer machines with higher
26879 accuracy estimates). Support both scalar and vector divide. Assumes no
26880 trapping math and finite arguments. */
26881
26882 static void
26883 rs6000_emit_swdiv_high_precision (rtx dst, rtx n, rtx d)
26884 {
26885 enum machine_mode mode = GET_MODE (dst);
26886 rtx x0, e0, e1, y1, u0, v0;
26887 enum insn_code code = optab_handler (smul_optab, mode);
26888 gen_2arg_fn_t gen_mul = (gen_2arg_fn_t) GEN_FCN (code);
26889 rtx one = rs6000_load_constant_and_splat (mode, dconst1);
26890
26891 gcc_assert (code != CODE_FOR_nothing);
26892
26893 /* x0 = 1./d estimate */
26894 x0 = gen_reg_rtx (mode);
26895 emit_insn (gen_rtx_SET (VOIDmode, x0,
26896 gen_rtx_UNSPEC (mode, gen_rtvec (1, d),
26897 UNSPEC_FRES)));
26898
26899 e0 = gen_reg_rtx (mode);
26900 rs6000_emit_nmsub (e0, d, x0, one); /* e0 = 1. - (d * x0) */
26901
26902 e1 = gen_reg_rtx (mode);
26903 rs6000_emit_madd (e1, e0, e0, e0); /* e1 = (e0 * e0) + e0 */
26904
26905 y1 = gen_reg_rtx (mode);
26906 rs6000_emit_madd (y1, e1, x0, x0); /* y1 = (e1 * x0) + x0 */
26907
26908 u0 = gen_reg_rtx (mode);
26909 emit_insn (gen_mul (u0, n, y1)); /* u0 = n * y1 */
26910
26911 v0 = gen_reg_rtx (mode);
26912 rs6000_emit_nmsub (v0, d, u0, n); /* v0 = n - (d * u0) */
26913
26914 rs6000_emit_madd (dst, v0, y1, u0); /* dst = (v0 * y1) + u0 */
26915 }
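For reference, the sequence above is the usual Newton-Raphson reciprocal refinement arranged so every step is a single FMA-class operation. With e0 = 1 - d*x0 we have x0 = (1 - e0)/d, so:

    e1  = e0 + e0*e0
    y1  = x0 + e1*x0  =  x0*(1 + e0 + e0^2)  =  (1 - e0^3)/d
    u0  = n*y1                     tentative quotient
    v0  = n - d*u0                 exact residual
    dst = u0 + v0*y1               corrected quotient

A single pass therefore cubes the relative error of the hardware estimate before the final residual correction; the longer variant that follows does the analogous thing for the older, less accurate estimate.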
26916
26917 /* Newton-Raphson approximation of floating point divide that has a low
26918 precision estimate. Assumes no trapping math and finite arguments. */
26919
26920 static void
26921 rs6000_emit_swdiv_low_precision (rtx dst, rtx n, rtx d)
26922 {
26923 enum machine_mode mode = GET_MODE (dst);
26924 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one;
26925 enum insn_code code = optab_handler (smul_optab, mode);
26926 gen_2arg_fn_t gen_mul = (gen_2arg_fn_t) GEN_FCN (code);
26927
26928 gcc_assert (code != CODE_FOR_nothing);
26929
26930 one = rs6000_load_constant_and_splat (mode, dconst1);
26931
26932 /* x0 = 1./d estimate */
26933 x0 = gen_reg_rtx (mode);
26934 emit_insn (gen_rtx_SET (VOIDmode, x0,
26935 gen_rtx_UNSPEC (mode, gen_rtvec (1, d),
26936 UNSPEC_FRES)));
26937
26938 e0 = gen_reg_rtx (mode);
26939 rs6000_emit_nmsub (e0, d, x0, one); /* e0 = 1. - d * x0 */
26940
26941 y1 = gen_reg_rtx (mode);
26942 rs6000_emit_madd (y1, e0, x0, x0); /* y1 = x0 + e0 * x0 */
26943
26944 e1 = gen_reg_rtx (mode);
26945 emit_insn (gen_mul (e1, e0, e0)); /* e1 = e0 * e0 */
26946
26947 y2 = gen_reg_rtx (mode);
26948 rs6000_emit_madd (y2, e1, y1, y1); /* y2 = y1 + e1 * y1 */
26949
26950 e2 = gen_reg_rtx (mode);
26951 emit_insn (gen_mul (e2, e1, e1)); /* e2 = e1 * e1 */
26952
26953 y3 = gen_reg_rtx (mode);
26954 rs6000_emit_madd (y3, e2, y2, y2); /* y3 = y2 + e2 * y2 */
26955
26956 u0 = gen_reg_rtx (mode);
26957 emit_insn (gen_mul (u0, n, y3)); /* u0 = n * y3 */
26958
26959 v0 = gen_reg_rtx (mode);
26960 rs6000_emit_nmsub (v0, d, u0, n); /* v0 = n - d * u0 */
26961
26962 rs6000_emit_madd (dst, v0, y3, u0); /* dst = u0 + v0 * y3 */
26963 }
26964
26965 /* Newton-Raphson approximation of floating point divide DST = N/D. If NOTE_P,
26966 add a reg_note saying that this was a division. Support both scalar and
26967 vector divide. Assumes no trapping math and finite arguments. */
25117 26968
25118 void 26969 void
25119 rs6000_emit_swdivsf (rtx dst, rtx n, rtx d) 26970 rs6000_emit_swdiv (rtx dst, rtx n, rtx d, bool note_p)
25120 { 26971 {
25121 rtx x0, e0, e1, y1, u0, v0, one; 26972 enum machine_mode mode = GET_MODE (dst);
25122 26973
25123 x0 = gen_reg_rtx (SFmode); 26974 if (RS6000_RECIP_HIGH_PRECISION_P (mode))
25124 e0 = gen_reg_rtx (SFmode); 26975 rs6000_emit_swdiv_high_precision (dst, n, d);
25125 e1 = gen_reg_rtx (SFmode); 26976 else
25126 y1 = gen_reg_rtx (SFmode); 26977 rs6000_emit_swdiv_low_precision (dst, n, d);
25127 u0 = gen_reg_rtx (SFmode); 26978
25128 v0 = gen_reg_rtx (SFmode); 26979 if (note_p)
25129 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode)); 26980 add_reg_note (get_last_insn (), REG_EQUAL, gen_rtx_DIV (mode, n, d));
25130 26981 }
25131 /* x0 = 1./d estimate */ 26982
25132 emit_insn (gen_rtx_SET (VOIDmode, x0, 26983 /* Newton-Raphson approximation of single/double-precision floating point
25133 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, d), 26984 rsqrt. Assumes no trapping math and finite arguments. */
25134 UNSPEC_FRES)));
25135 /* e0 = 1. - d * x0 */
25136 emit_insn (gen_rtx_SET (VOIDmode, e0,
25137 gen_rtx_MINUS (SFmode, one,
25138 gen_rtx_MULT (SFmode, d, x0))));
25139 /* e1 = e0 + e0 * e0 */
25140 emit_insn (gen_rtx_SET (VOIDmode, e1,
25141 gen_rtx_PLUS (SFmode,
25142 gen_rtx_MULT (SFmode, e0, e0), e0)));
25143 /* y1 = x0 + e1 * x0 */
25144 emit_insn (gen_rtx_SET (VOIDmode, y1,
25145 gen_rtx_PLUS (SFmode,
25146 gen_rtx_MULT (SFmode, e1, x0), x0)));
25147 /* u0 = n * y1 */
25148 emit_insn (gen_rtx_SET (VOIDmode, u0,
25149 gen_rtx_MULT (SFmode, n, y1)));
25150 /* v0 = n - d * u0 */
25151 emit_insn (gen_rtx_SET (VOIDmode, v0,
25152 gen_rtx_MINUS (SFmode, n,
25153 gen_rtx_MULT (SFmode, d, u0))));
25154 /* dst = u0 + v0 * y1 */
25155 emit_insn (gen_rtx_SET (VOIDmode, dst,
25156 gen_rtx_PLUS (SFmode,
25157 gen_rtx_MULT (SFmode, v0, y1), u0)));
25158 }
25159
25160 /* Newton-Raphson approximation of double-precision floating point divide n/d.
25161 Assumes no trapping math and finite arguments. */
25162 26985
25163 void 26986 void
25164 rs6000_emit_swdivdf (rtx dst, rtx n, rtx d) 26987 rs6000_emit_swrsqrt (rtx dst, rtx src)
25165 { 26988 {
25166 rtx x0, e0, e1, e2, y1, y2, y3, u0, v0, one; 26989 enum machine_mode mode = GET_MODE (src);
25167 26990 rtx x0 = gen_reg_rtx (mode);
25168 x0 = gen_reg_rtx (DFmode); 26991 rtx y = gen_reg_rtx (mode);
25169 e0 = gen_reg_rtx (DFmode); 26992 int passes = (TARGET_RECIP_PRECISION) ? 2 : 3;
25170 e1 = gen_reg_rtx (DFmode); 26993 REAL_VALUE_TYPE dconst3_2;
25171 e2 = gen_reg_rtx (DFmode); 26994 int i;
25172 y1 = gen_reg_rtx (DFmode); 26995 rtx halfthree;
25173 y2 = gen_reg_rtx (DFmode); 26996 enum insn_code code = optab_handler (smul_optab, mode);
25174 y3 = gen_reg_rtx (DFmode); 26997 gen_2arg_fn_t gen_mul = (gen_2arg_fn_t) GEN_FCN (code);
25175 u0 = gen_reg_rtx (DFmode); 26998
25176 v0 = gen_reg_rtx (DFmode); 26999 gcc_assert (code != CODE_FOR_nothing);
25177 one = force_reg (DFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, DFmode)); 27000
25178 27001 /* Load up the constant 1.5 either as a scalar, or as a vector. */
25179 /* x0 = 1./d estimate */ 27002 real_from_integer (&dconst3_2, VOIDmode, 3, 0, 0);
25180 emit_insn (gen_rtx_SET (VOIDmode, x0, 27003 SET_REAL_EXP (&dconst3_2, REAL_EXP (&dconst3_2) - 1);
25181 gen_rtx_UNSPEC (DFmode, gen_rtvec (1, d), 27004
25182 UNSPEC_FRES))); 27005 halfthree = rs6000_load_constant_and_splat (mode, dconst3_2);
25183 /* e0 = 1. - d * x0 */
25184 emit_insn (gen_rtx_SET (VOIDmode, e0,
25185 gen_rtx_MINUS (DFmode, one,
25186 gen_rtx_MULT (SFmode, d, x0))));
25187 /* y1 = x0 + e0 * x0 */
25188 emit_insn (gen_rtx_SET (VOIDmode, y1,
25189 gen_rtx_PLUS (DFmode,
25190 gen_rtx_MULT (DFmode, e0, x0), x0)));
25191 /* e1 = e0 * e0 */
25192 emit_insn (gen_rtx_SET (VOIDmode, e1,
25193 gen_rtx_MULT (DFmode, e0, e0)));
25194 /* y2 = y1 + e1 * y1 */
25195 emit_insn (gen_rtx_SET (VOIDmode, y2,
25196 gen_rtx_PLUS (DFmode,
25197 gen_rtx_MULT (DFmode, e1, y1), y1)));
25198 /* e2 = e1 * e1 */
25199 emit_insn (gen_rtx_SET (VOIDmode, e2,
25200 gen_rtx_MULT (DFmode, e1, e1)));
25201 /* y3 = y2 + e2 * y2 */
25202 emit_insn (gen_rtx_SET (VOIDmode, y3,
25203 gen_rtx_PLUS (DFmode,
25204 gen_rtx_MULT (DFmode, e2, y2), y2)));
25205 /* u0 = n * y3 */
25206 emit_insn (gen_rtx_SET (VOIDmode, u0,
25207 gen_rtx_MULT (DFmode, n, y3)));
25208 /* v0 = n - d * u0 */
25209 emit_insn (gen_rtx_SET (VOIDmode, v0,
25210 gen_rtx_MINUS (DFmode, n,
25211 gen_rtx_MULT (DFmode, d, u0))));
25212 /* dst = u0 + v0 * y3 */
25213 emit_insn (gen_rtx_SET (VOIDmode, dst,
25214 gen_rtx_PLUS (DFmode,
25215 gen_rtx_MULT (DFmode, v0, y3), u0)));
25216 }
25217
25218
25219 /* Newton-Raphson approximation of single-precision floating point rsqrt.
25220 Assumes no trapping math and finite arguments. */
25221
25222 void
25223 rs6000_emit_swrsqrtsf (rtx dst, rtx src)
25224 {
25225 rtx x0, x1, x2, y1, u0, u1, u2, v0, v1, v2, t0,
25226 half, one, halfthree, c1, cond, label;
25227
25228 x0 = gen_reg_rtx (SFmode);
25229 x1 = gen_reg_rtx (SFmode);
25230 x2 = gen_reg_rtx (SFmode);
25231 y1 = gen_reg_rtx (SFmode);
25232 u0 = gen_reg_rtx (SFmode);
25233 u1 = gen_reg_rtx (SFmode);
25234 u2 = gen_reg_rtx (SFmode);
25235 v0 = gen_reg_rtx (SFmode);
25236 v1 = gen_reg_rtx (SFmode);
25237 v2 = gen_reg_rtx (SFmode);
25238 t0 = gen_reg_rtx (SFmode);
25239 halfthree = gen_reg_rtx (SFmode);
25240 cond = gen_rtx_REG (CCFPmode, CR1_REGNO);
25241 label = gen_rtx_LABEL_REF (VOIDmode, gen_label_rtx ());
25242
25243 /* check 0.0, 1.0, NaN, Inf by testing src * src = src */
25244 emit_insn (gen_rtx_SET (VOIDmode, t0,
25245 gen_rtx_MULT (SFmode, src, src)));
25246
25247 emit_insn (gen_rtx_SET (VOIDmode, cond,
25248 gen_rtx_COMPARE (CCFPmode, t0, src)));
25249 c1 = gen_rtx_EQ (VOIDmode, cond, const0_rtx);
25250 emit_unlikely_jump (c1, label);
25251
25252 half = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconsthalf, SFmode));
25253 one = force_reg (SFmode, CONST_DOUBLE_FROM_REAL_VALUE (dconst1, SFmode));
25254
25255 /* halfthree = 1.5 = 1.0 + 0.5 */
25256 emit_insn (gen_rtx_SET (VOIDmode, halfthree,
25257 gen_rtx_PLUS (SFmode, one, half)));
25258 27006
25259 /* x0 = rsqrt estimate */ 27007 /* x0 = rsqrt estimate */
25260 emit_insn (gen_rtx_SET (VOIDmode, x0, 27008 emit_insn (gen_rtx_SET (VOIDmode, x0,
25261 gen_rtx_UNSPEC (SFmode, gen_rtvec (1, src), 27009 gen_rtx_UNSPEC (mode, gen_rtvec (1, src),
25262 UNSPEC_RSQRT))); 27010 UNSPEC_RSQRT)));
25263 27011
25264 /* y1 = 0.5 * src = 1.5 * src - src -> fewer constants */ 27012 /* y = 0.5 * src = 1.5 * src - src -> fewer constants */
25265 emit_insn (gen_rtx_SET (VOIDmode, y1, 27013 rs6000_emit_msub (y, src, halfthree, src);
25266 gen_rtx_MINUS (SFmode, 27014
25267 gen_rtx_MULT (SFmode, src, halfthree), 27015 for (i = 0; i < passes; i++)
25268 src))); 27016 {
25269 27017 rtx x1 = gen_reg_rtx (mode);
25270 /* x1 = x0 * (1.5 - y1 * (x0 * x0)) */ 27018 rtx u = gen_reg_rtx (mode);
25271 emit_insn (gen_rtx_SET (VOIDmode, u0, 27019 rtx v = gen_reg_rtx (mode);
25272 gen_rtx_MULT (SFmode, x0, x0))); 27020
25273 emit_insn (gen_rtx_SET (VOIDmode, v0, 27021 /* x1 = x0 * (1.5 - y * (x0 * x0)) */
25274 gen_rtx_MINUS (SFmode, 27022 emit_insn (gen_mul (u, x0, x0));
25275 halfthree, 27023 rs6000_emit_nmsub (v, y, u, halfthree);
25276 gen_rtx_MULT (SFmode, y1, u0)))); 27024 emit_insn (gen_mul (x1, x0, v));
25277 emit_insn (gen_rtx_SET (VOIDmode, x1, 27025 x0 = x1;
25278 gen_rtx_MULT (SFmode, x0, v0))); 27026 }
25279 27027
25280 /* x2 = x1 * (1.5 - y1 * (x1 * x1)) */ 27028 emit_move_insn (dst, x0);
25281 emit_insn (gen_rtx_SET (VOIDmode, u1, 27029 return;
25282 gen_rtx_MULT (SFmode, x1, x1)));
25283 emit_insn (gen_rtx_SET (VOIDmode, v1,
25284 gen_rtx_MINUS (SFmode,
25285 halfthree,
25286 gen_rtx_MULT (SFmode, y1, u1))));
25287 emit_insn (gen_rtx_SET (VOIDmode, x2,
25288 gen_rtx_MULT (SFmode, x1, v1)));
25289
25290 /* dst = x2 * (1.5 - y1 * (x2 * x2)) */
25291 emit_insn (gen_rtx_SET (VOIDmode, u2,
25292 gen_rtx_MULT (SFmode, x2, x2)));
25293 emit_insn (gen_rtx_SET (VOIDmode, v2,
25294 gen_rtx_MINUS (SFmode,
25295 halfthree,
25296 gen_rtx_MULT (SFmode, y1, u2))));
25297 emit_insn (gen_rtx_SET (VOIDmode, dst,
25298 gen_rtx_MULT (SFmode, x2, v2)));
25299
25300 emit_label (XEXP (label, 0));
25301 } 27030 }
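The loop above is the textbook Newton-Raphson step for the reciprocal square root, written with y = 0.5*src (formed as 1.5*src - src so the 1.5 constant is reused):

    x_{k+1} = x_k * (1.5 - y * x_k^2)  =  x_k * (1.5 - 0.5*src*x_k^2)

Each pass roughly doubles the number of correct bits, which is why two passes are enough when the more accurate ISA 2.06 estimate is available (TARGET_RECIP_PRECISION) and three are used otherwise.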
25302 27031
25303 /* Emit popcount intrinsic on TARGET_POPCNTB (Power5) and TARGET_POPCNTD 27032 /* Emit popcount intrinsic on TARGET_POPCNTB (Power5) and TARGET_POPCNTD
25304 (Power7) targets. DST is the target, and SRC is the argument operand. */ 27033 (Power7) targets. DST is the target, and SRC is the argument operand. */
25305 27034
25311 27040
25312 /* Use the PPC ISA 2.06 popcnt{w,d} instruction if we can. */ 27041 /* Use the PPC ISA 2.06 popcnt{w,d} instruction if we can. */
25313 if (TARGET_POPCNTD) 27042 if (TARGET_POPCNTD)
25314 { 27043 {
25315 if (mode == SImode) 27044 if (mode == SImode)
25316 emit_insn (gen_popcntwsi2 (dst, src)); 27045 emit_insn (gen_popcntdsi2 (dst, src));
25317 else 27046 else
25318 emit_insn (gen_popcntddi2 (dst, src)); 27047 emit_insn (gen_popcntddi2 (dst, src));
25319 return; 27048 return;
25320 } 27049 }
25321 27050
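With the popcntw/popcntd path above, the generic bit-count builtins collapse to single instructions on ISA 2.06 parts. A small example (an illustration; assumes a 64-bit compilation with -mcpu=power7 or another -mpopcntd target):

    int
    bits32 (unsigned int x)
    {
      return __builtin_popcount (x);     /* expands through gen_popcntdsi2 (popcntw) */
    }

    int
    bits64 (unsigned long long x)
    {
      return __builtin_popcountll (x);   /* expands through gen_popcntddi2 (popcntd) */
    }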
25350 { 27079 {
25351 enum machine_mode mode = GET_MODE (dst); 27080 enum machine_mode mode = GET_MODE (dst);
25352 rtx tmp; 27081 rtx tmp;
25353 27082
25354 tmp = gen_reg_rtx (mode); 27083 tmp = gen_reg_rtx (mode);
27084
27085 /* Use the PPC ISA 2.05 prtyw/prtyd instruction if we can. */
27086 if (TARGET_CMPB)
27087 {
27088 if (mode == SImode)
27089 {
27090 emit_insn (gen_popcntbsi2 (tmp, src));
27091 emit_insn (gen_paritysi2_cmpb (dst, tmp));
27092 }
27093 else
27094 {
27095 emit_insn (gen_popcntbdi2 (tmp, src));
27096 emit_insn (gen_paritydi2_cmpb (dst, tmp));
27097 }
27098 return;
27099 }
27100
25355 if (mode == SImode) 27101 if (mode == SImode)
25356 { 27102 {
25357 /* Is mult+shift >= shift+xor+shift+xor? */ 27103 /* Is mult+shift >= shift+xor+shift+xor? */
25358 if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3)) 27104 if (rs6000_cost->mulsi_const >= COSTS_N_INSNS (3))
25359 { 27105 {
25450 { 27196 {
25451 enum machine_mode mode; 27197 enum machine_mode mode;
25452 unsigned int regno; 27198 unsigned int regno;
25453 27199
25454 /* Special handling for structs in darwin64. */ 27200 /* Special handling for structs in darwin64. */
25455 if (rs6000_darwin64_abi 27201 if (TARGET_MACHO
25456 && TYPE_MODE (valtype) == BLKmode 27202 && rs6000_darwin64_struct_check_p (TYPE_MODE (valtype), valtype))
25457 && TREE_CODE (valtype) == RECORD_TYPE
25458 && int_size_in_bytes (valtype) > 0)
25459 { 27203 {
25460 CUMULATIVE_ARGS valcum; 27204 CUMULATIVE_ARGS valcum;
25461 rtx valret; 27205 rtx valret;
25462 27206
25463 valcum.words = 0; 27207 valcum.words = 0;
25464 valcum.fregno = FP_ARG_MIN_REG; 27208 valcum.fregno = FP_ARG_MIN_REG;
25465 valcum.vregno = ALTIVEC_ARG_MIN_REG; 27209 valcum.vregno = ALTIVEC_ARG_MIN_REG;
25466 /* Do a trial code generation as if this were going to be passed as 27210 /* Do a trial code generation as if this were going to be passed as
25467 an argument; if any part goes in memory, we return NULL. */ 27211 an argument; if any part goes in memory, we return NULL. */
25468 valret = rs6000_darwin64_record_arg (&valcum, valtype, 1, true); 27212 valret = rs6000_darwin64_record_arg (&valcum, valtype, true, /* retval= */ true);
25469 if (valret) 27213 if (valret)
25470 return valret; 27214 return valret;
25471 /* Otherwise fall through to standard ABI rules. */ 27215 /* Otherwise fall through to standard ABI rules. */
25472 } 27216 }
25473 27217
25713 return 108; 27457 return 108;
25714 if (regno == CTR_REGNO) 27458 if (regno == CTR_REGNO)
25715 return 109; 27459 return 109;
25716 if (CR_REGNO_P (regno)) 27460 if (CR_REGNO_P (regno))
25717 return regno - CR0_REGNO + 86; 27461 return regno - CR0_REGNO + 86;
25718 if (regno == XER_REGNO) 27462 if (regno == CA_REGNO)
25719 return 101; 27463 return 101; /* XER */
25720 if (ALTIVEC_REGNO_P (regno)) 27464 if (ALTIVEC_REGNO_P (regno))
25721 return regno - FIRST_ALTIVEC_REGNO + 1124; 27465 return regno - FIRST_ALTIVEC_REGNO + 1124;
25722 if (regno == VRSAVE_REGNO) 27466 if (regno == VRSAVE_REGNO)
25723 return 356; 27467 return 356;
25724 if (regno == VSCR_REGNO) 27468 if (regno == VSCR_REGNO)
25820 "emitting conditional microcode insn %s\t[%s] #%d", 27564 "emitting conditional microcode insn %s\t[%s] #%d",
25821 temp, insn_data[INSN_CODE (insn)].name, INSN_UID (insn)); 27565 temp, insn_data[INSN_CODE (insn)].name, INSN_UID (insn));
25822 } 27566 }
25823 } 27567 }
25824 27568
27569
27570 /* Mask options that we want to support inside of attribute((target)) and
27571 #pragma GCC target operations. Note, we do not include things like
27572 64/32-bit, endianness, hard/soft floating point, etc. that would have
27573 different calling sequences. */
27574
27575 struct rs6000_opt_mask {
27576 const char *name; /* option name */
27577 int mask; /* mask to set */
27578 bool invert; /* invert sense of mask */
27579 bool valid_target; /* option is a target option */
27580 };
27581
27582 static struct rs6000_opt_mask const rs6000_opt_masks[] =
27583 {
27584 { "altivec", MASK_ALTIVEC, false, true },
27585 { "cmpb", MASK_CMPB, false, true },
27586 { "dlmzb", MASK_DLMZB, false, true },
27587 { "fprnd", MASK_FPRND, false, true },
27588 { "hard-dfp", MASK_DFP, false, true },
27589 { "isel", MASK_ISEL, false, true },
27590 { "mfcrf", MASK_MFCRF, false, true },
27591 { "mfpgpr", MASK_MFPGPR, false, true },
27592 { "mulhw", MASK_MULHW, false, true },
27593 { "multiple", MASK_MULTIPLE, false, true },
27594 { "update", MASK_NO_UPDATE, true , true },
27595 { "popcntb", MASK_POPCNTB, false, true },
27596 { "popcntd", MASK_POPCNTD, false, true },
27597 { "powerpc-gfxopt", MASK_PPC_GFXOPT, false, true },
27598 { "powerpc-gpopt", MASK_PPC_GPOPT, false, true },
27599 { "recip-precision", MASK_RECIP_PRECISION, false, true },
27600 { "string", MASK_STRING, false, true },
27601 { "vsx", MASK_VSX, false, true },
27602 #ifdef MASK_64BIT
27603 #if TARGET_AIX_OS
27604 { "aix64", MASK_64BIT, false, false },
27605 { "aix32", MASK_64BIT, true, false },
27606 #else
27607 { "64", MASK_64BIT, false, false },
27608 { "32", MASK_64BIT, true, false },
27609 #endif
27610 #endif
27611 #ifdef MASK_EABI
27612 { "eabi", MASK_EABI, false, false },
27613 #endif
27614 #ifdef MASK_LITTLE_ENDIAN
27615 { "little", MASK_LITTLE_ENDIAN, false, false },
27616 { "big", MASK_LITTLE_ENDIAN, true, false },
27617 #endif
27618 #ifdef MASK_RELOCATABLE
27619 { "relocatable", MASK_RELOCATABLE, false, false },
27620 #endif
27621 #ifdef MASK_STRICT_ALIGN
27622 { "strict-align", MASK_STRICT_ALIGN, false, false },
27623 #endif
27624 { "power", MASK_POWER, false, false },
27625 { "power2", MASK_POWER2, false, false },
27626 { "powerpc", MASK_POWERPC, false, false },
27627 { "soft-float", MASK_SOFT_FLOAT, false, false },
27628 { "string", MASK_STRING, false, false },
27629 };
27630
27631 /* Option variables that we want to support inside attribute((target)) and
27632 #pragma GCC target operations. */
27633
27634 struct rs6000_opt_var {
27635 const char *name; /* option name */
27636 size_t global_offset; /* offset of the option in global_options. */
27637 size_t target_offset; /* offset of the option in target options. */
27638 };
27639
27640 static struct rs6000_opt_var const rs6000_opt_vars[] =
27641 {
27642 { "friz",
27643 offsetof (struct gcc_options, x_TARGET_FRIZ),
27644 offsetof (struct cl_target_option, x_TARGET_FRIZ), },
27645 { "avoid-indexed-addresses",
27646 offsetof (struct gcc_options, x_TARGET_AVOID_XFORM),
27647 offsetof (struct cl_target_option, x_TARGET_AVOID_XFORM) },
27648 { "paired",
27649 offsetof (struct gcc_options, x_rs6000_paired_float),
27650 offsetof (struct cl_target_option, x_rs6000_paired_float), },
27651 { "longcall",
27652 offsetof (struct gcc_options, x_rs6000_default_long_calls),
27653 offsetof (struct cl_target_option, x_rs6000_default_long_calls), },
27654 };
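These two tables define exactly the option names accepted by the attribute((target)) and #pragma GCC target support implemented below. A usage sketch (an illustration; the particular cpu name is an assumption):

    /* Build just this function for POWER7 with VSX enabled.  */
    __attribute__((__target__("cpu=power7,vsx")))
    void
    saxpy_vsx (float *y, const float *x, float a, int n)
    {
      int i;
      for (i = 0; i < n; i++)
        y[i] += a * x[i];
    }

    /* Switch options for a region of the file, then restore them.  */
    #pragma GCC push_options
    #pragma GCC target ("altivec,no-vsx")
    /* ... code compiled with AltiVec but without VSX ... */
    #pragma GCC pop_options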
27655
27656 /* Inner function to handle attribute((target("..."))) and #pragma GCC target
27657 parsing. Return true if there were no errors. */
27658
27659 static bool
27660 rs6000_inner_target_options (tree args, bool attr_p)
27661 {
27662 bool ret = true;
27663
27664 if (args == NULL_TREE)
27665 ;
27666
27667 else if (TREE_CODE (args) == STRING_CST)
27668 {
27669 char *p = ASTRDUP (TREE_STRING_POINTER (args));
27670 char *q;
27671
27672 while ((q = strtok (p, ",")) != NULL)
27673 {
27674 bool error_p = false;
27675 bool not_valid_p = false;
27676 const char *cpu_opt = NULL;
27677
27678 p = NULL;
27679 if (strncmp (q, "cpu=", 4) == 0)
27680 {
27681 int cpu_index = rs6000_cpu_name_lookup (q+4);
27682 if (cpu_index >= 0)
27683 rs6000_cpu_index = cpu_index;
27684 else
27685 {
27686 error_p = true;
27687 cpu_opt = q+4;
27688 }
27689 }
27690 else if (strncmp (q, "tune=", 5) == 0)
27691 {
27692 int tune_index = rs6000_cpu_name_lookup (q+5);
27693 if (tune_index >= 0)
27694 rs6000_tune_index = tune_index;
27695 else
27696 {
27697 error_p = true;
27698 cpu_opt = q+5;
27699 }
27700 }
27701 else
27702 {
27703 size_t i;
27704 bool invert = false;
27705 char *r = q;
27706
27707 error_p = true;
27708 if (strncmp (r, "no-", 3) == 0)
27709 {
27710 invert = true;
27711 r += 3;
27712 }
27713
27714 for (i = 0; i < ARRAY_SIZE (rs6000_opt_masks); i++)
27715 if (strcmp (r, rs6000_opt_masks[i].name) == 0)
27716 {
27717 int mask = rs6000_opt_masks[i].mask;
27718
27719 if (!rs6000_opt_masks[i].valid_target)
27720 not_valid_p = true;
27721 else
27722 {
27723 error_p = false;
27724 target_flags_explicit |= mask;
27725
27726 if (rs6000_opt_masks[i].invert)
27727 invert = !invert;
27728
27729 if (invert)
27730 target_flags &= ~mask;
27731 else
27732 target_flags |= mask;
27733 }
27734 break;
27735 }
27736
27737 if (error_p && !not_valid_p)
27738 {
27739 for (i = 0; i < ARRAY_SIZE (rs6000_opt_vars); i++)
27740 if (strcmp (r, rs6000_opt_vars[i].name) == 0)
27741 {
27742 size_t j = rs6000_opt_vars[i].global_offset;
27743 ((int *) &global_options)[j] = !invert;
27744 error_p = false;
27745 break;
27746 }
27747 }
27748 }
27749
27750 if (error_p)
27751 {
27752 const char *eprefix, *esuffix;
27753
27754 ret = false;
27755 if (attr_p)
27756 {
27757 eprefix = "__attribute__((__target__(";
27758 esuffix = ")))";
27759 }
27760 else
27761 {
27762 eprefix = "#pragma GCC target ";
27763 esuffix = "";
27764 }
27765
27766 if (cpu_opt)
27767 error ("invalid cpu \"%s\" for %s\"%s\"%s", cpu_opt, eprefix,
27768 q, esuffix);
27769 else if (not_valid_p)
27770 error ("%s\"%s\"%s is not allowed", eprefix, q, esuffix);
27771 else
27772 error ("%s\"%s\"%s is invalid", eprefix, q, esuffix);
27773 }
27774 }
27775 }
27776
27777 else if (TREE_CODE (args) == TREE_LIST)
27778 {
27779 do
27780 {
27781 tree value = TREE_VALUE (args);
27782 if (value)
27783 {
27784 bool ret2 = rs6000_inner_target_options (value, attr_p);
27785 if (!ret2)
27786 ret = false;
27787 }
27788 args = TREE_CHAIN (args);
27789 }
27790 while (args != NULL_TREE);
27791 }
27792
27793 else
27794 gcc_unreachable ();
27795
27796 return ret;
27797 }
27798
27799 /* Print out the target options as a list for -mdebug=target. */
27800
27801 static void
27802 rs6000_debug_target_options (tree args, const char *prefix)
27803 {
27804 if (args == NULL_TREE)
27805 fprintf (stderr, "%s<NULL>", prefix);
27806
27807 else if (TREE_CODE (args) == STRING_CST)
27808 {
27809 char *p = ASTRDUP (TREE_STRING_POINTER (args));
27810 char *q;
27811
27812 while ((q = strtok (p, ",")) != NULL)
27813 {
27814 p = NULL;
27815 fprintf (stderr, "%s\"%s\"", prefix, q);
27816 prefix = ", ";
27817 }
27818 }
27819
27820 else if (TREE_CODE (args) == TREE_LIST)
27821 {
27822 do
27823 {
27824 tree value = TREE_VALUE (args);
27825 if (value)
27826 {
27827 rs6000_debug_target_options (value, prefix);
27828 prefix = ", ";
27829 }
27830 args = TREE_CHAIN (args);
27831 }
27832 while (args != NULL_TREE);
27833 }
27834
27835 else
27836 gcc_unreachable ();
27837
27838 return;
27839 }
27840
27841
27842 /* Hook to validate attribute((target("..."))). */
27843
27844 static bool
27845 rs6000_valid_attribute_p (tree fndecl,
27846 tree ARG_UNUSED (name),
27847 tree args,
27848 int flags)
27849 {
27850 struct cl_target_option cur_target;
27851 bool ret;
27852 tree old_optimize = build_optimization_node ();
27853 tree new_target, new_optimize;
27854 tree func_optimize = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl);
27855
27856 gcc_assert ((fndecl != NULL_TREE) && (args != NULL_TREE));
27857
27858 if (TARGET_DEBUG_TARGET)
27859 {
27860 tree tname = DECL_NAME (fndecl);
27861 fprintf (stderr, "\n==================== rs6000_valid_attribute_p:\n");
27862 if (tname)
27863 fprintf (stderr, "function: %.*s\n",
27864 (int) IDENTIFIER_LENGTH (tname),
27865 IDENTIFIER_POINTER (tname));
27866 else
27867 fprintf (stderr, "function: unknown\n");
27868
27869 fprintf (stderr, "args:");
27870 rs6000_debug_target_options (args, " ");
27871 fprintf (stderr, "\n");
27872
27873 if (flags)
27874 fprintf (stderr, "flags: 0x%x\n", flags);
27875
27876 fprintf (stderr, "--------------------\n");
27877 }
27878
27879 old_optimize = build_optimization_node ();
27880 func_optimize = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl);
27881
27882 /* If the function changed the optimization levels as well as setting target
27883 options, start with the optimizations specified. */
27884 if (func_optimize && func_optimize != old_optimize)
27885 cl_optimization_restore (&global_options,
27886 TREE_OPTIMIZATION (func_optimize));
27887
27888 /* The target attributes may also change some optimization flags, so update
27889 the optimization options if necessary. */
27890 cl_target_option_save (&cur_target, &global_options);
27891 rs6000_cpu_index = rs6000_tune_index = -1;
27892 ret = rs6000_inner_target_options (args, true);
27893
27894 /* Set up any additional state. */
27895 if (ret)
27896 {
27897 ret = rs6000_option_override_internal (false);
27898 new_target = build_target_option_node ();
27899 }
27900 else
27901 new_target = NULL;
27902
27903 new_optimize = build_optimization_node ();
27904
27905 if (!new_target)
27906 ret = false;
27907
27908 else if (fndecl)
27909 {
27910 DECL_FUNCTION_SPECIFIC_TARGET (fndecl) = new_target;
27911
27912 if (old_optimize != new_optimize)
27913 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (fndecl) = new_optimize;
27914 }
27915
27916 cl_target_option_restore (&global_options, &cur_target);
27917
27918 if (old_optimize != new_optimize)
27919 cl_optimization_restore (&global_options,
27920 TREE_OPTIMIZATION (old_optimize));
27921
27922 return ret;
27923 }
27924
27925
27926 /* Hook to validate the current #pragma GCC target and set the state, and
27927 update the macros based on what was changed. If ARGS is NULL, then
27928 POP_TARGET is used to reset the options. */
27929
27930 bool
27931 rs6000_pragma_target_parse (tree args, tree pop_target)
27932 {
27933 tree cur_tree;
27934 bool ret;
27935
27936 if (TARGET_DEBUG_TARGET)
27937 {
27938 fprintf (stderr, "\n==================== rs6000_pragma_target_parse\n");
27939 fprintf (stderr, "args:");
27940 rs6000_debug_target_options (args, " ");
27941 fprintf (stderr, "\n");
27942
27943 if (pop_target)
27944 {
27945 fprintf (stderr, "pop_target:\n");
27946 debug_tree (pop_target);
27947 }
27948 else
27949 fprintf (stderr, "pop_target: <NULL>\n");
27950
27951 fprintf (stderr, "--------------------\n");
27952 }
27953
27954 if (! args)
27955 {
27956 ret = true;
27957 cur_tree = ((pop_target)
27958 ? pop_target
27959 : target_option_default_node);
27960 cl_target_option_restore (&global_options,
27961 TREE_TARGET_OPTION (cur_tree));
27962 }
27963 else
27964 {
27965 rs6000_cpu_index = rs6000_tune_index = -1;
27966 ret = rs6000_inner_target_options (args, false);
27967 cur_tree = build_target_option_node ();
27968
27969 if (!cur_tree)
27970 ret = false;
27971 }
27972
27973 if (cur_tree)
27974 target_option_current_node = cur_tree;
27975
27976 return ret;
27977 }
27978
27979
27980 /* Remember the last target of rs6000_set_current_function. */
27981 static GTY(()) tree rs6000_previous_fndecl;
27982
27983 /* Establish appropriate back-end context for processing the function
27984 FNDECL. The argument might be NULL to indicate processing at top
27985 level, outside of any function scope. */
27986 static void
27987 rs6000_set_current_function (tree fndecl)
27988 {
27989 tree old_tree = (rs6000_previous_fndecl
27990 ? DECL_FUNCTION_SPECIFIC_TARGET (rs6000_previous_fndecl)
27991 : NULL_TREE);
27992
27993 tree new_tree = (fndecl
27994 ? DECL_FUNCTION_SPECIFIC_TARGET (fndecl)
27995 : NULL_TREE);
27996
27997 if (TARGET_DEBUG_TARGET)
27998 {
27999 bool print_final = false;
28000 fprintf (stderr, "\n==================== rs6000_set_current_function");
28001
28002 if (fndecl)
28003 fprintf (stderr, ", fndecl %s (%p)",
28004 (DECL_NAME (fndecl)
28005 ? IDENTIFIER_POINTER (DECL_NAME (fndecl))
28006 : "<unknown>"), (void *)fndecl);
28007
28008 if (rs6000_previous_fndecl)
28009 fprintf (stderr, ", prev_fndecl (%p)", (void *)rs6000_previous_fndecl);
28010
28011 fprintf (stderr, "\n");
28012 if (new_tree)
28013 {
28014 fprintf (stderr, "\nnew fndecl target specific options:\n");
28015 debug_tree (new_tree);
28016 print_final = true;
28017 }
28018
28019 if (old_tree)
28020 {
28021 fprintf (stderr, "\nold fndecl target specific options:\n");
28022 debug_tree (old_tree);
28023 print_final = true;
28024 }
28025
28026 if (print_final)
28027 fprintf (stderr, "--------------------\n");
28028 }
28029
28030 /* Only change the context if the function changes. This hook is called
28031 several times in the course of compiling a function, and we don't want to
28032 slow things down too much or call target_reinit when it isn't safe. */
28033 if (fndecl && fndecl != rs6000_previous_fndecl)
28034 {
28035 rs6000_previous_fndecl = fndecl;
28036 if (old_tree == new_tree)
28037 ;
28038
28039 else if (new_tree)
28040 {
28041 cl_target_option_restore (&global_options,
28042 TREE_TARGET_OPTION (new_tree));
28043 target_reinit ();
28044 }
28045
28046 else if (old_tree)
28047 {
28048 struct cl_target_option *def
28049 = TREE_TARGET_OPTION (target_option_current_node);
28050
28051 cl_target_option_restore (&global_options, def);
28052 target_reinit ();
28053 }
28054 }
28055 }
28056
28057
28058 /* Save the current options */
28059
28060 static void
28061 rs6000_function_specific_save (struct cl_target_option *ptr)
28062 {
28063 ptr->rs6000_target_flags_explicit = target_flags_explicit;
28064 }
28065
28066 /* Restore the current options */
28067
28068 static void
28069 rs6000_function_specific_restore (struct cl_target_option *ptr)
28070 {
28071 target_flags_explicit = ptr->rs6000_target_flags_explicit;
28072 (void) rs6000_option_override_internal (false);
28073 }
28074
28075 /* Print the current options */
28076
28077 static void
28078 rs6000_function_specific_print (FILE *file, int indent,
28079 struct cl_target_option *ptr)
28080 {
28081 size_t i;
28082 int flags = ptr->x_target_flags;
28083
28084 /* Print the various mask options. */
28085 for (i = 0; i < ARRAY_SIZE (rs6000_opt_masks); i++)
28086 if ((flags & rs6000_opt_masks[i].mask) != 0)
28087 {
28088 flags &= ~ rs6000_opt_masks[i].mask;
28089 fprintf (file, "%*s-m%s%s\n", indent, "",
28090 rs6000_opt_masks[i].invert ? "no-" : "",
28091 rs6000_opt_masks[i].name);
28092 }
28093
28094 /* Print the various options that are variables. */
28095 for (i = 0; i < ARRAY_SIZE (rs6000_opt_vars); i++)
28096 {
28097 size_t j = rs6000_opt_vars[i].target_offset;
28098 if (((signed char *) ptr)[j])
28099 fprintf (file, "%*s-m%s\n", indent, "",
28100 rs6000_opt_vars[i].name);
28101 }
28102 }
28103
28104
28105 /* Hook to determine if one function can safely inline another. */
28106
28107 static bool
28108 rs6000_can_inline_p (tree caller, tree callee)
28109 {
28110 bool ret = false;
28111 tree caller_tree = DECL_FUNCTION_SPECIFIC_TARGET (caller);
28112 tree callee_tree = DECL_FUNCTION_SPECIFIC_TARGET (callee);
28113
28114 /* If callee has no option attributes, then it is ok to inline. */
28115 if (!callee_tree)
28116 ret = true;
28117
28118 /* If caller has no option attributes, but callee does then it is not ok to
28119 inline. */
28120 else if (!caller_tree)
28121 ret = false;
28122
28123 else
28124 {
28125 struct cl_target_option *caller_opts = TREE_TARGET_OPTION (caller_tree);
28126 struct cl_target_option *callee_opts = TREE_TARGET_OPTION (callee_tree);
28127
28128 /* Callee's options should be a subset of the caller's, i.e. a vsx function
28129 can inline an altivec function but a non-vsx function can't inline a
28130 vsx function. */
28131 if ((caller_opts->x_target_flags & callee_opts->x_target_flags)
28132 == callee_opts->x_target_flags)
28133 ret = true;
28134 }
28135
28136 if (TARGET_DEBUG_TARGET)
28137 fprintf (stderr, "rs6000_can_inline_p: caller %s, callee %s, %s inline\n",
28138 (DECL_NAME (caller)
28139 ? IDENTIFIER_POINTER (DECL_NAME (caller))
28140 : "<unknown>"),
28141 (DECL_NAME (callee)
28142 ? IDENTIFIER_POINTER (DECL_NAME (callee))
28143 : "<unknown>"),
28144 (ret ? "can" : "cannot"));
28145
28146 return ret;
28147 }
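/* Illustrative, standalone sketch (not part of the port): the inlining rule
   above is a plain bitmask subset test.  The flag values in the comment
   below are hypothetical stand-ins for the real target_flags bits.  */

static bool
example_flags_subset_p (unsigned int caller_flags, unsigned int callee_flags)
{
  /* Every option bit the callee relies on must also be set for the caller.  */
  return (caller_flags & callee_flags) == callee_flags;
}

/* For instance, with ALTIVEC = 0x1 and VSX = 0x2: a caller built with both
   (0x3) may inline a callee that only needs ALTIVEC, since (0x3 & 0x1) == 0x1,
   but a caller built with neither (0x0) may not inline a VSX callee, since
   (0x0 & 0x2) != 0x2.  */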
28148
28149 /* Allocate a stack temp and fix up the address so it meets the particular
28150 memory requirements (either offsettable or REG+REG addressing). */
28151
28152 rtx
28153 rs6000_allocate_stack_temp (enum machine_mode mode,
28154 bool offsettable_p,
28155 bool reg_reg_p)
28156 {
28157 rtx stack = assign_stack_temp (mode, GET_MODE_SIZE (mode), 0);
28158 rtx addr = XEXP (stack, 0);
28159 int strict_p = (reload_in_progress || reload_completed);
28160
28161 if (!legitimate_indirect_address_p (addr, strict_p))
28162 {
28163 if (offsettable_p
28164 && !rs6000_legitimate_offset_address_p (mode, addr, strict_p))
28165 stack = replace_equiv_address (stack, copy_addr_to_reg (addr));
28166
28167 else if (reg_reg_p && !legitimate_indexed_address_p (addr, strict_p))
28168 stack = replace_equiv_address (stack, copy_addr_to_reg (addr));
28169 }
28170
28171 return stack;
28172 }
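/* Illustrative usage sketch (hedged, not called anywhere in the port): a
   caller that needs a scratch stack slot whose address is valid for
   offsettable (reg+offset) addressing, e.g. to spill a DFmode value, might
   use the helper above like this.  example_spill_to_stack_temp is
   hypothetical.  */

static rtx
example_spill_to_stack_temp (rtx value)
{
  /* Request an offsettable address; reg+reg addressing is not needed here.  */
  rtx mem = rs6000_allocate_stack_temp (DFmode, true, false);

  emit_move_insn (mem, value);
  return mem;
}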
28173
28174 /* Given a memory reference, if it does not use reg or reg+reg addressing, convert
28175 to such a form to deal with memory reference instructions like STFIWX that
28176 only take reg+reg addressing. */
28177
28178 rtx
28179 rs6000_address_for_fpconvert (rtx x)
28180 {
28181 int strict_p = (reload_in_progress || reload_completed);
28182 rtx addr;
28183
28184 gcc_assert (MEM_P (x));
28185 addr = XEXP (x, 0);
28186 if (! legitimate_indirect_address_p (addr, strict_p)
28187 && ! legitimate_indexed_address_p (addr, strict_p))
28188 {
28189 if (GET_CODE (addr) == PRE_INC || GET_CODE (addr) == PRE_DEC)
28190 {
28191 rtx reg = XEXP (addr, 0);
28192 HOST_WIDE_INT size = GET_MODE_SIZE (GET_MODE (x));
28193 rtx size_rtx = GEN_INT ((GET_CODE (addr) == PRE_DEC) ? -size : size);
28194 gcc_assert (REG_P (reg));
28195 emit_insn (gen_add3_insn (reg, reg, size_rtx));
28196 addr = reg;
28197 }
28198 else if (GET_CODE (addr) == PRE_MODIFY)
28199 {
28200 rtx reg = XEXP (addr, 0);
28201 rtx expr = XEXP (addr, 1);
28202 gcc_assert (REG_P (reg));
28203 gcc_assert (GET_CODE (expr) == PLUS);
28204 emit_insn (gen_add3_insn (reg, XEXP (expr, 0), XEXP (expr, 1)));
28205 addr = reg;
28206 }
28207
28208 x = replace_equiv_address (x, copy_addr_to_reg (addr));
28209 }
28210
28211 return x;
28212 }
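/* Illustrative usage sketch (hedged, not called anywhere in the port): code
   emitting a pattern such as stfiwx, which accepts only reg or reg+reg
   addresses, would first run its MEM operand through the helper above.
   example_prepare_fpconvert_operand is hypothetical.  */

static rtx
example_prepare_fpconvert_operand (rtx mem)
{
  gcc_assert (MEM_P (mem));

  /* After this call the address is either a single register or reg+reg,
     with any pre-increment/decrement side effect already emitted.  */
  return rs6000_address_for_fpconvert (mem);
}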
28213
28214 /* Given a memory reference, if it is not in the form for altivec memory
28215 reference instructions (i.e. reg or reg+reg addressing with AND of -16),
28216 convert to the altivec format. */
28217
28218 rtx
28219 rs6000_address_for_altivec (rtx x)
28220 {
28221 gcc_assert (MEM_P (x));
28222 if (!altivec_indexed_or_indirect_operand (x, GET_MODE (x)))
28223 {
28224 rtx addr = XEXP (x, 0);
28225 int strict_p = (reload_in_progress || reload_completed);
28226
28227 if (!legitimate_indexed_address_p (addr, strict_p)
28228 && !legitimate_indirect_address_p (addr, strict_p))
28229 addr = copy_to_mode_reg (Pmode, addr);
28230
28231 addr = gen_rtx_AND (Pmode, addr, GEN_INT (-16));
28232 x = change_address (x, GET_MODE (x), addr);
28233 }
28234
28235 return x;
28236 }
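/* Illustrative, standalone sketch (not part of the port): the GEN_INT (-16)
   mask above mirrors what the AltiVec lvx/stvx instructions do in hardware,
   i.e. the low four address bits are ignored, so the access always hits a
   16-byte aligned location.  In plain C the same rounding looks like this;
   unsigned long merely stands in for the address width here.  */

static unsigned long
example_altivec_effective_address (unsigned long addr)
{
  /* -16 is ...11110000 in two's complement, so the AND clears the low four
     bits and rounds the address down to a 16-byte boundary.  */
  return addr & (unsigned long) -16;
}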
28237
28238
25825 #include "gt-rs6000.h" 28239 #include "gt-rs6000.h"