comparison gcc/target.h @ 67:f6334be47118

update gcc from gcc-4.6-20100522 to gcc-4.6-20110318
author nobuyasu <dimolto@cr.ie.u-ryukyu.ac.jp>
date Tue, 22 Mar 2011 17:18:12 +0900
parents b7f97abdc517
children 04ced10e8804
comparison
equal deleted inserted replaced
65:65488c3d617d 67:f6334be47118
98 struct ddg; 98 struct ddg;
99 99
100 /* This is defined in cfgloop.h . */ 100 /* This is defined in cfgloop.h . */
101 struct loop; 101 struct loop;
102 102
103 /* This is defined in tree-ssa-alias.h. */
104 struct ao_ref_s;
105
103 /* Assembler instructions for creating various kinds of integer object. */ 106 /* Assembler instructions for creating various kinds of integer object. */
104 107
105 struct asm_int_op 108 struct asm_int_op
106 { 109 {
107 const char *hi; 110 const char *hi;
108 const char *si; 111 const char *si;
109 const char *di; 112 const char *di;
110 const char *ti; 113 const char *ti;
111 }; 114 };
112 115
113 /* The target structure. This holds all the backend hooks. */ 116 /* Types of costs for vectorizer cost model. */
114 117 enum vect_cost_for_stmt
115 struct gcc_target
116 { 118 {
117 /* Functions that output assembler for the target. */ 119 scalar_stmt,
118 struct asm_out 120 scalar_load,
119 { 121 scalar_store,
120 /* Opening and closing parentheses for asm expression grouping. */ 122 vector_stmt,
121 const char *open_paren, *close_paren; 123 vector_load,
122 124 unaligned_load,
123 /* Assembler instructions for creating various kinds of integer object. */ 125 unaligned_store,
124 const char *byte_op; 126 vector_store,
125 struct asm_int_op aligned_op, unaligned_op; 127 vec_to_scalar,
126 128 scalar_to_vec,
127 /* Try to output the assembler code for an integer object whose 129 cond_branch_not_taken,
128 value is given by X. SIZE is the size of the object in bytes and 130 cond_branch_taken,
129 ALIGNED_P indicates whether it is aligned. Return true if 131 vec_perm
130 successful. Only handles cases for which BYTE_OP, ALIGNED_OP
131 and UNALIGNED_OP are NULL. */
132 bool (* integer) (rtx x, unsigned int size, int aligned_p);
133
134 /* Output code that will globalize a label. */
135 void (* globalize_label) (FILE *, const char *);
136
137 /* Output code that will globalize a declaration. */
138 void (* globalize_decl_name) (FILE *, tree);
139
140 /* Output code that will emit a label for unwind info, if this
141 target requires such labels. Second argument is the decl the
142 unwind info is associated with, third is a boolean: true if
143 this is for exception handling, fourth is a boolean: true if
144 this is only a placeholder for an omitted FDE. */
145 void (* unwind_label) (FILE *, tree, int, int);
146
147 /* Output code that will emit a label to divide up the exception
148 table. */
149 void (* except_table_label) (FILE *);
150
151 /* Emit any directives required to unwind this instruction. */
152 void (* unwind_emit) (FILE *, rtx);
153
154 /* Output an internal label. */
155 void (* internal_label) (FILE *, const char *, unsigned long);
156
157 /* Emit a ttype table reference to a typeinfo object. */
158 bool (* ttype) (rtx);
159
160 /* Emit an assembler directive to set visibility for the symbol
161 associated with the tree decl. */
162 void (* visibility) (tree, int);
163
164 /* Output the assembler code for entry to a function. */
165 void (* function_prologue) (FILE *, HOST_WIDE_INT);
166
167 /* Output the assembler code for end of prologue. */
168 void (* function_end_prologue) (FILE *);
169
170 /* Output the assembler code for start of epilogue. */
171 void (* function_begin_epilogue) (FILE *);
172
173 /* Output the assembler code for function exit. */
174 void (* function_epilogue) (FILE *, HOST_WIDE_INT);
175
176 /* Initialize target-specific sections. */
177 void (* init_sections) (void);
178
179 /* Tell assembler to change to section NAME with attributes FLAGS.
180 If DECL is non-NULL, it is the VAR_DECL or FUNCTION_DECL with
181 which this section is associated. */
182 void (* named_section) (const char *name, unsigned int flags, tree decl);
183
184 /* Return a mask describing how relocations should be treated when
185 selecting sections. Bit 1 should be set if global relocations
186 should be placed in a read-write section; bit 0 should be set if
187 local relocations should be placed in a read-write section. */
188 int (*reloc_rw_mask) (void);
189
190 /* Return a section for EXP. It may be a DECL or a constant. RELOC
191 is nonzero if runtime relocations must be applied; bit 1 will be
192 set if the runtime relocations require non-local name resolution.
193 ALIGN is the required alignment of the data. */
194 section *(* select_section) (tree, int, unsigned HOST_WIDE_INT);
195
196 /* Return a section for X. MODE is X's mode and ALIGN is its
197 alignment in bits. */
198 section *(* select_rtx_section) (enum machine_mode, rtx,
199 unsigned HOST_WIDE_INT);
200
201 /* Select a unique section name for DECL. RELOC is the same as
202 for SELECT_SECTION. */
203 void (* unique_section) (tree, int);
204
205 /* Return the readonly data section associated with function DECL. */
206 section *(* function_rodata_section) (tree);
207
208 /* Output a constructor for a symbol with a given priority. */
209 void (* constructor) (rtx, int);
210
211 /* Output a destructor for a symbol with a given priority. */
212 void (* destructor) (rtx, int);
213
214 /* Output the assembler code for a thunk function. THUNK_DECL is the
215 declaration for the thunk function itself, FUNCTION is the decl for
216 the target function. DELTA is an immediate constant offset to be
217 added to THIS. If VCALL_OFFSET is nonzero, the word at
218 *(*this + vcall_offset) should be added to THIS. */
219 void (* output_mi_thunk) (FILE *file, tree thunk_decl,
220 HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
221 tree function_decl);
222
223 /* Determine whether output_mi_thunk would succeed. */
224 /* ??? Ideally, this hook would not exist, and success or failure
225 would be returned from output_mi_thunk directly. But there's
226 too much undo-able setup involved in invoking output_mi_thunk.
227 Could be fixed by making output_mi_thunk emit rtl instead of
228 text to the output file. */
229 bool (* can_output_mi_thunk) (const_tree thunk_decl, HOST_WIDE_INT delta,
230 HOST_WIDE_INT vcall_offset,
231 const_tree function_decl);
232
233 /* Output any boilerplate text needed at the beginning of a
234 translation unit. */
235 void (*file_start) (void);
236
237 /* Output any boilerplate text needed at the end of a
238 translation unit. */
239 void (*file_end) (void);
240
241 /* Output any boilerplate text needed at the beginning of an
242 LTO output stream. */
243 void (*lto_start) (void);
244
245 /* Output any boilerplate text needed at the end of an
246 LTO output stream. */
247 void (*lto_end) (void);
248
249 /* Output any boilerplace text needed at the end of a
250 translation unit before debug and unwind info is emitted. */
251 void (*code_end) (void);
252
253 /* Output an assembler pseudo-op to declare a library function name
254 external. */
255 void (*external_libcall) (rtx);
256
257 /* Output an assembler directive to mark decl live. This instructs
258 linker to not dead code strip this symbol. */
259 void (*mark_decl_preserved) (const char *);
260
261 /* Output a record of the command line switches that have been passed. */
262 print_switch_fn_type record_gcc_switches;
263 /* The name of the section that the example ELF implementation of
264 record_gcc_switches will use to store the information. Target
265 specific versions of record_gcc_switches may or may not use
266 this information. */
267 const char * record_gcc_switches_section;
268
269 /* Output the definition of a section anchor. */
270 void (*output_anchor) (rtx);
271
272 /* Output a DTP-relative reference to a TLS symbol. */
273 void (*output_dwarf_dtprel) (FILE *file, int size, rtx x);
274
275 /* Some target machines need to postscan each insn after it is output. */
276 void (*final_postscan_insn) (FILE *, rtx, rtx *, int);
277
278 /* Emit the trampoline template. This hook may be NULL. */
279 void (*trampoline_template) (FILE *);
280 } asm_out;
281
282 /* Functions relating to instruction scheduling. */
283 struct sched
284 {
285 /* Given the current cost, COST, of an insn, INSN, calculate and
286 return a new cost based on its relationship to DEP_INSN through
287 the dependence LINK. The default is to make no adjustment. */
288 int (* adjust_cost) (rtx insn, rtx link, rtx dep_insn, int cost);
289
290 /* Adjust the priority of an insn as you see fit. Returns the new
291 priority. */
292 int (* adjust_priority) (rtx, int);
293
294 /* Function which returns the maximum number of insns that can be
295 scheduled in the same machine cycle. This must be constant
296 over an entire compilation. The default is 1. */
297 int (* issue_rate) (void);
298
299 /* Calculate how much this insn affects how many more insns we
300 can emit this cycle. Default is they all cost the same. */
301 int (* variable_issue) (FILE *, int, rtx, int);
302
303 /* Initialize machine-dependent scheduling code. */
304 void (* md_init) (FILE *, int, int);
305
306 /* Finalize machine-dependent scheduling code. */
307 void (* md_finish) (FILE *, int);
308
309 /* Initialize machine-dependent function wide scheduling code. */
310 void (* md_init_global) (FILE *, int, int);
311
312 /* Finalize machine-dependent function wide scheduling code. */
313 void (* md_finish_global) (FILE *, int);
314
315 /* Reorder insns in a machine-dependent fashion, in two different
316 places. Default does nothing. */
317 int (* reorder) (FILE *, int, rtx *, int *, int);
318 int (* reorder2) (FILE *, int, rtx *, int *, int);
319
320 /* The following member value is a pointer to a function called
321 after evaluation forward dependencies of insns in chain given
322 by two parameter values (head and tail correspondingly). */
323 void (* dependencies_evaluation_hook) (rtx, rtx);
324
325 /* The values of the following four members are pointers to
326 functions used to simplify the automaton descriptions.
327 dfa_pre_cycle_insn and dfa_post_cycle_insn give functions
328 returning insns which are used to change the pipeline hazard
329 recognizer state when the new simulated processor cycle
330 correspondingly starts and finishes. The function defined by
331 init_dfa_pre_cycle_insn and init_dfa_post_cycle_insn are used
332 to initialize the corresponding insns. The default values of
333 the members result in not changing the automaton state when
334 the new simulated processor cycle correspondingly starts and
335 finishes. */
336 void (* init_dfa_pre_cycle_insn) (void);
337 rtx (* dfa_pre_cycle_insn) (void);
338 void (* init_dfa_post_cycle_insn) (void);
339 rtx (* dfa_post_cycle_insn) (void);
340
341 /* The values of the following two members are pointers to
342 functions used to simplify the automaton descriptions.
343 dfa_pre_advance_cycle and dfa_post_advance_cycle are getting called
344 immediately before and after cycle is advanced. */
345 void (* dfa_pre_advance_cycle) (void);
346 void (* dfa_post_advance_cycle) (void);
347
348 /* The following member value is a pointer to a function returning value
349 which defines how many insns in queue `ready' will we try for
350 multi-pass scheduling. If the member value is nonzero and the
351 function returns positive value, the DFA based scheduler will make
352 multi-pass scheduling for the first cycle. In other words, we will
353 try to choose ready insn which permits to start maximum number of
354 insns on the same cycle. */
355 int (* first_cycle_multipass_dfa_lookahead) (void);
356
357 /* The following member value is pointer to a function controlling
358 what insns from the ready insn queue will be considered for the
359 multipass insn scheduling. If the hook returns zero for insn
360 passed as the parameter, the insn will be not chosen to be
361 issued. */
362 int (* first_cycle_multipass_dfa_lookahead_guard) (rtx);
363
364 /* The following member value is pointer to a function called by
365 the insn scheduler before issuing insn passed as the third
366 parameter on given cycle. If the hook returns nonzero, the
367 insn is not issued on given processors cycle. Instead of that,
368 the processor cycle is advanced. If the value passed through
369 the last parameter is zero, the insn ready queue is not sorted
370 on the new cycle start as usually. The first parameter passes
371 file for debugging output. The second one passes the scheduler
372 verbose level of the debugging output. The forth and the fifth
373 parameter values are correspondingly processor cycle on which
374 the previous insn has been issued and the current processor
375 cycle. */
376 int (* dfa_new_cycle) (FILE *, int, rtx, int, int, int *);
377
378 /* The following member value is a pointer to a function called by the
379 insn scheduler. It should return true if there exists a dependence
380 which is considered costly by the target, between the insn
381 DEP_PRO (&_DEP), and the insn DEP_CON (&_DEP). The first parameter is
382 the dep that represents the dependence between the two insns. The
383 second argument is the cost of the dependence as estimated by
384 the scheduler. The last argument is the distance in cycles
385 between the already scheduled insn (first parameter) and the
386 second insn (second parameter). */
387 bool (* is_costly_dependence) (struct _dep *_dep, int, int);
388
389 /* Given the current cost, COST, of an insn, INSN, calculate and
390 return a new cost based on its relationship to DEP_INSN through the
391 dependence of type DEP_TYPE. The default is to make no adjustment. */
392 int (* adjust_cost_2) (rtx insn, int, rtx dep_insn, int cost, int dw);
393
394 /* The following member value is a pointer to a function called
395 by the insn scheduler. This hook is called to notify the backend
396 that new instructions were emitted. */
397 void (* h_i_d_extended) (void);
398
399 /* Next 5 functions are for multi-point scheduling. */
400
401 /* Allocate memory for scheduler context. */
402 void *(* alloc_sched_context) (void);
403
404 /* Fills the context from the local machine scheduler context. */
405 void (* init_sched_context) (void *, bool);
406
407 /* Sets local machine scheduler context to a saved value. */
408 void (* set_sched_context) (void *);
409
410 /* Clears a scheduler context so it becomes like after init. */
411 void (* clear_sched_context) (void *);
412
413 /* Frees the scheduler context. */
414 void (* free_sched_context) (void *);
415
416 /* The following member value is a pointer to a function called
417 by the insn scheduler.
418 The first parameter is an instruction, the second parameter is the type
419 of the requested speculation, and the third parameter is a pointer to the
420 speculative pattern of the corresponding type (set if return value == 1).
421 It should return
422 -1, if there is no pattern, that will satisfy the requested speculation
423 type,
424 0, if current pattern satisfies the requested speculation type,
425 1, if pattern of the instruction should be changed to the newly
426 generated one. */
427 int (* speculate_insn) (rtx, int, rtx *);
428
429 /* The following member value is a pointer to a function called
430 by the insn scheduler. It should return true if the check instruction
431 passed as the parameter needs a recovery block. */
432 bool (* needs_block_p) (int);
433
434 /* The following member value is a pointer to a function called
435 by the insn scheduler. It should return a pattern for the check
436 instruction.
437 The first parameter is a speculative instruction, the second parameter
438 is the label of the corresponding recovery block (or null, if it is a
439 simple check). If the mutation of the check is requested (e.g. from
440 ld.c to chk.a), the third parameter is true - in this case the first
441 parameter is the previous check. */
442 rtx (* gen_spec_check) (rtx, rtx, int);
443
444 /* The following member value is a pointer to a function controlling
445 what insns from the ready insn queue will be considered for the
446 multipass insn scheduling. If the hook returns zero for the insn
447 passed as the parameter, the insn will not be chosen to be
448 issued. This hook is used to discard speculative instructions,
449 that stand at the first position of the ready list. */
450 bool (* first_cycle_multipass_dfa_lookahead_guard_spec) (const_rtx);
451
452 /* The following member value is a pointer to a function that provides
453 information about the speculation capabilities of the target.
454 The parameter is a pointer to spec_info variable. */
455 void (* set_sched_flags) (struct spec_info_def *);
456
457 /* Return speculation types of the instruction passed as the parameter. */
458 int (* get_insn_spec_ds) (rtx);
459
460 /* Return speculation types that are checked for the instruction passed as
461 the parameter. */
462 int (* get_insn_checked_ds) (rtx);
463
464 /* Return bool if rtx scanning should just skip current layer and
465 advance to the inner rtxes. */
466 bool (* skip_rtx_p) (const_rtx);
467
468 /* The following member value is a pointer to a function that provides
469 information about the target resource-based lower bound which is
470 used by the swing modulo scheduler. The parameter is a pointer
471 to ddg variable. */
472 int (* sms_res_mii) (struct ddg *);
473 } sched;
474
475 /* Functions relating to vectorization. */
476 struct vectorize
477 {
478 /* The following member value is a pointer to a function called
479 by the vectorizer, and return the decl of the target builtin
480 function. */
481 tree (* builtin_mask_for_load) (void);
482
483 /* Returns a code for builtin that realizes vectorized version of
484 function, or NULL_TREE if not available. */
485 tree (* builtin_vectorized_function) (tree, tree, tree);
486
487 /* Returns a function declaration for a builtin that realizes the
488 vector conversion, or NULL_TREE if not available. */
489 tree (* builtin_conversion) (unsigned, tree, tree);
490
491 /* Target builtin that implements vector widening multiplication.
492 builtin_mul_widen_eve computes the element-by-element products
493 for the even elements, and builtin_mul_widen_odd computes the
494 element-by-element products for the odd elements. */
495 tree (* builtin_mul_widen_even) (tree);
496 tree (* builtin_mul_widen_odd) (tree);
497
498 /* Returns the cost to be added to the overheads involved with
499 executing the vectorized version of a loop. */
500 int (*builtin_vectorization_cost) (bool);
501
502 /* Return true if vector alignment is reachable (by peeling N
503 iterations) for the given type. */
504 bool (* vector_alignment_reachable) (const_tree, bool);
505
506 /* Target builtin that implements vector permute. */
507 tree (* builtin_vec_perm) (tree, tree*);
508
509 /* Return true if a vector created for builtin_vec_perm is valid. */
510 bool (* builtin_vec_perm_ok) (tree, tree);
511
512 /* Return true if the target supports misaligned store/load of a
513 specific factor denoted in the third parameter. The last parameter
514 is true if the access is defined in a packed struct. */
515 bool (* builtin_support_vector_misalignment) (enum machine_mode,
516 const_tree, int, bool);
517 } vectorize;
518
519 /* The initial value of target_flags. */
520 int default_target_flags;
521
522 /* Allow target specific overriding of option settings after options have
523 been changed by an attribute or pragma or when it is reset at the
524 end of the code affected by an attribute or pragma. */
525 void (* override_options_after_change) (void);
526
527 /* Handle target switch CODE (an OPT_* value). ARG is the argument
528 passed to the switch; it is NULL if no argument was. VALUE is the
529 value of ARG if CODE specifies a UInteger option, otherwise it is
530 1 if the positive form of the switch was used and 0 if the negative
531 form was. Return true if the switch was valid. */
532 bool (* handle_option) (size_t code, const char *arg, int value);
533
534 /* Display extra, target specific information in response to a
535 --target-help switch. */
536 void (* target_help) (void);
537
538 /* Return machine mode for filter value. */
539 enum machine_mode (* eh_return_filter_mode) (void);
540
541 /* Return machine mode for libgcc expanded cmp instructions. */
542 enum machine_mode (* libgcc_cmp_return_mode) (void);
543
544 /* Return machine mode for libgcc expanded shift instructions. */
545 enum machine_mode (* libgcc_shift_count_mode) (void);
546
547 /* Return machine mode to be used for _Unwind_Word type. */
548 enum machine_mode (* unwind_word_mode) (void);
549
550 /* Given two decls, merge their attributes and return the result. */
551 tree (* merge_decl_attributes) (tree, tree);
552
553 /* Given two types, merge their attributes and return the result. */
554 tree (* merge_type_attributes) (tree, tree);
555
556 /* Table of machine attributes and functions to handle them.
557 Ignored if NULL. */
558 const struct attribute_spec *attribute_table;
559
560 /* Return true iff attribute NAME expects a plain identifier as its first
561 argument. */
562 bool (*attribute_takes_identifier_p) (const_tree name);
563
564 /* Return zero if the attributes on TYPE1 and TYPE2 are incompatible,
565 one if they are compatible and two if they are nearly compatible
566 (which causes a warning to be generated). */
567 int (* comp_type_attributes) (const_tree type1, const_tree type2);
568
569 /* Assign default attributes to the newly defined TYPE. */
570 void (* set_default_type_attributes) (tree type);
571
572 /* Insert attributes on the newly created DECL. */
573 void (* insert_attributes) (tree decl, tree *attributes);
574
575 /* Return true if FNDECL (which has at least one machine attribute)
576 can be inlined despite its machine attributes, false otherwise. */
577 bool (* function_attribute_inlinable_p) (const_tree fndecl);
578
579 /* Return true if bitfields in RECORD_TYPE should follow the
580 Microsoft Visual C++ bitfield layout rules. */
581 bool (* ms_bitfield_layout_p) (const_tree record_type);
582
583 /* True if the target supports decimal floating point. */
584 bool (* decimal_float_supported_p) (void);
585
586 /* True if the target supports fixed-point. */
587 bool (* fixed_point_supported_p) (void);
588
589 /* Return true if anonymous bitfields affect structure alignment. */
590 bool (* align_anon_bitfield) (void);
591
592 /* Return true if volatile bitfields should use the narrowest type possible.
593 Return false if they should use the container type. */
594 bool (* narrow_volatile_bitfield) (void);
595
596 /* Set up target-specific built-in functions. */
597 void (* init_builtins) (void);
598
599 /* Initialize (if INITIALIZE_P is true) and return the target-specific
600 built-in function decl for CODE.
601 Return NULL if that is not possible. Return error_mark_node if CODE
602 is outside of the range of valid target builtin function codes. */
603 tree (* builtin_decl) (unsigned code, bool initialize_p);
604
605 /* Expand a target-specific builtin. */
606 rtx (* expand_builtin) (tree exp, rtx target, rtx subtarget,
607 enum machine_mode mode, int ignore);
608
609 /* Select a replacement for a target-specific builtin. This is done
610 *before* regular type checking, and so allows the target to
611 implement a crude form of function overloading. The result is a
612 complete expression that implements the operation. PARAMS really
613 has type VEC(tree,gc)*, but we don't want to include tree.h
614 here. */
615 tree (*resolve_overloaded_builtin) (unsigned int /*location_t*/,
616 tree decl, void *params);
617
618 /* Fold a target-specific builtin. */
619 tree (* fold_builtin) (tree fndecl, int nargs, tree *argp, bool ignore);
620
621 /* Returns a code for a target-specific builtin that implements
622 reciprocal of the function, or NULL_TREE if not available. */
623 tree (* builtin_reciprocal) (unsigned, bool, bool);
624
625 /* For a vendor-specific TYPE, return a pointer to a statically-allocated
626 string containing the C++ mangling for TYPE. In all other cases, return
627 NULL. */
628 const char * (* mangle_type) (const_tree type);
629
630 /* Make any adjustments to libfunc names needed for this target. */
631 void (* init_libfuncs) (void);
632
633 /* Given a decl, a section name, and whether the decl initializer
634 has relocs, choose attributes for the section. */
635 /* ??? Should be merged with SELECT_SECTION and UNIQUE_SECTION. */
636 unsigned int (* section_type_flags) (tree, const char *, int);
637
638 /* True if new jumps cannot be created, to replace existing ones or
639 not, at the current point in the compilation. */
640 bool (* cannot_modify_jumps_p) (void);
641
642 /* Return a register class for which branch target register
643 optimizations should be applied. */
644 enum reg_class (* branch_target_register_class) (void);
645
646 /* Return true if branch target register optimizations should include
647 callee-saved registers that are not already live during the current
648 function. AFTER_PE_GEN is true if prologues and epilogues have
649 already been generated. */
650 bool (* branch_target_register_callee_saved) (bool after_pe_gen);
651
652 /* Return true if the target supports conditional execution. */
653 bool (* have_conditional_execution) (void);
654
655 /* Return a new value for loop unroll size. */
656 unsigned (* loop_unroll_adjust) (unsigned nunroll, struct loop *loop);
657
658 /* True if the constant X cannot be placed in the constant pool. */
659 bool (* cannot_force_const_mem) (rtx);
660
661 /* True if the insn X cannot be duplicated. */
662 bool (* cannot_copy_insn_p) (rtx);
663
664 /* True if X is considered to be commutative. */
665 bool (* commutative_p) (const_rtx, int);
666
667 /* True if ADDR is an address-expression whose effect depends
668 on the mode of the memory reference it is used in. */
669 bool (* mode_dependent_address_p) (const_rtx addr);
670
671 /* Given an invalid address X for a given machine mode, try machine-specific
672 ways to make it legitimate. Return X or an invalid address on failure. */
673 rtx (* legitimize_address) (rtx, rtx, enum machine_mode);
674
675 /* Given an address RTX, undo the effects of LEGITIMIZE_ADDRESS. */
676 rtx (* delegitimize_address) (rtx);
677
678 /* Given an address RTX, say whether it is valid. */
679 bool (* legitimate_address_p) (enum machine_mode, rtx, bool);
680
681 /* True if the given constant can be put into an object_block. */
682 bool (* use_blocks_for_constant_p) (enum machine_mode, const_rtx);
683
684 /* The minimum and maximum byte offsets for anchored addresses. */
685 HOST_WIDE_INT min_anchor_offset;
686 HOST_WIDE_INT max_anchor_offset;
687
688 /* True if section anchors can be used to access the given symbol. */
689 bool (* use_anchors_for_symbol_p) (const_rtx);
690
691 /* True if it is OK to do sibling call optimization for the specified
692 call expression EXP. DECL will be the called function, or NULL if
693 this is an indirect call. */
694 bool (*function_ok_for_sibcall) (tree decl, tree exp);
695
696 /* Establish appropriate back-end context for processing the function
697 FNDECL. The argument might be NULL to indicate processing at top
698 level, outside of any function scope. */
699 void (*set_current_function) (tree fndecl);
700
701 /* True if EXP should be placed in a "small data" section. */
702 bool (* in_small_data_p) (const_tree);
703
704 /* True if EXP names an object for which name resolution must resolve
705 to the current executable or shared library. */
706 bool (* binds_local_p) (const_tree);
707
708 /* Modify and return the identifier of a DECL's external name,
709 originally identified by ID, as required by the target,
710 (eg, append @nn to windows32 stdcall function names).
711 The default is to return ID without modification. */
712 tree (* mangle_decl_assembler_name) (tree decl, tree id);
713
714 /* Do something target-specific to record properties of the DECL into
715 the associated SYMBOL_REF. */
716 void (* encode_section_info) (tree, rtx, int);
717
718 /* Undo the effects of encode_section_info on the symbol string. */
719 const char * (* strip_name_encoding) (const char *);
720
721 /* If shift optabs for MODE are known to always truncate the shift count,
722 return the mask that they apply. Return 0 otherwise. */
723 unsigned HOST_WIDE_INT (* shift_truncation_mask) (enum machine_mode mode);
724
725 /* Return the number of divisions in the given MODE that should be present,
726 so that it is profitable to turn the division into a multiplication by
727 the reciprocal. */
728 unsigned int (* min_divisions_for_recip_mul) (enum machine_mode mode);
729
730 /* If the representation of integral MODE is such that values are
731 always sign-extended to a wider mode MODE_REP then return
732 SIGN_EXTEND. Return UNKNOWN otherwise. */
733 /* Note that the return type ought to be RTX_CODE, but that's not
734 necessarily defined at this point. */
735 int (* mode_rep_extended) (enum machine_mode mode,
736 enum machine_mode mode_rep);
737
738 /* True if MODE is valid for a pointer in __attribute__((mode("MODE"))). */
739 bool (* valid_pointer_mode) (enum machine_mode mode);
740
741 /* Support for named address spaces. */
742 struct addr_space {
743 /* MODE to use for a pointer into another address space. */
744 enum machine_mode (* pointer_mode) (addr_space_t);
745
746 /* MODE to use for an address in another address space. */
747 enum machine_mode (* address_mode) (addr_space_t);
748
749 /* True if MODE is valid for a pointer in __attribute__((mode("MODE")))
750 in another address space. */
751 bool (* valid_pointer_mode) (enum machine_mode, addr_space_t);
752
753 /* True if an address is a valid memory address to a given named address
754 space for a given mode. */
755 bool (* legitimate_address_p) (enum machine_mode, rtx, bool, addr_space_t);
756
757 /* Return an updated address to convert an invalid pointer to a named
758 address space to a valid one. If NULL_RTX is returned use machine
759 independent methods to make the address valid. */
760 rtx (* legitimize_address) (rtx, rtx, enum machine_mode, addr_space_t);
761
762 /* True if one named address space is a subset of another named address. */
763 bool (* subset_p) (addr_space_t, addr_space_t);
764
765 /* Function to convert an rtl expression from one address space to
766 another. */
767 rtx (* convert) (rtx, tree, tree);
768
769 } addr_space;
770
771 /* True if MODE is valid for the target. By "valid", we mean able to
772 be manipulated in non-trivial ways. In particular, this means all
773 the arithmetic is supported. */
774 bool (* scalar_mode_supported_p) (enum machine_mode mode);
775
776 /* Similarly for vector modes. "Supported" here is less strict. At
777 least some operations are supported; need to check optabs or builtins
778 for further details. */
779 bool (* vector_mode_supported_p) (enum machine_mode mode);
780
781 /* True for MODE if the target expects that registers in this mode will
782 be allocated to registers in a small register class. The compiler is
783 allowed to use registers explicitly used in the rtl as spill registers
784 but it should prevent extending the lifetime of these registers. */
785 bool (* small_register_classes_for_mode_p) (enum machine_mode mode);
786
787 /* Compute a (partial) cost for rtx X. Return true if the complete
788 cost has been computed, and false if subexpressions should be
789 scanned. In either case, *TOTAL contains the cost result. */
790 /* Note that CODE and OUTER_CODE ought to be RTX_CODE, but that's
791 not necessarily defined at this point. */
792 bool (* rtx_costs) (rtx x, int code, int outer_code, int *total, bool speed);
793
794 /* Compute the cost of X, used as an address. Never called with
795 invalid addresses. */
796 int (* address_cost) (rtx x, bool speed);
797
798 /* Return where to allocate pseudo for a given hard register initial
799 value. */
800 rtx (* allocate_initial_value) (rtx x);
801
802 /* Return nonzero if evaluating UNSPEC[_VOLATILE] X might cause a trap.
803 FLAGS has the same meaning as in rtlanal.c: may_trap_p_1. */
804 int (* unspec_may_trap_p) (const_rtx x, unsigned flags);
805
806 /* Given a register, this hook should return a parallel of registers
807 to represent where to find the register pieces. Define this hook
808 if the register and its mode are represented in Dwarf in
809 non-contiguous locations, or if the register should be
810 represented in more than one register in Dwarf. Otherwise, this
811 hook should return NULL_RTX. */
812 rtx (* dwarf_register_span) (rtx);
813
814 /* If expand_builtin_init_dwarf_reg_sizes needs to fill in table
815 entries not corresponding directly to registers below
816 FIRST_PSEUDO_REGISTER, this hook should generate the necessary
817 code, given the address of the table. */
818 void (* init_dwarf_reg_sizes_extra) (tree);
819
820 /* Fetch the fixed register(s) which hold condition codes, for
821 targets where it makes sense to look for duplicate assignments to
822 the condition codes. This should return true if there is such a
823 register, false otherwise. The arguments should be set to the
824 fixed register numbers. Up to two condition code registers are
825 supported. If there is only one for this target, the int pointed
826 at by the second argument should be set to -1. */
827 bool (* fixed_condition_code_regs) (unsigned int *, unsigned int *);
828
829 /* If two condition code modes are compatible, return a condition
830 code mode which is compatible with both, such that a comparison
831 done in the returned mode will work for both of the original
832 modes. If the condition code modes are not compatible, return
833 VOIDmode. */
834 enum machine_mode (* cc_modes_compatible) (enum machine_mode,
835 enum machine_mode);
836
837 /* Do machine-dependent code transformations. Called just before
838 delayed-branch scheduling. */
839 void (* machine_dependent_reorg) (void);
840
841 /* Create the __builtin_va_list type. */
842 tree (* build_builtin_va_list) (void);
843
844 /* Get the cfun/fndecl calling abi __builtin_va_list type. */
845 tree (* fn_abi_va_list) (tree);
846
847 /* Get the __builtin_va_list type dependent on input type. */
848 tree (* canonical_va_list_type) (tree);
849
850 /* Expand the __builtin_va_start builtin. */
851 void (* expand_builtin_va_start) (tree valist, rtx nextarg);
852
853 /* Gimplifies a VA_ARG_EXPR. */
854 tree (* gimplify_va_arg_expr) (tree valist, tree type, gimple_seq *pre_p,
855 gimple_seq *post_p);
856
857 /* Validity-checking routines for PCH files, target-specific.
858 get_pch_validity returns a pointer to the data to be stored,
859 and stores the size in its argument. pch_valid_p gets the same
860 information back and returns NULL if the PCH is valid,
861 or an error message if not.
862 */
863 void * (* get_pch_validity) (size_t *);
864 const char * (* pch_valid_p) (const void *, size_t);
865
866 /* If nonnull, this function checks whether a PCH file with the
867 given set of target flags can be used. It returns NULL if so,
868 otherwise it returns an error message. */
869 const char *(*check_pch_target_flags) (int);
870
871 /* True if the compiler should give an enum type only as many
872 bytes as it takes to represent the range of possible values of
873 that type. */
874 bool (* default_short_enums) (void);
875
876 /* This target hook returns an rtx that is used to store the address
877 of the current frame into the built-in setjmp buffer. */
878 rtx (* builtin_setjmp_frame_value) (void);
879
880 /* This target hook should add STRING_CST trees for any hard regs
881 the port wishes to automatically clobber for an asm. */
882 tree (* md_asm_clobbers) (tree, tree, tree);
883
884 /* This target hook allows the backend to specify a calling convention
885 in the debug information. This function actually returns an
886 enum dwarf_calling_convention, but because of forward declarations
887 and not wanting to include dwarf2.h everywhere target.h is included
888 the function is being declared as an int. */
889 int (* dwarf_calling_convention) (const_tree);
890
891 /* This target hook allows the backend to emit frame-related insns that
892 contain UNSPECs or UNSPEC_VOLATILEs. The call frame debugging info
893 engine will invoke it on insns of the form
894 (set (reg) (unspec [...] UNSPEC_INDEX))
895 and
896 (set (reg) (unspec_volatile [...] UNSPECV_INDEX))
897 to let the backend emit the call frame instructions. */
898 void (* dwarf_handle_frame_unspec) (const char *, rtx, int);
899
900 /* Perform architecture specific checking of statements gimplified
901 from VA_ARG_EXPR. STMT is the statement. Returns true if the statement
902 doesn't need to be checked for va_list references. */
903 bool (* stdarg_optimize_hook) (struct stdarg_info *ai, const_gimple stmt);
904
905 /* This target hook allows the operating system to override the DECL
906 that represents the external variable that contains the stack
907 protection guard variable. The type of this DECL is ptr_type_node. */
908 tree (* stack_protect_guard) (void);
909
910 /* This target hook allows the operating system to override the CALL_EXPR
911 that is invoked when a check vs the guard variable fails. */
912 tree (* stack_protect_fail) (void);
913
914 /* Returns NULL if target supports the insn within a doloop block,
915 otherwise it returns an error message. */
916 const char * (*invalid_within_doloop) (const_rtx);
917
918 /* DECL is a variable or function with __attribute__((dllimport))
919 specified. Use this hook if the target needs to add extra validation
920 checks to handle_dll_attribute (). */
921 bool (* valid_dllimport_attribute_p) (const_tree decl);
922
923 /* If non-zero, align constant anchors in CSE to a multiple of this
924 value. */
925 unsigned HOST_WIDE_INT const_anchor;
926
927 /* Functions relating to calls - argument passing, returns, etc. */
928 struct calls {
929 enum machine_mode (*promote_function_mode) (const_tree type,
930 enum machine_mode mode,
931 int *punsignedp,
932 const_tree fntype,
933 int for_return);
934 bool (*promote_prototypes) (const_tree fntype);
935 rtx (*struct_value_rtx) (tree fndecl, int incoming);
936 bool (*return_in_memory) (const_tree type, const_tree fndecl);
937 bool (*return_in_msb) (const_tree type);
938
939 /* Return true if a parameter must be passed by reference. TYPE may
940 be null if this is a libcall. CA may be null if this query is
941 from __builtin_va_arg. */
942 bool (*pass_by_reference) (CUMULATIVE_ARGS *ca, enum machine_mode mode,
943 const_tree type, bool named_arg);
944
945 rtx (*expand_builtin_saveregs) (void);
946 /* Returns pretend_argument_size. */
947 void (*setup_incoming_varargs) (CUMULATIVE_ARGS *ca, enum machine_mode mode,
948 tree type, int *pretend_arg_size,
949 int second_time);
950 bool (*strict_argument_naming) (CUMULATIVE_ARGS *ca);
951 /* Returns true if we should use
952 targetm.calls.setup_incoming_varargs() and/or
953 targetm.calls.strict_argument_naming(). */
954 bool (*pretend_outgoing_varargs_named) (CUMULATIVE_ARGS *ca);
955
956 /* Given a complex type T, return true if a parameter of type T
957 should be passed as two scalars. */
958 bool (* split_complex_arg) (const_tree type);
959
960 /* Return true if type T, mode MODE, may not be passed in registers,
961 but must be passed on the stack. */
962 /* ??? This predicate should be applied strictly after pass-by-reference.
963 Need audit to verify that this is the case. */
964 bool (* must_pass_in_stack) (enum machine_mode mode, const_tree t);
965
966 /* Return true if type TYPE, mode MODE, which is passed by reference,
967 should have the object copy generated by the callee rather than
968 the caller. It is never called for TYPE requiring constructors. */
969 bool (* callee_copies) (CUMULATIVE_ARGS *ca, enum machine_mode mode,
970 const_tree type, bool named);
971
972 /* Return zero for arguments passed entirely on the stack or entirely
973 in registers. If passed in both, return the number of bytes passed
974 in registers; the balance is therefore passed on the stack. */
975 int (* arg_partial_bytes) (CUMULATIVE_ARGS *ca, enum machine_mode mode,
976 tree type, bool named);
977
978 /* Return the diagnostic message string if function without a prototype
979 is not allowed for this 'val' argument; NULL otherwise. */
980 const char *(*invalid_arg_for_unprototyped_fn) (const_tree typelist,
981 const_tree funcdecl,
982 const_tree val);
983
984 /* Return an rtx for the return value location of the function
985 specified by FN_DECL_OR_TYPE with a return type of RET_TYPE. */
986 rtx (*function_value) (const_tree ret_type, const_tree fn_decl_or_type,
987 bool outgoing);
988
989 /* Return the rtx for the result of a libcall of mode MODE,
990 calling the function FN_NAME. */
991 rtx (*libcall_value) (enum machine_mode, const_rtx);
992
993 /* Return true if REGNO is a possible register number for
994 a function value as seen by the caller. */
995 bool (*function_value_regno_p) (const unsigned int);
996
997 /* Return an rtx for the argument pointer incoming to the
998 current function. */
999 rtx (*internal_arg_pointer) (void);
1000
1001 /* Update the current function stack boundary if needed. */
1002 void (*update_stack_boundary) (void);
1003
1004 /* Handle stack alignment and return an rtx for Dynamic Realign
1005 Argument Pointer if necessary. */
1006 rtx (*get_drap_rtx) (void);
1007
1008 /* Return true if all function parameters should be spilled to the
1009 stack. */
1010 bool (*allocate_stack_slots_for_args) (void);
1011
1012 /* Return an rtx for the static chain for FNDECL. If INCOMING_P is true,
1013 then it should be for the callee; otherwise for the caller. */
1014 rtx (*static_chain) (const_tree fndecl, bool incoming_p);
1015
1016 /* Fill in the trampoline at MEM with a call to FNDECL and a
1017 static chain value of CHAIN. */
1018 void (*trampoline_init) (rtx mem, tree fndecl, rtx chain);
1019
1020 /* Adjust the address of the trampoline in a target-specific way. */
1021 rtx (*trampoline_adjust_address) (rtx addr);
1022 } calls;
1023
1024 /* Return the diagnostic message string if conversion from FROMTYPE
1025 to TOTYPE is not allowed, NULL otherwise. */
1026 const char *(*invalid_conversion) (const_tree fromtype, const_tree totype);
1027
1028 /* Return the diagnostic message string if the unary operation OP is
1029 not permitted on TYPE, NULL otherwise. */
1030 const char *(*invalid_unary_op) (int op, const_tree type);
1031
1032 /* Return the diagnostic message string if the binary operation OP
1033 is not permitted on TYPE1 and TYPE2, NULL otherwise. */
1034 const char *(*invalid_binary_op) (int op, const_tree type1, const_tree type2);
1035
1036 /* Return the diagnostic message string if TYPE is not valid as a
1037 function parameter type, NULL otherwise. */
1038 const char *(*invalid_parameter_type) (const_tree type);
1039
1040 /* Return the diagnostic message string if TYPE is not valid as a
1041 function return type, NULL otherwise. */
1042 const char *(*invalid_return_type) (const_tree type);
1043
1044 /* If values of TYPE are promoted to some other type when used in
1045 expressions (analogous to the integer promotions), return that type,
1046 or NULL_TREE otherwise. */
1047 tree (*promoted_type) (const_tree type);
1048
1049 /* Convert EXPR to TYPE, if target-specific types with special conversion
1050 rules are involved. Return the converted expression, or NULL to apply
1051 the standard conversion rules. */
1052 tree (*convert_to_type) (tree type, tree expr);
1053
1054 /* Return the array of IRA cover classes for the current target. */
1055 const enum reg_class *(*ira_cover_classes) (void);
1056
1057 /* Return the class for a secondary reload, and fill in extra information. */
1058 enum reg_class (*secondary_reload) (bool, rtx, enum reg_class,
1059 enum machine_mode,
1060 secondary_reload_info *);
1061
1062 /* This target hook allows the backend to perform additional
1063 processing while initializing for variable expansion. */
1064 void (* expand_to_rtl_hook) (void);
1065
1066 /* This target hook allows the backend to perform additional
1067 instantiations on rtx that are not actually in insns yet,
1068 but will be later. */
1069 void (* instantiate_decls) (void);
1070
1071 /* Return true if is OK to use a hard register REGNO as scratch register
1072 in peephole2. */
1073 bool (* hard_regno_scratch_ok) (unsigned int regno);
1074
1075 /* Return the smallest number of different values for which it is best to
1076 use a jump-table instead of a tree of conditional branches. */
1077 unsigned int (* case_values_threshold) (void);
1078
1079 /* Retutn true if a function must have and use a frame pointer. */
1080 bool (* frame_pointer_required) (void);
1081
1082 /* Returns true if the compiler is allowed to try to replace register number
1083 from-reg with register number to-reg. */
1084 bool (* can_eliminate) (const int, const int);
1085
1086 /* Functions specific to the C family of frontends. */
1087 struct c {
1088 /* Return machine mode for non-standard suffix
1089 or VOIDmode if non-standard suffixes are unsupported. */
1090 enum machine_mode (*mode_for_suffix) (char);
1091 } c;
1092
1093 /* Functions specific to the C++ frontend. */
1094 struct cxx {
1095 /* Return the integer type used for guard variables. */
1096 tree (*guard_type) (void);
1097 /* Return true if only the low bit of the guard should be tested. */
1098 bool (*guard_mask_bit) (void);
1099 /* Returns the size of the array cookie for an array of type. */
1100 tree (*get_cookie_size) (tree);
1101 /* Returns true if the element size should be stored in the
1102 array cookie. */
1103 bool (*cookie_has_size) (void);
1104 /* Allows backends to perform additional processing when
1105 deciding if a class should be exported or imported. */
1106 int (*import_export_class) (tree, int);
1107 /* Returns true if constructors and destructors return "this". */
1108 bool (*cdtor_returns_this) (void);
1109 /* Returns true if the key method for a class can be an inline
1110 function, so long as it is not declared inline in the class
1111 itself. Returning true is the behavior required by the Itanium
1112 C++ ABI. */
1113 bool (*key_method_may_be_inline) (void);
1114 /* DECL is a virtual table, virtual table table, typeinfo object,
1115 or other similar implicit class data object that will be
1116 emitted with external linkage in this translation unit. No ELF
1117 visibility has been explicitly specified. If the target needs
1118 to specify a visibility other than that of the containing class,
1119 use this hook to set DECL_VISIBILITY and
1120 DECL_VISIBILITY_SPECIFIED. */
1121 void (*determine_class_data_visibility) (tree decl);
1122 /* Returns true (the default) if virtual tables and other
1123 similar implicit class data objects are always COMDAT if they
1124 have external linkage. If this hook returns false, then
1125 class data for classes whose virtual table will be emitted in
1126 only one translation unit will not be COMDAT. */
1127 bool (*class_data_always_comdat) (void);
1128 /* Returns true (the default) if the RTTI for the basic types,
1129 which is always defined in the C++ runtime, should be COMDAT;
1130 false if it should not be COMDAT. */
1131 bool (*library_rtti_comdat) (void);
1132 /* Returns true if __aeabi_atexit should be used to register static
1133 destructors. */
1134 bool (*use_aeabi_atexit) (void);
1135 /* Returns true if target may use atexit in the same manner as
1136 __cxa_atexit to register static destructors. */
1137 bool (*use_atexit_for_cxa_atexit) (void);
1138 /* TYPE is a C++ class (i.e., RECORD_TYPE or UNION_TYPE) that
1139 has just been defined. Use this hook to make adjustments to the
1140 class (eg, tweak visibility or perform any other required
1141 target modifications). */
1142 void (*adjust_class_at_definition) (tree type);
1143 } cxx;
1144
1145 /* Functions and data for emulated TLS support. */
1146 struct emutls {
1147 /* Name of the address and common functions. */
1148 const char *get_address;
1149 const char *register_common;
1150
1151 /* Prefixes for proxy variable and template. */
1152 const char *var_section;
1153 const char *tmpl_section;
1154
1155 /* Prefixes for proxy variable and template. */
1156 const char *var_prefix;
1157 const char *tmpl_prefix;
1158
1159 /* Function to generate field definitions of the proxy variable. */
1160 tree (*var_fields) (tree, tree *);
1161
1162 /* Function to initialize a proxy variable. */
1163 tree (*var_init) (tree, tree, tree);
1164
1165 /* Whether we are allowed to alter the usual alignment of the
1166 proxy variable. */
1167 bool var_align_fixed;
1168
1169 /* Whether we can emit debug information for TLS vars. */
1170 bool debug_form_tls_address;
1171 } emutls;
1172
1173 struct target_option_hooks {
1174 /* Function to validate the attribute((option(...))) strings or NULL. If
1175 the option is validated, it is assumed that DECL_FUNCTION_SPECIFIC will
1176 be filled in in the function decl node. */
1177 bool (*valid_attribute_p) (tree, tree, tree, int);
1178
1179 /* Function to save any extra target state in the target options
1180 structure. */
1181 void (*save) (struct cl_target_option *);
1182
1183 /* Function to restore any extra target state from the target options
1184 structure. */
1185 void (*restore) (struct cl_target_option *);
1186
1187 /* Function to print any extra target state from the target options
1188 structure. */
1189 void (*print) (FILE *, int, struct cl_target_option *);
1190
1191 /* Function to parse arguments to be validated for #pragma option, and to
1192 change the state if the options are valid. If the first argument is
1193 NULL, the second argument specifies the default options to use. Return
1194 true if the options are valid, and set the current state. */
1195 bool (*pragma_parse) (tree, tree);
1196
1197 /* Function to determine if one function can inline another function. */
1198 bool (*can_inline_p) (tree, tree);
1199 } target_option;
1200
1201 /* For targets that need to mark extra registers as live on entry to
1202 the function, they should define this target hook and set their
1203 bits in the bitmap passed in. */
1204 void (*live_on_entry) (bitmap);
1205
1206 /* True if unwinding tables should be generated by default. */
1207 bool unwind_tables_default;
1208
1209 /* Leave the boolean fields at the end. */
1210
1211 /* True if arbitrary sections are supported. */
1212 bool have_named_sections;
1213
1214 /* True if we can create zeroed data by switching to a BSS section
1215 and then using ASM_OUTPUT_SKIP to allocate the space. */
1216 bool have_switchable_bss_sections;
1217
1218 /* True if "native" constructors and destructors are supported,
1219 false if we're using collect2 for the job. */
1220 bool have_ctors_dtors;
1221
1222 /* True if thread-local storage is supported. */
1223 bool have_tls;
1224
1225 /* True if a small readonly data section is supported. */
1226 bool have_srodata_section;
1227
1228 /* True if EH frame info sections should be zero-terminated. */
1229 bool terminate_dw2_eh_frame_info;
1230
1231 /* True if #NO_APP should be emitted at the beginning of
1232 assembly output. */
1233 bool file_start_app_off;
1234
1235 /* True if output_file_directive should be called for main_input_filename
1236 at the beginning of assembly output. */
1237 bool file_start_file_directive;
1238
1239 /* True if #pragma extern_prefix is to be supported. */
1240 bool handle_pragma_extern_prefix;
1241
1242 /* True if the target is allowed to reorder memory accesses unless
1243 synchronization is explicitly requested. */
1244 bool relaxed_ordering;
1245
1246 /* Returns true if we should generate exception tables for use with the
1247 ARM EABI. The effects the encoding of function exception specifications.
1248 */
1249 bool arm_eabi_unwinder;
1250
1251 /* Leave the boolean fields at the end. */
1252 }; 132 };
1253 133
/* Sets of optimization levels at which an option may be enabled by
   default_options_optimization.  */
enum opt_levels
{
  OPT_LEVELS_NONE, /* No levels (mark end of array).  */
  OPT_LEVELS_ALL, /* All levels (used by targets to disable options
		     enabled in target-independent code).  */
  OPT_LEVELS_0_ONLY, /* -O0 only.  */
  OPT_LEVELS_1_PLUS, /* -O1 and above, including -Os.  */
  OPT_LEVELS_1_PLUS_SPEED_ONLY, /* -O1 and above, but not -Os.  */
  OPT_LEVELS_2_PLUS, /* -O2 and above, including -Os.  */
  OPT_LEVELS_2_PLUS_SPEED_ONLY, /* -O2 and above, but not -Os.  */
  OPT_LEVELS_3_PLUS, /* -O3 and above.  */
  OPT_LEVELS_3_PLUS_AND_SIZE, /* -O3 and above and -Os.  */
  OPT_LEVELS_SIZE, /* -Os only.  */
  OPT_LEVELS_FAST /* -Ofast only.  */
};
151
152 /* Description of options to enable by default at given levels. */
153 struct default_options
154 {
155 /* The levels at which to enable the option. */
156 enum opt_levels levels;
157
158 /* The option index and argument or enabled/disabled sense of the
159 option, as passed to handle_generated_option. If ARG is NULL and
160 the option allows a negative form, the option is considered to be
161 passed in negative form when the optimization level is not one of
162 those in LEVELS (in order to handle changes to the optimization
163 level with the "optimize" attribute). */
164 size_t opt_index;
165 const char *arg;
166 int value;
167 };
168
169 /* The target structure. This holds all the backend hooks. */
170 #define DEFHOOKPOD(NAME, DOC, TYPE, INIT) TYPE NAME;
171 #define DEFHOOK(NAME, DOC, TYPE, PARAMS, INIT) TYPE (* NAME) PARAMS;
172 #define DEFHOOK_UNDOC DEFHOOK
173 #define HOOKSTRUCT(FRAGMENT) FRAGMENT
174
175 #include "target.def"
176
1254 extern struct gcc_target targetm; 177 extern struct gcc_target targetm;
1255
struct gcc_targetcm {
  /* Handle target switch CODE (an OPT_* value).  ARG is the argument
     passed to the switch; it is NULL if no argument was.  VALUE is the
     value of ARG if CODE specifies a UInteger option, otherwise it is
     1 if the positive form of the switch was used and 0 if the negative
     form was.  Return true if the switch was valid.  */
  bool (*handle_c_option) (size_t code, const char *arg, int value);
};
1264 178
1265 /* Each target can provide their own. */ 179 /* Each target can provide their own. */
1266 extern struct gcc_targetcm targetcm; 180 extern struct gcc_targetcm targetcm;
1267 181
1268 #endif /* GCC_TARGET_H */ 182 #endif /* GCC_TARGET_H */