/* Data structure definitions for a generic GCC target.
   Copyright (C) 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.

In other words, you are welcome to use, share and improve this program.
You are forbidden to forbid anyone else to use, share and improve
what you give them.  Help stamp out software-hoarding!  */

/* This file contains a data structure that describes a GCC target.
   At present it is incomplete, but in future it should grow to
   contain most or all target machine and target O/S specific
   information.

   This structure has its initializer declared in target-def.h in the
   form of large macro TARGET_INITIALIZER that expands to many smaller
   macros.

   The smaller macros each initialize one component of the structure,
   and each has a default.  Each target should have a file that
   includes target.h and target-def.h, and overrides any inappropriate
   defaults by undefining the relevant macro and defining a suitable
   replacement.  That file should then contain the definition of
   "targetm" like so:

   struct gcc_target targetm = TARGET_INITIALIZER;

   Doing things this way allows us to bring together everything that
   defines a GCC target.  By supplying a default that is appropriate
   to most targets, we can easily add new items without needing to
   edit dozens of target configuration files.  It should also allow us
   to gradually reduce the amount of conditional compilation that is
   scattered throughout GCC.  */

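/* For instance (an illustrative sketch only, not part of this interface),
   a port's back-end source file usually looks roughly like this; the hook
   macro TARGET_ASM_FILE_START is real, while "example_file_start" stands
   for a function that the port would define elsewhere in the same file:

       #include "target.h"
       #include "target-def.h"

       #undef  TARGET_ASM_FILE_START
       #define TARGET_ASM_FILE_START example_file_start

       struct gcc_target targetm = TARGET_INITIALIZER;  */
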
#ifndef GCC_TARGET_H
#define GCC_TARGET_H

#include "tm.h"
#include "insn-modes.h"

struct gcc_target
{
  /* Functions that output assembler for the target.  */
  struct asm_out
  {
    /* Opening and closing parentheses for asm expression grouping.  */
    const char *open_paren, *close_paren;

    /* Assembler instructions for creating various kinds of integer object.  */
    const char *byte_op;
    struct asm_int_op
    {
      const char *hi;
      const char *si;
      const char *di;
      const char *ti;
    } aligned_op, unaligned_op;

    /* Try to output the assembler code for an integer object whose
       value is given by X.  SIZE is the size of the object in bytes and
       ALIGNED_P indicates whether it is aligned.  Return true if
       successful.  Only handles cases for which BYTE_OP, ALIGNED_OP
       and UNALIGNED_OP are NULL.  */
    bool (* integer) (rtx x, unsigned int size, int aligned_p);

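    /* As an illustration only (these mirror the generic ELF-style defaults,
       so individual ports may differ), a port normally supplies the strings
       through the corresponding macros before TARGET_INITIALIZER expands:

           #define TARGET_ASM_BYTE_OP        "\t.byte\t"
           #define TARGET_ASM_ALIGNED_HI_OP  "\t.short\t"
           #define TARGET_ASM_ALIGNED_SI_OP  "\t.long\t"
           #define TARGET_ASM_ALIGNED_DI_OP  "\t.quad\t"

       A string left NULL means that case must be handled by the INTEGER
       hook above (see its comment).  */
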
    /* Output code that will globalize a label.  */
    void (* globalize_label) (FILE *, const char *);

    /* Output code that will emit a label for unwind info, if this
       target requires such labels.  Second argument is the decl the
       unwind info is associated with, third is a boolean: true if
       this is for exception handling, fourth is a boolean: true if
       this is only a placeholder for an omitted FDE.  */
    void (* unwind_label) (FILE *, tree, int, int);

    /* Emit any directives required to unwind this instruction.  */
    void (* unwind_emit) (FILE *, rtx);

    /* Output an internal label.  */
    void (* internal_label) (FILE *, const char *, unsigned long);

    /* Emit an assembler directive to set visibility for the symbol
       associated with the tree decl.  */
    void (* visibility) (tree, int);

    /* Output the assembler code for entry to a function.  */
    void (* function_prologue) (FILE *, HOST_WIDE_INT);

    /* Output the assembler code for end of prologue.  */
    void (* function_end_prologue) (FILE *);

    /* Output the assembler code for start of epilogue.  */
    void (* function_begin_epilogue) (FILE *);

    /* Output the assembler code for function exit.  */
    void (* function_epilogue) (FILE *, HOST_WIDE_INT);

    /* Tell assembler to change to section NAME with attributes FLAGS.
       If DECL is non-NULL, it is the VAR_DECL or FUNCTION_DECL with
       which this section is associated.  */
    void (* named_section) (const char *name, unsigned int flags, tree decl);

    /* Switch to the section that holds the exception table.  */
    void (* exception_section) (void);

    /* Switch to the section that holds the exception frames.  */
    void (* eh_frame_section) (void);

    /* Select and switch to a section for EXP.  It may be a DECL or a
       constant.  RELOC is nonzero if runtime relocations must be applied;
       bit 1 will be set if the runtime relocations require non-local
       name resolution.  ALIGN is the required alignment of the data.  */
    void (* select_section) (tree, int, unsigned HOST_WIDE_INT);

    /* Select and switch to a section for X with MODE.  ALIGN is
       the desired alignment of the data.  */
    void (* select_rtx_section) (enum machine_mode, rtx,
                                 unsigned HOST_WIDE_INT);

    /* Select a unique section name for DECL.  RELOC is the same as
       for SELECT_SECTION.  */
    void (* unique_section) (tree, int);

    /* Tell assembler to switch to the readonly data section associated
       with function DECL.  */
    void (* function_rodata_section) (tree);

    /* Output a constructor for a symbol with a given priority.  */
    void (* constructor) (rtx, int);

    /* Output a destructor for a symbol with a given priority.  */
    void (* destructor) (rtx, int);

    /* Output the assembler code for a thunk function.  THUNK_DECL is the
       declaration for the thunk function itself, FUNCTION is the decl for
       the target function.  DELTA is an immediate constant offset to be
       added to THIS.  If VCALL_OFFSET is nonzero, the word at
       *(*this + vcall_offset) should be added to THIS.  */
    void (* output_mi_thunk) (FILE *file, tree thunk_decl,
                              HOST_WIDE_INT delta, HOST_WIDE_INT vcall_offset,
                              tree function_decl);

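    /* In other words (an explanatory sketch only, in pseudo-C describing
       the code the hook must emit, not code that is compiled here), the
       thunk should behave like:

           this += delta;
           if (vcall_offset != 0)
             this += *(ptrdiff_t *) (*(char **) this + vcall_offset);
           tail-call FUNCTION (this, ...other arguments unchanged...);  */
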
    /* Determine whether output_mi_thunk would succeed.  */
    /* ??? Ideally, this hook would not exist, and success or failure
       would be returned from output_mi_thunk directly.  But there's
       too much undo-able setup involved in invoking output_mi_thunk.
       Could be fixed by making output_mi_thunk emit rtl instead of
       text to the output file.  */
    bool (* can_output_mi_thunk) (tree thunk_decl, HOST_WIDE_INT delta,
                                  HOST_WIDE_INT vcall_offset,
                                  tree function_decl);

    /* Output any boilerplate text needed at the beginning of a
       translation unit.  */
    void (*file_start) (void);

    /* Output any boilerplate text needed at the end of a
       translation unit.  */
    void (*file_end) (void);

    /* Output an assembler pseudo-op to declare a library function name
       external.  */
    void (*external_libcall) (rtx);

    /* Output an assembler directive to mark the decl live.  This
       instructs the linker not to dead-code strip this symbol.  */
    void (*mark_decl_preserved) (const char *);
  } asm_out;

  /* Functions relating to instruction scheduling.  */
  struct sched
  {
    /* Given the current cost, COST, of an insn, INSN, calculate and
       return a new cost based on its relationship to DEP_INSN through
       the dependence LINK.  The default is to make no adjustment.  */
    int (* adjust_cost) (rtx insn, rtx link, rtx def_insn, int cost);

    /* Adjust the priority of an insn as you see fit.  Returns the new
       priority.  */
    int (* adjust_priority) (rtx, int);

    /* Function which returns the maximum number of insns that can be
       scheduled in the same machine cycle.  This must be constant
       over an entire compilation.  The default is 1.  */
    int (* issue_rate) (void);

    /* Calculate how much this insn affects how many more insns we
       can emit this cycle.  Default is they all cost the same.  */
    int (* variable_issue) (FILE *, int, rtx, int);

    /* Initialize machine-dependent scheduling code.  */
    void (* md_init) (FILE *, int, int);

    /* Finalize machine-dependent scheduling code.  */
    void (* md_finish) (FILE *, int);

    /* Initialize machine-dependent, function-wide scheduling code.  */
    void (* md_init_global) (FILE *, int, int);

    /* Finalize machine-dependent, function-wide scheduling code.  */
    void (* md_finish_global) (FILE *, int);

    /* Reorder insns in a machine-dependent fashion, in two different
       places.  Default does nothing.  */
    int (* reorder) (FILE *, int, rtx *, int *, int);
    int (* reorder2) (FILE *, int, rtx *, int *, int);

    /* The following member value is a pointer to a function called
       after evaluating the forward dependencies of the insns in the
       chain given by its two parameters (head and tail, respectively).  */
    void (* dependencies_evaluation_hook) (rtx, rtx);

    /* The values of the following four members are pointers to
       functions used to simplify the automaton descriptions.
       dfa_pre_cycle_insn and dfa_post_cycle_insn give functions
       returning insns which are used to change the pipeline hazard
       recognizer state when a new simulated processor cycle
       respectively starts and finishes.  The functions defined by
       init_dfa_pre_cycle_insn and init_dfa_post_cycle_insn are used
       to initialize the corresponding insns.  The default values of
       these members leave the automaton state unchanged when a new
       simulated processor cycle starts or finishes.  */
    void (* init_dfa_pre_cycle_insn) (void);
    rtx (* dfa_pre_cycle_insn) (void);
    void (* init_dfa_post_cycle_insn) (void);
    rtx (* dfa_post_cycle_insn) (void);

    /* The following member value is a pointer to a function returning a
       value that defines how many insns from the `ready' queue we will
       try for multi-pass scheduling.  If the member value is nonzero and
       the function returns a positive value, the DFA-based scheduler
       will perform multi-pass scheduling for the first cycle.  In other
       words, we will try to choose the ready insn that permits starting
       the maximum number of insns on the same cycle.  */
    int (* first_cycle_multipass_dfa_lookahead) (void);

    /* The following member value is a pointer to a function controlling
       which insns from the ready insn queue will be considered for the
       multipass insn scheduling.  If the hook returns zero for the insn
       passed as the parameter, the insn will not be chosen to be
       issued.  */
    int (* first_cycle_multipass_dfa_lookahead_guard) (rtx);

    /* The following member value is a pointer to a function called by
       the insn scheduler before issuing the insn passed as the third
       parameter on the given cycle.  If the hook returns nonzero, the
       insn is not issued on the given processor cycle.  Instead, the
       processor cycle is advanced.  If the value passed through the
       last parameter is zero, the insn ready queue is not sorted on the
       new cycle start as it usually is.  The first parameter passes a
       file for debugging output.  The second one passes the scheduler
       verbosity level of the debugging output.  The fourth and fifth
       parameter values are respectively the processor cycle on which
       the previous insn was issued and the current processor cycle.  */
    int (* dfa_new_cycle) (FILE *, int, rtx, int, int, int *);

    /* The following member value is a pointer to a function called
       by the insn scheduler.  It should return true if there exists a
       dependence which is considered costly by the target, between
       the insn passed as the first parameter, and the insn passed as
       the second parameter.  The third parameter is the INSN_DEPEND
       link that represents the dependence between the two insns.  The
       fourth argument is the cost of the dependence as estimated by
       the scheduler.  The last argument is the distance in cycles
       between the already scheduled insn (first parameter) and the
       second insn (second parameter).  */
    bool (* is_costly_dependence) (rtx, rtx, rtx, int, int);
  } sched;

  /* Functions relating to vectorization.  */
  struct vectorize
  {
    /* The following member value is a pointer to a function called
       by the vectorizer; it returns the decl of the target builtin
       function.  */
    tree (* builtin_mask_for_load) (void);
  } vectorize;

  /* The initial value of target_flags.  */
  int default_target_flags;

  /* Handle target switch CODE (an OPT_* value).  ARG is the argument
     passed to the switch; it is NULL if no argument was given.  VALUE
     is the value of ARG if CODE specifies a UInteger option, otherwise
     it is 1 if the positive form of the switch was used and 0 if the
     negative form was.  Return true if the switch was valid.  */
  bool (* handle_option) (size_t code, const char *arg, int value);

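  /* A minimal sketch, for illustration only; OPT_mexample and
     example_flag are hypothetical names (the real OPT_* values come from
     a port's .opt file), and unrecognized codes are treated as valid:

         static bool
         example_handle_option (size_t code, const char *arg ATTRIBUTE_UNUSED,
                                int value)
         {
           switch (code)
             {
             case OPT_mexample:
               example_flag = value;
               return true;
             default:
               return true;
             }
         }
  */
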
  /* Return machine mode for filter value.  */
  enum machine_mode (* eh_return_filter_mode) (void);

  /* Given two decls, merge their attributes and return the result.  */
  tree (* merge_decl_attributes) (tree, tree);

  /* Given two types, merge their attributes and return the result.  */
  tree (* merge_type_attributes) (tree, tree);

  /* Table of machine attributes and functions to handle them.
     Ignored if NULL.  */
  const struct attribute_spec *attribute_table;

  /* Return zero if the attributes on TYPE1 and TYPE2 are incompatible,
     one if they are compatible and two if they are nearly compatible
     (which causes a warning to be generated).  */
  int (* comp_type_attributes) (tree type1, tree type2);

  /* Assign default attributes to the newly defined TYPE.  */
  void (* set_default_type_attributes) (tree type);

  /* Insert attributes on the newly created DECL.  */
  void (* insert_attributes) (tree decl, tree *attributes);

  /* Return true if FNDECL (which has at least one machine attribute)
     can be inlined despite its machine attributes, false otherwise.  */
  bool (* function_attribute_inlinable_p) (tree fndecl);

  /* Return true if bitfields in RECORD_TYPE should follow the
     Microsoft Visual C++ bitfield layout rules.  */
  bool (* ms_bitfield_layout_p) (tree record_type);

  /* Return true if anonymous bitfields affect structure alignment.  */
  bool (* align_anon_bitfield) (void);

  /* Set up target-specific built-in functions.  */
  void (* init_builtins) (void);

  /* Expand a target-specific builtin.  */
  rtx (* expand_builtin) (tree exp, rtx target, rtx subtarget,
                          enum machine_mode mode, int ignore);

  /* Fold a target-specific builtin.  */
  tree (* fold_builtin) (tree fndecl, tree arglist, bool ignore);

  /* For a vendor-specific fundamental TYPE, return a pointer to
     a statically-allocated string containing the C++ mangling for
     TYPE.  In all other cases, return NULL.  */
  const char * (* mangle_fundamental_type) (tree type);

  /* Make any adjustments to libfunc names needed for this target.  */
  void (* init_libfuncs) (void);

  /* Given a decl, a section name, and whether the decl initializer
     has relocs, choose attributes for the section.  */
  /* ??? Should be merged with SELECT_SECTION and UNIQUE_SECTION.  */
  unsigned int (* section_type_flags) (tree, const char *, int);

  /* True if new jumps cannot be created at the current point in the
     compilation, whether to replace existing ones or not.  */
  bool (* cannot_modify_jumps_p) (void);

  /* Return a register class for which branch target register
     optimizations should be applied.  */
  int (* branch_target_register_class) (void);

  /* Return true if branch target register optimizations should include
     callee-saved registers that are not already live during the current
     function.  AFTER_PE_GEN is true if prologues and epilogues have
     already been generated.  */
  bool (* branch_target_register_callee_saved) (bool after_pe_gen);

  /* True if the constant X cannot be placed in the constant pool.  */
  bool (* cannot_force_const_mem) (rtx);

  /* True if the insn X cannot be duplicated.  */
  bool (* cannot_copy_insn_p) (rtx);

  /* Given an address RTX, undo the effects of LEGITIMIZE_ADDRESS.  */
  rtx (* delegitimize_address) (rtx);

  /* True if it is OK to do sibling call optimization for the specified
     call expression EXP.  DECL will be the called function, or NULL if
     this is an indirect call.  */
  bool (*function_ok_for_sibcall) (tree decl, tree exp);

  /* True if EXP should be placed in a "small data" section.  */
  bool (* in_small_data_p) (tree);

  /* True if EXP names an object for which name resolution must resolve
     to the current module.  */
  bool (* binds_local_p) (tree);

  /* Do something target-specific to record properties of the DECL into
     the associated SYMBOL_REF.  */
  void (* encode_section_info) (tree, rtx, int);

  /* Undo the effects of encode_section_info on the symbol string.  */
  const char * (* strip_name_encoding) (const char *);

  /* If shift optabs for MODE are known to always truncate the shift count,
     return the mask that they apply.  Return 0 otherwise.  */
  unsigned HOST_WIDE_INT (* shift_truncation_mask) (enum machine_mode mode);

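  /* For illustration only (a hypothetical port): if SImode shift
     instructions use just the low five bits of the count, the hook
     might read

         static unsigned HOST_WIDE_INT
         example_shift_truncation_mask (enum machine_mode mode)
         {
           return mode == SImode ? 31 : 0;
         }
  */
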
  /* True if MODE is valid for a pointer in __attribute__((mode("MODE"))).  */
  bool (* valid_pointer_mode) (enum machine_mode mode);

  /* True if MODE is valid for the target.  By "valid", we mean able to
     be manipulated in non-trivial ways.  In particular, this means all
     the arithmetic is supported.  */
  bool (* scalar_mode_supported_p) (enum machine_mode mode);

  /* Similarly for vector modes.  "Supported" here is less strict.  At
     least some operations are supported; need to check optabs or builtins
     for further details.  */
  bool (* vector_mode_supported_p) (enum machine_mode mode);

  /* True if a vector is opaque.  */
  bool (* vector_opaque_p) (tree);

  /* Compute a (partial) cost for rtx X.  Return true if the complete
     cost has been computed, and false if subexpressions should be
     scanned.  In either case, *TOTAL contains the cost result.  */
  /* Note that CODE and OUTER_CODE ought to be RTX_CODE, but that's
     not necessarily defined at this point.  */
  bool (* rtx_costs) (rtx x, int code, int outer_code, int *total);

  /* Compute the cost of X, used as an address.  Never called with
     invalid addresses.  */
  int (* address_cost) (rtx x);

  /* Given a register, this hook should return a parallel of registers
     to represent where to find the register pieces.  Define this hook
     if the register and its mode are represented in Dwarf in
     non-contiguous locations, or if the register should be
     represented in more than one register in Dwarf.  Otherwise, this
     hook should return NULL_RTX.  */
  rtx (* dwarf_register_span) (rtx);

  /* Fetch the fixed register(s) which hold condition codes, for
     targets where it makes sense to look for duplicate assignments to
     the condition codes.  This should return true if there is such a
     register, false otherwise.  The arguments should be set to the
     fixed register numbers.  Up to two condition code registers are
     supported.  If there is only one for this target, the int pointed
     at by the second argument should be set to -1.  */
  bool (* fixed_condition_code_regs) (unsigned int *, unsigned int *);

  /* If two condition code modes are compatible, return a condition
     code mode which is compatible with both, such that a comparison
     done in the returned mode will work for both of the original
     modes.  If the condition code modes are not compatible, return
     VOIDmode.  */
  enum machine_mode (* cc_modes_compatible) (enum machine_mode,
                                             enum machine_mode);

  /* Do machine-dependent code transformations.  Called just before
     delayed-branch scheduling.  */
  void (* machine_dependent_reorg) (void);

  /* Create the __builtin_va_list type.  */
  tree (* build_builtin_va_list) (void);

  /* Gimplifies a VA_ARG_EXPR.  */
  tree (* gimplify_va_arg_expr) (tree valist, tree type, tree *pre_p,
                                 tree *post_p);

  /* Validity-checking routines for PCH files, target-specific.
     get_pch_validity returns a pointer to the data to be stored,
     and stores the size in its argument.  pch_valid_p gets the same
     information back and returns NULL if the PCH is valid,
     or an error message if not.  */
  void * (* get_pch_validity) (size_t *);
  const char * (* pch_valid_p) (const void *, size_t);

  /* True if the compiler should give an enum type only as many
     bytes as it takes to represent the range of possible values of
     that type.  */
  bool (* default_short_enums) (void);

  /* This target hook returns an rtx that is used to store the address
     of the current frame into the built-in setjmp buffer.  */
  rtx (* builtin_setjmp_frame_value) (void);

  /* This target hook should add STRING_CST trees for any hard regs
     the port wishes to automatically clobber for an asm.  */
  tree (* md_asm_clobbers) (tree, tree, tree);

  /* This target hook allows the backend to specify a calling convention
     in the debug information.  This function actually returns an
     enum dwarf_calling_convention, but because of forward declarations
     and not wanting to include dwarf2.h everywhere target.h is included
     the function is being declared as an int.  */
  int (* dwarf_calling_convention) (tree);

  /* This target hook allows the backend to emit frame-related insns that
     contain UNSPECs or UNSPEC_VOLATILEs.  The call frame debugging info
     engine will invoke it on insns of the form
       (set (reg) (unspec [...] UNSPEC_INDEX))
     and
       (set (reg) (unspec_volatile [...] UNSPECV_INDEX))
     to let the backend emit the call frame instructions.  */
  void (* dwarf_handle_frame_unspec) (const char *, rtx, int);

  /* Functions relating to calls - argument passing, returns, etc.  */
  struct calls {
    bool (*promote_function_args) (tree fntype);
    bool (*promote_function_return) (tree fntype);
    bool (*promote_prototypes) (tree fntype);
    rtx (*struct_value_rtx) (tree fndecl, int incoming);
    bool (*return_in_memory) (tree type, tree fndecl);
    bool (*return_in_msb) (tree type);

    /* Return true if a parameter must be passed by reference.  TYPE may
       be null if this is a libcall.  CA may be null if this query is
       from __builtin_va_arg.  */
    bool (*pass_by_reference) (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                               tree type, bool named_arg);

    rtx (*expand_builtin_saveregs) (void);

    /* Returns pretend_argument_size.  */
    void (*setup_incoming_varargs) (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                                    tree type, int *pretend_arg_size,
                                    int second_time);
    bool (*strict_argument_naming) (CUMULATIVE_ARGS *ca);

    /* Returns true if we should use
       targetm.calls.setup_incoming_varargs() and/or
       targetm.calls.strict_argument_naming().  */
    bool (*pretend_outgoing_varargs_named) (CUMULATIVE_ARGS *ca);

    /* Given a complex type T, return true if a parameter of type T
       should be passed as two scalars.  */
    bool (* split_complex_arg) (tree type);

    /* Return true if type T, mode MODE, may not be passed in registers,
       but must be passed on the stack.  */
    /* ??? This predicate should be applied strictly after pass-by-reference.
       Need audit to verify that this is the case.  */
    bool (* must_pass_in_stack) (enum machine_mode mode, tree t);

    /* Return true if type TYPE, mode MODE, which is passed by reference,
       should have the object copy generated by the callee rather than
       the caller.  It is never called for TYPE requiring constructors.  */
    bool (* callee_copies) (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                            tree type, bool named);

    /* Return zero for arguments passed entirely on the stack or entirely
       in registers.  If passed in both, return the number of bytes passed
       in registers; the balance is therefore passed on the stack.  */
    int (* arg_partial_bytes) (CUMULATIVE_ARGS *ca, enum machine_mode mode,
                               tree type, bool named);

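    /* For illustration only (a made-up ABI): with four 4-byte argument
       registers of which three are already occupied, a 12-byte argument
       would place its first 4 bytes in the last free register and the
       remaining 8 bytes on the stack, so the hook would return 4.  */
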
    /* Return the diagnostic message string if a function without a
       prototype is not allowed for this VAL argument; NULL otherwise.  */
    const char *(*invalid_arg_for_unprototyped_fn) (tree typelist,
                                                    tree funcdecl, tree val);
  } calls;

  /* Functions specific to the C++ frontend.  */
  struct cxx {
    /* Return the integer type used for guard variables.  */
    tree (*guard_type) (void);
    /* Return true if only the low bit of the guard should be tested.  */
    bool (*guard_mask_bit) (void);
    /* Returns the size of the array cookie for an array of the given
       type.  */
    tree (*get_cookie_size) (tree);
    /* Returns true if the element size should be stored in the
       array cookie.  */
    bool (*cookie_has_size) (void);
    /* Allows backends to perform additional processing when
       deciding if a class should be exported or imported.  */
    int (*import_export_class) (tree, int);
    /* Returns true if constructors and destructors return "this".  */
    bool (*cdtor_returns_this) (void);
    /* Returns true if the key method for a class can be an inline
       function, so long as it is not declared inline in the class
       itself.  Returning true is the behavior required by the Itanium
       C++ ABI.  */
    bool (*key_method_may_be_inline) (void);
    /* Returns true if all class data (virtual tables, type info,
       etc.) should be exported from the current DLL, even when the
       associated class is not exported.  */
    bool (*export_class_data) (void);
  } cxx;

  /* Leave the boolean fields at the end.  */

  /* True if arbitrary sections are supported.  */
  bool have_named_sections;

  /* True if "native" constructors and destructors are supported,
     false if we're using collect2 for the job.  */
  bool have_ctors_dtors;

  /* True if thread-local storage is supported.  */
  bool have_tls;

  /* True if a small readonly data section is supported.  */
  bool have_srodata_section;

  /* True if EH frame info sections should be zero-terminated.  */
  bool terminate_dw2_eh_frame_info;

  /* True if #NO_APP should be emitted at the beginning of
     assembly output.  */
  bool file_start_app_off;

  /* True if output_file_directive should be called for main_input_filename
     at the beginning of assembly output.  */
  bool file_start_file_directive;

  /* True if #pragma redefine_extname is to be supported.  */
  bool handle_pragma_redefine_extname;

  /* True if #pragma extern_prefix is to be supported.  */
  bool handle_pragma_extern_prefix;

  /* True if the target is allowed to reorder memory accesses unless
     synchronization is explicitly requested.  */
  bool relaxed_ordering;

  /* Leave the boolean fields at the end.  */
};

extern struct gcc_target targetm;

#endif /* GCC_TARGET_H */