/* Subroutines for insn-output.c for HPPA.
   Copyright (C) 1992-2018 Free Software Foundation, Inc.
   Contributed by Tim Moore (moore@cs.utah.edu), based on sparc.c

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#define IN_TARGET_CODE 1
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "memmodel.h"
27 #include "backend.h"
28 #include "target.h"
29 #include "rtl.h"
30 #include "tree.h"
31 #include "df.h"
32 #include "tm_p.h"
33 #include "stringpool.h"
34 #include "attribs.h"
35 #include "optabs.h"
36 #include "regs.h"
37 #include "emit-rtl.h"
38 #include "recog.h"
39 #include "diagnostic-core.h"
40 #include "insn-attr.h"
41 #include "alias.h"
42 #include "fold-const.h"
43 #include "stor-layout.h"
44 #include "varasm.h"
45 #include "calls.h"
46 #include "output.h"
47 #include "except.h"
48 #include "explow.h"
49 #include "expr.h"
50 #include "reload.h"
51 #include "common/common-target.h"
52 #include "langhooks.h"
53 #include "cfgrtl.h"
54 #include "opts.h"
55 #include "builtins.h"
57 /* This file should be included last. */
58 #include "target-def.h"
/* Return nonzero if there is a bypass for the output of 
   OUT_INSN and the fp store IN_INSN.  */
int
pa_fpstore_bypass_p (rtx_insn *out_insn, rtx_insn *in_insn)
{
  machine_mode store_mode;
  machine_mode other_mode;
  rtx set;

  if (recog_memoized (in_insn) < 0
      || (get_attr_type (in_insn) != TYPE_FPSTORE
	  && get_attr_type (in_insn) != TYPE_FPSTORE_LOAD)
      || recog_memoized (out_insn) < 0)
    return 0;

  store_mode = GET_MODE (SET_SRC (PATTERN (in_insn)));

  set = single_set (out_insn);
  if (!set)
    return 0;

  other_mode = GET_MODE (SET_SRC (set));

  return (GET_MODE_SIZE (store_mode) == GET_MODE_SIZE (other_mode));
}
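/* Usage sketch (an assumption for illustration, not part of this file):
   predicates like this one are typically named as the guard of a
   define_bypass entry in the machine description, along the lines of

     (define_bypass 2 "fpload" "fpstore,fpstore_load" "pa_fpstore_bypass_p")

   so the scheduler only applies the shorter bypass latency when the
   producing insn and the fp store move values of the same size.  */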
#ifndef DO_FRAME_NOTES
#ifdef INCOMING_RETURN_ADDR_RTX
#define DO_FRAME_NOTES 1
#else
#define DO_FRAME_NOTES 0
#endif
#endif
static void pa_option_override (void);
static void copy_reg_pointer (rtx, rtx);
static void fix_range (const char *);
static int hppa_register_move_cost (machine_mode mode, reg_class_t,
				    reg_class_t);
static int hppa_address_cost (rtx, machine_mode mode, addr_space_t, bool);
static bool hppa_rtx_costs (rtx, machine_mode, int, int, int *, bool);
static inline rtx force_mode (machine_mode, rtx);
static void pa_reorg (void);
static void pa_combine_instructions (void);
static int pa_can_combine_p (rtx_insn *, rtx_insn *, rtx_insn *, int, rtx,
			     rtx, rtx);
static bool forward_branch_p (rtx_insn *);
static void compute_zdepwi_operands (unsigned HOST_WIDE_INT, unsigned *);
static void compute_zdepdi_operands (unsigned HOST_WIDE_INT, unsigned *);
static int compute_movmem_length (rtx_insn *);
static int compute_clrmem_length (rtx_insn *);
static bool pa_assemble_integer (rtx, unsigned int, int);
static void remove_useless_addtr_insns (int);
static void store_reg (int, HOST_WIDE_INT, int);
static void store_reg_modify (int, int, HOST_WIDE_INT);
static void load_reg (int, HOST_WIDE_INT, int);
static void set_reg_plus_d (int, int, HOST_WIDE_INT, int);
static rtx pa_function_value (const_tree, const_tree, bool);
static rtx pa_libcall_value (machine_mode, const_rtx);
static bool pa_function_value_regno_p (const unsigned int);
static void pa_output_function_prologue (FILE *);
static void update_total_code_bytes (unsigned int);
static void pa_output_function_epilogue (FILE *);
static int pa_adjust_cost (rtx_insn *, int, rtx_insn *, int, unsigned int);
static int pa_adjust_priority (rtx_insn *, int);
static int pa_issue_rate (void);
static int pa_reloc_rw_mask (void);
static void pa_som_asm_init_sections (void) ATTRIBUTE_UNUSED;
static section *pa_som_tm_clone_table_section (void) ATTRIBUTE_UNUSED;
static section *pa_select_section (tree, int, unsigned HOST_WIDE_INT)
     ATTRIBUTE_UNUSED;
static void pa_encode_section_info (tree, rtx, int);
static const char *pa_strip_name_encoding (const char *);
static bool pa_function_ok_for_sibcall (tree, tree);
static void pa_globalize_label (FILE *, const char *)
     ATTRIBUTE_UNUSED;
static void pa_asm_output_mi_thunk (FILE *, tree, HOST_WIDE_INT,
				    HOST_WIDE_INT, tree);
#if !defined(USE_COLLECT2)
static void pa_asm_out_constructor (rtx, int);
static void pa_asm_out_destructor (rtx, int);
#endif
static void pa_init_builtins (void);
static rtx pa_expand_builtin (tree, rtx, rtx, machine_mode mode, int);
static rtx hppa_builtin_saveregs (void);
static void hppa_va_start (tree, rtx);
static tree hppa_gimplify_va_arg_expr (tree, tree, gimple_seq *, gimple_seq *);
static bool pa_scalar_mode_supported_p (scalar_mode);
static bool pa_commutative_p (const_rtx x, int outer_code);
static void copy_fp_args (rtx_insn *) ATTRIBUTE_UNUSED;
static int length_fp_args (rtx_insn *) ATTRIBUTE_UNUSED;
static rtx hppa_legitimize_address (rtx, rtx, machine_mode);
static inline void pa_file_start_level (void) ATTRIBUTE_UNUSED;
static inline void pa_file_start_space (int) ATTRIBUTE_UNUSED;
static inline void pa_file_start_file (int) ATTRIBUTE_UNUSED;
static inline void pa_file_start_mcount (const char*) ATTRIBUTE_UNUSED;
static void pa_elf_file_start (void) ATTRIBUTE_UNUSED;
static void pa_som_file_start (void) ATTRIBUTE_UNUSED;
static void pa_linux_file_start (void) ATTRIBUTE_UNUSED;
static void pa_hpux64_gas_file_start (void) ATTRIBUTE_UNUSED;
static void pa_hpux64_hpas_file_start (void) ATTRIBUTE_UNUSED;
static void output_deferred_plabels (void);
static void output_deferred_profile_counters (void) ATTRIBUTE_UNUSED;
static void pa_file_end (void);
static void pa_init_libfuncs (void);
static rtx pa_struct_value_rtx (tree, int);
static bool pa_pass_by_reference (cumulative_args_t, machine_mode,
				  const_tree, bool);
static int pa_arg_partial_bytes (cumulative_args_t, machine_mode,
				 tree, bool);
static void pa_function_arg_advance (cumulative_args_t, machine_mode,
				     const_tree, bool);
static rtx pa_function_arg (cumulative_args_t, machine_mode,
			    const_tree, bool);
static pad_direction pa_function_arg_padding (machine_mode, const_tree);
static unsigned int pa_function_arg_boundary (machine_mode, const_tree);
static struct machine_function * pa_init_machine_status (void);
static reg_class_t pa_secondary_reload (bool, rtx, reg_class_t,
					machine_mode,
					secondary_reload_info *);
static bool pa_secondary_memory_needed (machine_mode,
					reg_class_t, reg_class_t);
static void pa_extra_live_on_entry (bitmap);
static machine_mode pa_promote_function_mode (const_tree,
					      machine_mode, int *,
					      const_tree, int);

static void pa_asm_trampoline_template (FILE *);
static void pa_trampoline_init (rtx, tree, rtx);
static rtx pa_trampoline_adjust_address (rtx);
static rtx pa_delegitimize_address (rtx);
static bool pa_print_operand_punct_valid_p (unsigned char);
static rtx pa_internal_arg_pointer (void);
static bool pa_can_eliminate (const int, const int);
static void pa_conditional_register_usage (void);
static machine_mode pa_c_mode_for_suffix (char);
static section *pa_function_section (tree, enum node_frequency, bool, bool);
static bool pa_cannot_force_const_mem (machine_mode, rtx);
static bool pa_legitimate_constant_p (machine_mode, rtx);
static unsigned int pa_section_type_flags (tree, const char *, int);
static bool pa_legitimate_address_p (machine_mode, rtx, bool);
static bool pa_callee_copies (cumulative_args_t, machine_mode,
			      const_tree, bool);
static unsigned int pa_hard_regno_nregs (unsigned int, machine_mode);
static bool pa_hard_regno_mode_ok (unsigned int, machine_mode);
static bool pa_modes_tieable_p (machine_mode, machine_mode);
static bool pa_can_change_mode_class (machine_mode, machine_mode, reg_class_t);
static HOST_WIDE_INT pa_starting_frame_offset (void);
/* The following extra sections are only used for SOM.  */
static GTY(()) section *som_readonly_data_section;
static GTY(()) section *som_one_only_readonly_data_section;
static GTY(()) section *som_one_only_data_section;
static GTY(()) section *som_tm_clone_table_section;

/* Counts for the number of callee-saved general and floating point
   registers which were saved by the current function's prologue.  */
static int gr_saved, fr_saved;

/* Boolean indicating whether the return pointer was saved by the
   current function's prologue.  */
static bool rp_saved;

static rtx find_addr_reg (rtx);

/* Keep track of the number of bytes we have output in the CODE subspace
   during this compilation so we'll know when to emit inline long-calls.  */
unsigned long total_code_bytes;

/* The last address of the previous function plus the number of bytes in
   associated thunks that have been output.  This is used to determine if
   a thunk can use an IA-relative branch to reach its target function.  */
static unsigned int last_address;

/* Variables to handle plabels that we discover are necessary at assembly
   output time.  They are output after the current function.  */
struct GTY(()) deferred_plabel
{
  rtx internal_label;
  rtx symbol;
};
static GTY((length ("n_deferred_plabels"))) struct deferred_plabel *
  deferred_plabels;
static size_t n_deferred_plabels = 0;
/* Initialize the GCC target structure.  */

#undef TARGET_OPTION_OVERRIDE
#define TARGET_OPTION_OVERRIDE pa_option_override

#undef TARGET_ASM_ALIGNED_HI_OP
#define TARGET_ASM_ALIGNED_HI_OP "\t.half\t"
#undef TARGET_ASM_ALIGNED_SI_OP
#define TARGET_ASM_ALIGNED_SI_OP "\t.word\t"
#undef TARGET_ASM_ALIGNED_DI_OP
#define TARGET_ASM_ALIGNED_DI_OP "\t.dword\t"
#undef TARGET_ASM_UNALIGNED_HI_OP
#define TARGET_ASM_UNALIGNED_HI_OP TARGET_ASM_ALIGNED_HI_OP
#undef TARGET_ASM_UNALIGNED_SI_OP
#define TARGET_ASM_UNALIGNED_SI_OP TARGET_ASM_ALIGNED_SI_OP
#undef TARGET_ASM_UNALIGNED_DI_OP
#define TARGET_ASM_UNALIGNED_DI_OP TARGET_ASM_ALIGNED_DI_OP
#undef TARGET_ASM_INTEGER
#define TARGET_ASM_INTEGER pa_assemble_integer

#undef TARGET_ASM_FUNCTION_PROLOGUE
#define TARGET_ASM_FUNCTION_PROLOGUE pa_output_function_prologue
#undef TARGET_ASM_FUNCTION_EPILOGUE
#define TARGET_ASM_FUNCTION_EPILOGUE pa_output_function_epilogue

#undef TARGET_FUNCTION_VALUE
#define TARGET_FUNCTION_VALUE pa_function_value
#undef TARGET_LIBCALL_VALUE
#define TARGET_LIBCALL_VALUE pa_libcall_value
#undef TARGET_FUNCTION_VALUE_REGNO_P
#define TARGET_FUNCTION_VALUE_REGNO_P pa_function_value_regno_p

#undef TARGET_LEGITIMIZE_ADDRESS
#define TARGET_LEGITIMIZE_ADDRESS hppa_legitimize_address

#undef TARGET_SCHED_ADJUST_COST
#define TARGET_SCHED_ADJUST_COST pa_adjust_cost
#undef TARGET_SCHED_ADJUST_PRIORITY
#define TARGET_SCHED_ADJUST_PRIORITY pa_adjust_priority
#undef TARGET_SCHED_ISSUE_RATE
#define TARGET_SCHED_ISSUE_RATE pa_issue_rate

#undef TARGET_ENCODE_SECTION_INFO
#define TARGET_ENCODE_SECTION_INFO pa_encode_section_info
#undef TARGET_STRIP_NAME_ENCODING
#define TARGET_STRIP_NAME_ENCODING pa_strip_name_encoding

#undef TARGET_FUNCTION_OK_FOR_SIBCALL
#define TARGET_FUNCTION_OK_FOR_SIBCALL pa_function_ok_for_sibcall

#undef TARGET_COMMUTATIVE_P
#define TARGET_COMMUTATIVE_P pa_commutative_p

#undef TARGET_ASM_OUTPUT_MI_THUNK
#define TARGET_ASM_OUTPUT_MI_THUNK pa_asm_output_mi_thunk
#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
#define TARGET_ASM_CAN_OUTPUT_MI_THUNK default_can_output_mi_thunk_no_vcall

#undef TARGET_ASM_FILE_END
#define TARGET_ASM_FILE_END pa_file_end

#undef TARGET_ASM_RELOC_RW_MASK
#define TARGET_ASM_RELOC_RW_MASK pa_reloc_rw_mask

#undef TARGET_PRINT_OPERAND_PUNCT_VALID_P
#define TARGET_PRINT_OPERAND_PUNCT_VALID_P pa_print_operand_punct_valid_p

#if !defined(USE_COLLECT2)
#undef TARGET_ASM_CONSTRUCTOR
#define TARGET_ASM_CONSTRUCTOR pa_asm_out_constructor
#undef TARGET_ASM_DESTRUCTOR
#define TARGET_ASM_DESTRUCTOR pa_asm_out_destructor
#endif

#undef TARGET_INIT_BUILTINS
#define TARGET_INIT_BUILTINS pa_init_builtins

#undef TARGET_EXPAND_BUILTIN
#define TARGET_EXPAND_BUILTIN pa_expand_builtin

#undef TARGET_REGISTER_MOVE_COST
#define TARGET_REGISTER_MOVE_COST hppa_register_move_cost
#undef TARGET_RTX_COSTS
#define TARGET_RTX_COSTS hppa_rtx_costs
#undef TARGET_ADDRESS_COST
#define TARGET_ADDRESS_COST hppa_address_cost

#undef TARGET_MACHINE_DEPENDENT_REORG
#define TARGET_MACHINE_DEPENDENT_REORG pa_reorg

#undef TARGET_INIT_LIBFUNCS
#define TARGET_INIT_LIBFUNCS pa_init_libfuncs

#undef TARGET_PROMOTE_FUNCTION_MODE
#define TARGET_PROMOTE_FUNCTION_MODE pa_promote_function_mode
#undef TARGET_PROMOTE_PROTOTYPES
#define TARGET_PROMOTE_PROTOTYPES hook_bool_const_tree_true

#undef TARGET_STRUCT_VALUE_RTX
#define TARGET_STRUCT_VALUE_RTX pa_struct_value_rtx
#undef TARGET_RETURN_IN_MEMORY
#define TARGET_RETURN_IN_MEMORY pa_return_in_memory
#undef TARGET_MUST_PASS_IN_STACK
#define TARGET_MUST_PASS_IN_STACK must_pass_in_stack_var_size
#undef TARGET_PASS_BY_REFERENCE
#define TARGET_PASS_BY_REFERENCE pa_pass_by_reference
#undef TARGET_CALLEE_COPIES
#define TARGET_CALLEE_COPIES pa_callee_copies
#undef TARGET_ARG_PARTIAL_BYTES
#define TARGET_ARG_PARTIAL_BYTES pa_arg_partial_bytes
#undef TARGET_FUNCTION_ARG
#define TARGET_FUNCTION_ARG pa_function_arg
#undef TARGET_FUNCTION_ARG_ADVANCE
#define TARGET_FUNCTION_ARG_ADVANCE pa_function_arg_advance
#undef TARGET_FUNCTION_ARG_PADDING
#define TARGET_FUNCTION_ARG_PADDING pa_function_arg_padding
#undef TARGET_FUNCTION_ARG_BOUNDARY
#define TARGET_FUNCTION_ARG_BOUNDARY pa_function_arg_boundary

#undef TARGET_EXPAND_BUILTIN_SAVEREGS
#define TARGET_EXPAND_BUILTIN_SAVEREGS hppa_builtin_saveregs
#undef TARGET_EXPAND_BUILTIN_VA_START
#define TARGET_EXPAND_BUILTIN_VA_START hppa_va_start
#undef TARGET_GIMPLIFY_VA_ARG_EXPR
#define TARGET_GIMPLIFY_VA_ARG_EXPR hppa_gimplify_va_arg_expr

#undef TARGET_SCALAR_MODE_SUPPORTED_P
#define TARGET_SCALAR_MODE_SUPPORTED_P pa_scalar_mode_supported_p

#undef TARGET_CANNOT_FORCE_CONST_MEM
#define TARGET_CANNOT_FORCE_CONST_MEM pa_cannot_force_const_mem

#undef TARGET_SECONDARY_RELOAD
#define TARGET_SECONDARY_RELOAD pa_secondary_reload
#undef TARGET_SECONDARY_MEMORY_NEEDED
#define TARGET_SECONDARY_MEMORY_NEEDED pa_secondary_memory_needed

#undef TARGET_EXTRA_LIVE_ON_ENTRY
#define TARGET_EXTRA_LIVE_ON_ENTRY pa_extra_live_on_entry

#undef TARGET_ASM_TRAMPOLINE_TEMPLATE
#define TARGET_ASM_TRAMPOLINE_TEMPLATE pa_asm_trampoline_template
#undef TARGET_TRAMPOLINE_INIT
#define TARGET_TRAMPOLINE_INIT pa_trampoline_init
#undef TARGET_TRAMPOLINE_ADJUST_ADDRESS
#define TARGET_TRAMPOLINE_ADJUST_ADDRESS pa_trampoline_adjust_address
#undef TARGET_DELEGITIMIZE_ADDRESS
#define TARGET_DELEGITIMIZE_ADDRESS pa_delegitimize_address
#undef TARGET_INTERNAL_ARG_POINTER
#define TARGET_INTERNAL_ARG_POINTER pa_internal_arg_pointer
#undef TARGET_CAN_ELIMINATE
#define TARGET_CAN_ELIMINATE pa_can_eliminate
#undef TARGET_CONDITIONAL_REGISTER_USAGE
#define TARGET_CONDITIONAL_REGISTER_USAGE pa_conditional_register_usage
#undef TARGET_C_MODE_FOR_SUFFIX
#define TARGET_C_MODE_FOR_SUFFIX pa_c_mode_for_suffix
#undef TARGET_ASM_FUNCTION_SECTION
#define TARGET_ASM_FUNCTION_SECTION pa_function_section

#undef TARGET_LEGITIMATE_CONSTANT_P
#define TARGET_LEGITIMATE_CONSTANT_P pa_legitimate_constant_p
#undef TARGET_SECTION_TYPE_FLAGS
#define TARGET_SECTION_TYPE_FLAGS pa_section_type_flags
#undef TARGET_LEGITIMATE_ADDRESS_P
#define TARGET_LEGITIMATE_ADDRESS_P pa_legitimate_address_p

#undef TARGET_LRA_P
#define TARGET_LRA_P hook_bool_void_false

#undef TARGET_HARD_REGNO_NREGS
#define TARGET_HARD_REGNO_NREGS pa_hard_regno_nregs
#undef TARGET_HARD_REGNO_MODE_OK
#define TARGET_HARD_REGNO_MODE_OK pa_hard_regno_mode_ok
#undef TARGET_MODES_TIEABLE_P
#define TARGET_MODES_TIEABLE_P pa_modes_tieable_p

#undef TARGET_CAN_CHANGE_MODE_CLASS
#define TARGET_CAN_CHANGE_MODE_CLASS pa_can_change_mode_class

#undef TARGET_CONSTANT_ALIGNMENT
#define TARGET_CONSTANT_ALIGNMENT constant_alignment_word_strings

#undef TARGET_STARTING_FRAME_OFFSET
#define TARGET_STARTING_FRAME_OFFSET pa_starting_frame_offset

#undef TARGET_HAVE_SPECULATION_SAFE_VALUE
#define TARGET_HAVE_SPECULATION_SAFE_VALUE speculation_safe_value_not_needed

struct gcc_target targetm = TARGET_INITIALIZER;
/* Parse the -mfixed-range= option string.  */

static void
fix_range (const char *const_str)
{
  int i, first, last;
  char *str, *dash, *comma;

  /* str must be of the form REG1'-'REG2{,REG1'-'REG2} where REG1 and
     REG2 are either register names or register numbers.  The effect
     of this option is to mark the registers in the range from REG1 to
     REG2 as ``fixed'' so they won't be used by the compiler.  This is
     used, e.g., to ensure that kernel mode code doesn't use fr4-fr31.  */

  i = strlen (const_str);
  str = (char *) alloca (i + 1);
  memcpy (str, const_str, i + 1);

  while (1)
    {
      dash = strchr (str, '-');
      if (!dash)
	{
	  warning (0, "value of -mfixed-range must have form REG1-REG2");
	  return;
	}
      *dash = '\0';

      comma = strchr (dash + 1, ',');
      if (comma)
	*comma = '\0';

      first = decode_reg_name (str);
      if (first < 0)
	{
	  warning (0, "unknown register name: %s", str);
	  return;
	}

      last = decode_reg_name (dash + 1);
      if (last < 0)
	{
	  warning (0, "unknown register name: %s", dash + 1);
	  return;
	}

      *dash = '-';

      if (first > last)
	{
	  warning (0, "%s-%s is an empty range", str, dash + 1);
	  return;
	}

      for (i = first; i <= last; ++i)
	fixed_regs[i] = call_used_regs[i] = 1;

      if (!comma)
	break;

      *comma = ',';
      str = comma + 1;
    }

  /* Check if all floating point registers have been fixed.  */
  for (i = FP_REG_FIRST; i <= FP_REG_LAST; i++)
    if (!fixed_regs[i])
      break;

  if (i > FP_REG_LAST)
    target_flags |= MASK_DISABLE_FPREGS;
}
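/* Illustrative example (not from this file): given -mfixed-range=fr20-fr23,
   decode_reg_name maps "fr20" and "fr23" to their hard register numbers and
   the loop above marks fr20 through fr23 as fixed and call-used, so the
   register allocator never touches them.  If the ranges end up covering
   every register from FP_REG_FIRST to FP_REG_LAST (as in the fr4-fr31
   example in the comment above), the final check also sets
   MASK_DISABLE_FPREGS.  */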
/* Implement the TARGET_OPTION_OVERRIDE hook.  */

static void
pa_option_override (void)
{
  unsigned int i;
  cl_deferred_option *opt;
  vec<cl_deferred_option> *v
    = (vec<cl_deferred_option> *) pa_deferred_options;

  if (v)
    FOR_EACH_VEC_ELT (*v, i, opt)
      {
	switch (opt->opt_index)
	  {
	  case OPT_mfixed_range_:
	    fix_range (opt->arg);
	    break;

	  default:
	    gcc_unreachable ();
	  }
      }

  if (flag_pic && TARGET_PORTABLE_RUNTIME)
    {
      warning (0, "PIC code generation is not supported in the portable runtime model");
    }

  if (flag_pic && TARGET_FAST_INDIRECT_CALLS)
    {
      warning (0, "PIC code generation is not compatible with fast indirect calls");
    }

  if (! TARGET_GAS && write_symbols != NO_DEBUG)
    {
      warning (0, "-g is only supported when using GAS on this processor,");
      warning (0, "-g option disabled");
      write_symbols = NO_DEBUG;
    }

  /* We only support the "big PIC" model now.  And we always generate PIC
     code when in 64-bit mode.  */
  if (flag_pic == 1 || TARGET_64BIT)
    flag_pic = 2;

  /* Disable -freorder-blocks-and-partition as we don't support hot and
     cold partitioning.  */
  if (flag_reorder_blocks_and_partition)
    {
      inform (input_location,
	      "-freorder-blocks-and-partition does not work "
	      "on this architecture");
      flag_reorder_blocks_and_partition = 0;
      flag_reorder_blocks = 1;
    }

  /* We can't guarantee that .dword is available for 32-bit targets.  */
  if (UNITS_PER_WORD == 4)
    targetm.asm_out.aligned_op.di = NULL;

  /* The unaligned ops are only available when using GAS.  */
  if (!TARGET_GAS)
    {
      targetm.asm_out.unaligned_op.hi = NULL;
      targetm.asm_out.unaligned_op.si = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }

  init_machine_status = pa_init_machine_status;
}
enum pa_builtins
{
  PA_BUILTIN_COPYSIGNQ,
  PA_BUILTIN_FABSQ,
  PA_BUILTIN_INFQ,
  PA_BUILTIN_HUGE_VALQ,
  PA_BUILTIN_max
};

static GTY(()) tree pa_builtins[(int) PA_BUILTIN_max];
static void
pa_init_builtins (void)
{
#ifdef DONT_HAVE_FPUTC_UNLOCKED
  {
    tree decl = builtin_decl_explicit (BUILT_IN_PUTC_UNLOCKED);
    set_builtin_decl (BUILT_IN_FPUTC_UNLOCKED, decl,
		      builtin_decl_implicit_p (BUILT_IN_PUTC_UNLOCKED));
  }
#endif
#if TARGET_HPUX_11
  {
    tree decl;

    if ((decl = builtin_decl_explicit (BUILT_IN_FINITE)) != NULL_TREE)
      set_user_assembler_name (decl, "_Isfinite");
    if ((decl = builtin_decl_explicit (BUILT_IN_FINITEF)) != NULL_TREE)
      set_user_assembler_name (decl, "_Isfinitef");
  }
#endif

  if (HPUX_LONG_DOUBLE_LIBRARY)
    {
      tree decl, ftype;

      /* Under HPUX, the __float128 type is a synonym for "long double".  */
      (*lang_hooks.types.register_builtin_type) (long_double_type_node,
						 "__float128");

      /* TFmode support builtins.  */
      ftype = build_function_type_list (long_double_type_node,
					long_double_type_node,
					NULL_TREE);
      decl = add_builtin_function ("__builtin_fabsq", ftype,
				   PA_BUILTIN_FABSQ, BUILT_IN_MD,
				   "_U_Qfabs", NULL_TREE);
      TREE_READONLY (decl) = 1;
      pa_builtins[PA_BUILTIN_FABSQ] = decl;

      ftype = build_function_type_list (long_double_type_node,
					long_double_type_node,
					long_double_type_node,
					NULL_TREE);
      decl = add_builtin_function ("__builtin_copysignq", ftype,
				   PA_BUILTIN_COPYSIGNQ, BUILT_IN_MD,
				   "_U_Qfcopysign", NULL_TREE);
      TREE_READONLY (decl) = 1;
      pa_builtins[PA_BUILTIN_COPYSIGNQ] = decl;

      ftype = build_function_type_list (long_double_type_node, NULL_TREE);
      decl = add_builtin_function ("__builtin_infq", ftype,
				   PA_BUILTIN_INFQ, BUILT_IN_MD,
				   NULL, NULL_TREE);
      pa_builtins[PA_BUILTIN_INFQ] = decl;

      decl = add_builtin_function ("__builtin_huge_valq", ftype,
				   PA_BUILTIN_HUGE_VALQ, BUILT_IN_MD,
				   NULL, NULL_TREE);
      pa_builtins[PA_BUILTIN_HUGE_VALQ] = decl;
    }
}
static rtx
pa_expand_builtin (tree exp, rtx target, rtx subtarget ATTRIBUTE_UNUSED,
		   machine_mode mode ATTRIBUTE_UNUSED,
		   int ignore ATTRIBUTE_UNUSED)
{
  tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  unsigned int fcode = DECL_FUNCTION_CODE (fndecl);

  switch (fcode)
    {
    case PA_BUILTIN_FABSQ:
    case PA_BUILTIN_COPYSIGNQ:
      return expand_call (exp, target, ignore);

    case PA_BUILTIN_INFQ:
    case PA_BUILTIN_HUGE_VALQ:
      {
	machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
	REAL_VALUE_TYPE inf;
	rtx tmp;

	real_inf (&inf);
	tmp = const_double_from_real_value (inf, target_mode);

	tmp = validize_mem (force_const_mem (target_mode, tmp));

	if (target == 0)
	  target = gen_reg_rtx (target_mode);

	emit_move_insn (target, tmp);
	return target;
      }

    default:
      gcc_unreachable ();
    }

  return NULL_RTX;
}
/* Function to init struct machine_function.
   This will be called, via a pointer variable,
   from push_function_context.  */

static struct machine_function *
pa_init_machine_status (void)
{
  return ggc_cleared_alloc<machine_function> ();
}
/* If FROM is a probable pointer register, mark TO as a probable
   pointer register with the same pointer alignment as FROM.  */

static void
copy_reg_pointer (rtx to, rtx from)
{
  if (REG_POINTER (from))
    mark_reg_pointer (to, REGNO_POINTER_ALIGN (REGNO (from)));
}
/* Return 1 if X contains a symbolic expression.  We know these
   expressions will have one of a few well defined forms, so
   we need only check those forms.  */
int
pa_symbolic_expression_p (rtx x)
{
  /* Strip off any HIGH.  */
  if (GET_CODE (x) == HIGH)
    x = XEXP (x, 0);

  return symbolic_operand (x, VOIDmode);
}
/* Accept any constant that can be moved in one instruction into a
   general register.  */
int
pa_cint_ok_for_move (unsigned HOST_WIDE_INT ival)
{
  /* OK if ldo, ldil, or zdepi, can be used.  */
  return (VAL_14_BITS_P (ival)
	  || pa_ldil_cint_p (ival)
	  || pa_zdepi_cint_p (ival));
}
/* True iff ldil can be used to load this CONST_INT.  The least
   significant 11 bits of the value must be zero and the value must
   not change sign when extended from 32 to 64 bits.  */
int
pa_ldil_cint_p (unsigned HOST_WIDE_INT ival)
{
  unsigned HOST_WIDE_INT x;

  x = ival & (((unsigned HOST_WIDE_INT) -1 << 31) | 0x7ff);
  return x == 0 || x == ((unsigned HOST_WIDE_INT) -1 << 31);
}
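/* Worked example (illustrative): ival = 0x12345800 has its low 11 bits
   clear and bit 31 clear, so the masked value x above is 0 and the
   function returns true; ldil can materialize the upper 21 bits
   directly.  ival = 0x80000000 (zero-extended to 64 bits) fails: x is
   0x80000000, which matches neither 0 nor the sign-extension pattern,
   reflecting that the value would change sign between 32 and 64 bits.  */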
/* True iff zdepi can be used to generate this CONST_INT.
   zdepi first sign extends a 5-bit signed number to a given field
   length, then places this field anywhere in a zero.  */
int
pa_zdepi_cint_p (unsigned HOST_WIDE_INT x)
{
  unsigned HOST_WIDE_INT lsb_mask, t;

  /* This might not be obvious, but it's at least fast.
     This function is critical; we don't have the time loops would take.  */
  lsb_mask = x & -x;
  t = ((x >> 4) + lsb_mask) & ~(lsb_mask - 1);
  /* Return true iff t is a power of two.  */
  return ((t & (t - 1)) == 0);
}
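/* Worked example (illustrative): x = 0x01e00000 is a run of four ones,
   i.e. the 5-bit field 0b01111 shifted left 21 bits; lsb_mask is
   0x00200000, t collapses to the single bit 0x00200000, and the
   function returns true.  x = 0x21 (0b100001) needs six significant
   bits starting at the LSB; t ends up as 3, which has two bits set,
   so the function returns false.  */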
/* True iff depi or extru can be used to compute (reg & mask).
   Accept bit pattern like these:
   0....01....1
   1....10....0
   1..10..01..1  */
int
pa_and_mask_p (unsigned HOST_WIDE_INT mask)
{
  mask = ~mask;
  mask += mask & -mask;
  return (mask & (mask - 1)) == 0;
}
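/* E.g. (illustrative, with a 64-bit HOST_WIDE_INT): mask = 0xff matches
   the 0....01....1 pattern; ~mask is a solid block of ones from bit 8
   upward, adding its lowest set bit wraps the block around to 0, and 0
   passes the power-of-two test.  mask = 0x18f is rejected: its
   complement has two separate runs of ones, so the sum keeps multiple
   bits set.  */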
/* True iff depi can be used to compute (reg | MASK).  */
int
pa_ior_mask_p (unsigned HOST_WIDE_INT mask)
{
  mask += mask & -mask;
  return (mask & (mask - 1)) == 0;
}
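/* E.g. (illustrative): mask = 0x7f0 is one contiguous run of ones, so
   mask + (mask & -mask) = 0x800, a power of two, and depi can set those
   bits in a single insn; mask = 0x505 has scattered bits and is
   rejected.  */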
/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go to REG.  If we need more
   than one register, we lose.  */

static rtx
legitimize_pic_address (rtx orig, machine_mode mode, rtx reg)
{
  rtx pic_ref = orig;

  gcc_assert (!PA_SYMBOL_REF_TLS_P (orig));

  /* Labels need special handling.  */
  if (pic_label_operand (orig, mode))
    {
      rtx_insn *insn;

      /* We do not want to go through the movXX expanders here since that
	 would create recursion.

	 Nor do we really want to call a generator for a named pattern
	 since that requires multiple patterns if we want to support
	 multiple word sizes.

	 So instead we just emit the raw set, which avoids the movXX
	 expanders completely.  */
      mark_reg_pointer (reg, BITS_PER_UNIT);
      insn = emit_insn (gen_rtx_SET (reg, orig));

      /* Put a REG_EQUAL note on this insn, so that it can be optimized.  */
      add_reg_note (insn, REG_EQUAL, orig);

      /* During and after reload, we need to generate a REG_LABEL_OPERAND note
	 and update LABEL_NUSES because this is not done automatically.  */
      if (reload_in_progress || reload_completed)
	{
	  /* Extract LABEL_REF.  */
	  if (GET_CODE (orig) == CONST)
	    orig = XEXP (XEXP (orig, 0), 0);
	  /* Extract CODE_LABEL.  */
	  orig = XEXP (orig, 0);
	  add_reg_note (insn, REG_LABEL_OPERAND, orig);
	  /* Make sure we have label and not a note.  */
	  if (LABEL_P (orig))
	    LABEL_NUSES (orig)++;
	}
      crtl->uses_pic_offset_table = 1;
      return reg;
    }
  if (GET_CODE (orig) == SYMBOL_REF)
    {
      rtx_insn *insn;
      rtx tmp_reg;

      gcc_assert (reg);

      /* Before reload, allocate a temporary register for the intermediate
	 result.  This allows the sequence to be deleted when the final
	 result is unused and the insns are trivially dead.  */
      tmp_reg = ((reload_in_progress || reload_completed)
		 ? reg : gen_reg_rtx (Pmode));

      if (function_label_operand (orig, VOIDmode))
	{
	  /* Force function label into memory in word mode.  */
	  orig = XEXP (force_const_mem (word_mode, orig), 0);
	  /* Load plabel address from DLT.  */
	  emit_move_insn (tmp_reg,
			  gen_rtx_PLUS (word_mode, pic_offset_table_rtx,
					gen_rtx_HIGH (word_mode, orig)));
	  pic_ref
	    = gen_const_mem (Pmode,
			     gen_rtx_LO_SUM (Pmode, tmp_reg,
					     gen_rtx_UNSPEC (Pmode,
							     gen_rtvec (1, orig),
							     UNSPEC_DLTIND14R)));
	  emit_move_insn (reg, pic_ref);
	  /* Now load address of function descriptor.  */
	  pic_ref = gen_rtx_MEM (Pmode, reg);
	}
      else
	{
	  /* Load symbol reference from DLT.  */
	  emit_move_insn (tmp_reg,
			  gen_rtx_PLUS (word_mode, pic_offset_table_rtx,
					gen_rtx_HIGH (word_mode, orig)));
	  pic_ref
	    = gen_const_mem (Pmode,
			     gen_rtx_LO_SUM (Pmode, tmp_reg,
					     gen_rtx_UNSPEC (Pmode,
							     gen_rtvec (1, orig),
							     UNSPEC_DLTIND14R)));
	}

      crtl->uses_pic_offset_table = 1;
      mark_reg_pointer (reg, BITS_PER_UNIT);
      insn = emit_move_insn (reg, pic_ref);

      /* Put a REG_EQUAL note on this insn, so that it can be optimized.  */
      set_unique_reg_note (insn, REG_EQUAL, orig);

      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base;

      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      gcc_assert (reg);
      gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);

      base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
      orig = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
				     base == reg ? 0 : reg);

      if (GET_CODE (orig) == CONST_INT)
	{
	  if (INT_14_BITS (orig))
	    return plus_constant (Pmode, base, INTVAL (orig));
	  orig = force_reg (Pmode, orig);
	}
      pic_ref = gen_rtx_PLUS (Pmode, base, orig);
      /* Likewise, should we set special REG_NOTEs here?  */
    }

  return pic_ref;
}
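/* Schematic of the SYMBOL_REF case above (illustrative, not literal RTL):
   the code emits

     tmp_reg = pic_offset_table_rtx + HIGH (orig)      ; upper DLT offset
     reg     = *LO_SUM (tmp_reg, UNSPEC_DLTIND14R)     ; 14-bit DLT load

   leaving REG holding the symbol's address as stored in its DLT entry;
   for function labels one further load fetches the address of the
   function descriptor.  */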
static GTY(()) rtx gen_tls_tga;

static rtx
gen_tls_get_addr (void)
{
  if (!gen_tls_tga)
    gen_tls_tga = init_one_libfunc ("__tls_get_addr");
  return gen_tls_tga;
}

static rtx
hppa_tls_call (rtx arg)
{
  rtx ret;

  ret = gen_reg_rtx (Pmode);
  emit_library_call_value (gen_tls_get_addr (), ret,
			   LCT_CONST, Pmode, arg, Pmode);

  return ret;
}
static rtx
legitimize_tls_address (rtx addr)
{
  rtx ret, tmp, t1, t2, tp;
  rtx_insn *insn;

  /* Currently, we can't handle anything but a SYMBOL_REF.  */
  if (GET_CODE (addr) != SYMBOL_REF)
    return addr;

  switch (SYMBOL_REF_TLS_MODEL (addr)) 
    {
    case TLS_MODEL_GLOBAL_DYNAMIC:
      tmp = gen_reg_rtx (Pmode);
      if (flag_pic)
	emit_insn (gen_tgd_load_pic (tmp, addr));
      else
	emit_insn (gen_tgd_load (tmp, addr));
      ret = hppa_tls_call (tmp);
      break;

    case TLS_MODEL_LOCAL_DYNAMIC:
      ret = gen_reg_rtx (Pmode);
      tmp = gen_reg_rtx (Pmode);
      start_sequence ();
      if (flag_pic)
	emit_insn (gen_tld_load_pic (tmp, addr));
      else
	emit_insn (gen_tld_load (tmp, addr));
      t1 = hppa_tls_call (tmp);
      insn = get_insns ();
      end_sequence ();
      t2 = gen_reg_rtx (Pmode);
      emit_libcall_block (insn, t2, t1, 
			  gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
					  UNSPEC_TLSLDBASE));
      emit_insn (gen_tld_offset_load (ret, addr, t2));
      break;

    case TLS_MODEL_INITIAL_EXEC:
      tp = gen_reg_rtx (Pmode);
      tmp = gen_reg_rtx (Pmode);
      ret = gen_reg_rtx (Pmode);
      emit_insn (gen_tp_load (tp));
      if (flag_pic)
	emit_insn (gen_tie_load_pic (tmp, addr));
      else
	emit_insn (gen_tie_load (tmp, addr));
      emit_move_insn (ret, gen_rtx_PLUS (Pmode, tp, tmp));
      break;

    case TLS_MODEL_LOCAL_EXEC:
      tp = gen_reg_rtx (Pmode);
      ret = gen_reg_rtx (Pmode);
      emit_insn (gen_tp_load (tp));
      emit_insn (gen_tle_load (ret, addr, tp));
      break;

    default:
      gcc_unreachable ();
    }

  return ret;
}
/* Helper for hppa_legitimize_address.  Given X, return true if it
   is a left shift by 1, 2 or 3 positions or a multiply by 2, 4 or 8.

   These respectively represent canonical shift-add rtxs or scaled
   memory addresses.  */
static bool
mem_shadd_or_shadd_rtx_p (rtx x)
{
  return ((GET_CODE (x) == ASHIFT
	   || GET_CODE (x) == MULT)
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && ((GET_CODE (x) == ASHIFT
	       && pa_shadd_constant_p (INTVAL (XEXP (x, 1))))
	      || (GET_CODE (x) == MULT
		  && pa_mem_shadd_constant_p (INTVAL (XEXP (x, 1))))));
}
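/* E.g. (illustrative): both (ashift (reg) (const_int 3)) and
   (mult (reg) (const_int 8)) satisfy this predicate, corresponding to
   a scale factor of 8 as used by sh3add; (mult (reg) (const_int 6))
   does not, since 6 is not one of the supported scale factors.  */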
/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   This macro is used in only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was called.
   In some cases it is useful to look at this to decide what needs to be done.

   It is always safe for this macro to do nothing.  It exists to recognize
   opportunities to optimize the output.

   For the PA, transform:

	memory(X + <large int>)

   into:

	if (<large int> & mask) >= 16
	  Y = (<large int> & ~mask) + mask + 1	Round up.
	else
	  Y = (<large int> & ~mask)		Round down.
	Z = X + Y
	memory (Z + (<large int> - Y));

   This is for CSE to find several similar references, and only use one Z.

   X can either be a SYMBOL_REF or REG, but because combine cannot
   perform a 4->2 combination we do nothing for SYMBOL_REF + D where
   D will not fit in 14 bits.

   MODE_FLOAT references allow displacements which fit in 5 bits, so use
   0x1f as the mask.

   MODE_INT references allow displacements which fit in 14 bits, so use
   0x3fff as the mask.

   This relies on the fact that most mode MODE_FLOAT references will use FP
   registers and most mode MODE_INT references will use integer registers.
   (In the rare case of an FP register used in an integer MODE, we depend
   on secondary reloads to clean things up.)

   It is also beneficial to handle (plus (mult (X) (Y)) (Z)) in a special
   manner if Y is 2, 4, or 8 (this allows more shadd insns and shifted
   indexed addressing modes to be used).

   Note that the addresses passed into hppa_legitimize_address always
   come from a MEM, so we only have to match the MULT form on incoming
   addresses.  But to be future proof we also match the ASHIFT form.

   However, this routine always places those shift-add sequences into
   registers, so we have to generate the ASHIFT form as our output.

   Put X and Z into registers.  Then put the entire expression into
   a register.  */
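/* Worked example of the transformation above (illustrative): for a
   MODE_INT reference to X + 100000, mask is 0x3fff and
   100000 & 0x3fff == 1696, below the halfway point of 8192, so Y rounds
   down to 98304.  We then compute Z = X + 98304 and address memory at
   Z + 1696, a displacement that fits in 14 bits; nearby references off
   X can share the same Z through CSE.  */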
rtx
hppa_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
			 machine_mode mode)
{
  rtx orig = x;

  /* We need to canonicalize the order of operands in unscaled indexed
     addresses since the code that checks if an address is valid doesn't
     always try both orders.  */
  if (!TARGET_NO_SPACE_REGS
      && GET_CODE (x) == PLUS
      && GET_MODE (x) == Pmode
      && REG_P (XEXP (x, 0))
      && REG_P (XEXP (x, 1))
      && REG_POINTER (XEXP (x, 0))
      && !REG_POINTER (XEXP (x, 1)))
    return gen_rtx_PLUS (Pmode, XEXP (x, 1), XEXP (x, 0));

  if (tls_referenced_p (x))
    return legitimize_tls_address (x);
  else if (flag_pic)
    return legitimize_pic_address (x, mode, gen_reg_rtx (Pmode));

  /* Strip off CONST.  */
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  /* Special case.  Get the SYMBOL_REF into a register and use indexing.
     That should always be safe.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
    {
      rtx reg = force_reg (Pmode, XEXP (x, 1));
      return force_reg (Pmode, gen_rtx_PLUS (Pmode, reg, XEXP (x, 0)));
    }

  /* Note we must reject symbols which represent function addresses
     since the assembler/linker can't handle arithmetic on plabels.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && ((GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	   && !FUNCTION_NAME_P (XSTR (XEXP (x, 0), 0)))
	  || GET_CODE (XEXP (x, 0)) == REG))
    {
      rtx int_part, ptr_reg;
      int newoffset;
      int offset = INTVAL (XEXP (x, 1));
      int mask;

      mask = (GET_MODE_CLASS (mode) == MODE_FLOAT
	      && !INT14_OK_STRICT ? 0x1f : 0x3fff);

      /* Choose which way to round the offset.  Round up if we
	 are >= halfway to the next boundary.  */
      if ((offset & mask) >= ((mask + 1) / 2))
	newoffset = (offset & ~ mask) + mask + 1;
      else
	newoffset = (offset & ~ mask);

      /* If the newoffset will not fit in 14 bits (ldo), then
	 handling this would take 4 or 5 instructions (2 to load
	 the SYMBOL_REF + 1 or 2 to load the newoffset + 1 to
	 add the new offset and the SYMBOL_REF.)  Combine can
	 not handle 4->2 or 5->2 combinations, so do not create
	 them.  */
      if (! VAL_14_BITS_P (newoffset)
	  && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
	{
	  rtx const_part = plus_constant (Pmode, XEXP (x, 0), newoffset);
	  rtx tmp_reg
	    = force_reg (Pmode,
			 gen_rtx_HIGH (Pmode, const_part));
	  ptr_reg
	    = force_reg (Pmode,
			 gen_rtx_LO_SUM (Pmode,
					 tmp_reg, const_part));
	}
      else
	{
	  if (! VAL_14_BITS_P (newoffset))
	    int_part = force_reg (Pmode, GEN_INT (newoffset));
	  else
	    int_part = GEN_INT (newoffset);

	  ptr_reg = force_reg (Pmode,
			       gen_rtx_PLUS (Pmode,
					     force_reg (Pmode, XEXP (x, 0)),
					     int_part));
	}
      return plus_constant (Pmode, ptr_reg, offset - newoffset);
    }

  /* Handle (plus (mult (a) (mem_shadd_constant)) (b)).  */

  if (GET_CODE (x) == PLUS
      && mem_shadd_or_shadd_rtx_p (XEXP (x, 0))
      && (OBJECT_P (XEXP (x, 1))
	  || GET_CODE (XEXP (x, 1)) == SUBREG)
      && GET_CODE (XEXP (x, 1)) != CONST)
    {
      /* If we were given a MULT, we must fix the constant
	 as we're going to create the ASHIFT form.  */
      int shift_val = INTVAL (XEXP (XEXP (x, 0), 1));
      if (GET_CODE (XEXP (x, 0)) == MULT)
	shift_val = exact_log2 (shift_val);

      rtx reg1, reg2;
      reg1 = XEXP (x, 1);
      if (GET_CODE (reg1) != REG)
	reg1 = force_reg (Pmode, force_operand (reg1, 0));

      reg2 = XEXP (XEXP (x, 0), 0);
      if (GET_CODE (reg2) != REG)
	reg2 = force_reg (Pmode, force_operand (reg2, 0));

      return force_reg (Pmode,
			gen_rtx_PLUS (Pmode,
				      gen_rtx_ASHIFT (Pmode, reg2,
						      GEN_INT (shift_val)),
				      reg1));
    }

  /* Similarly for (plus (plus (mult (a) (mem_shadd_constant)) (b)) (c)).

     Only do so for floating point modes since this is more speculative
     and we lose if it's an integer store.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && mem_shadd_or_shadd_rtx_p (XEXP (XEXP (x, 0), 0))
      && (mode == SFmode || mode == DFmode))
    {
      int shift_val = INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1));

      /* If we were given a MULT, we must fix the constant
	 as we're going to create the ASHIFT form.  */
      if (GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT)
	shift_val = exact_log2 (shift_val);

      /* Try and figure out what to use as a base register.  */
      rtx reg1, reg2, base, idx;

      reg1 = XEXP (XEXP (x, 0), 1);
      reg2 = XEXP (x, 1);
      base = NULL_RTX;
      idx = NULL_RTX;

      /* Make sure they're both regs.  If one was a SYMBOL_REF [+ const],
	 then pa_emit_move_sequence will turn on REG_POINTER so we'll know
	 it's a base register below.  */
      if (GET_CODE (reg1) != REG)
	reg1 = force_reg (Pmode, force_operand (reg1, 0));

      if (GET_CODE (reg2) != REG)
	reg2 = force_reg (Pmode, force_operand (reg2, 0));

      /* Figure out what the base and index are.  */

      if (GET_CODE (reg1) == REG
	  && REG_POINTER (reg1))
	{
	  base = reg1;
	  idx = gen_rtx_PLUS (Pmode,
			      gen_rtx_ASHIFT (Pmode,
					      XEXP (XEXP (XEXP (x, 0), 0), 0),
					      GEN_INT (shift_val)),
			      XEXP (x, 1));
	}
      else if (GET_CODE (reg2) == REG
	       && REG_POINTER (reg2))
	{
	  base = reg2;
	  idx = XEXP (x, 0);
	}

      if (base == 0)
	return orig;

      /* If the index adds a large constant, try to scale the
	 constant so that it can be loaded with only one insn.  */
      if (GET_CODE (XEXP (idx, 1)) == CONST_INT
	  && VAL_14_BITS_P (INTVAL (XEXP (idx, 1))
			    / INTVAL (XEXP (XEXP (idx, 0), 1)))
	  && INTVAL (XEXP (idx, 1)) % INTVAL (XEXP (XEXP (idx, 0), 1)) == 0)
	{
	  /* Divide the CONST_INT by the scale factor, then add it to A.  */
	  int val = INTVAL (XEXP (idx, 1));
	  val /= (1 << shift_val);

	  reg1 = XEXP (XEXP (idx, 0), 0);
	  if (GET_CODE (reg1) != REG)
	    reg1 = force_reg (Pmode, force_operand (reg1, 0));

	  reg1 = force_reg (Pmode, gen_rtx_PLUS (Pmode, reg1, GEN_INT (val)));

	  /* We can now generate a simple scaled indexed address.  */
	  return
	    force_reg
	      (Pmode, gen_rtx_PLUS (Pmode,
				    gen_rtx_ASHIFT (Pmode, reg1,
						    GEN_INT (shift_val)),
				    base));
	}

      /* If B + C is still a valid base register, then add them.  */
      if (GET_CODE (XEXP (idx, 1)) == CONST_INT
	  && INTVAL (XEXP (idx, 1)) <= 4096
	  && INTVAL (XEXP (idx, 1)) >= -4096)
	{
	  rtx reg1, reg2;

	  reg1 = force_reg (Pmode, gen_rtx_PLUS (Pmode, base, XEXP (idx, 1)));

	  reg2 = XEXP (XEXP (idx, 0), 0);
	  if (GET_CODE (reg2) != CONST_INT)
	    reg2 = force_reg (Pmode, force_operand (reg2, 0));

	  return force_reg (Pmode,
			    gen_rtx_PLUS (Pmode,
					  gen_rtx_ASHIFT (Pmode, reg2,
							  GEN_INT (shift_val)),
					  reg1));
	}

      /* Get the index into a register, then add the base + index and
	 return a register holding the result.  */

      /* First get A into a register.  */
      reg1 = XEXP (XEXP (idx, 0), 0);
      if (GET_CODE (reg1) != REG)
	reg1 = force_reg (Pmode, force_operand (reg1, 0));

      /* And get B into a register.  */
      reg2 = XEXP (idx, 1);
      if (GET_CODE (reg2) != REG)
	reg2 = force_reg (Pmode, force_operand (reg2, 0));

      reg1 = force_reg (Pmode,
			gen_rtx_PLUS (Pmode,
				      gen_rtx_ASHIFT (Pmode, reg1,
						      GEN_INT (shift_val)),
				      reg2));

      /* Add the result to our base register and return.  */
      return force_reg (Pmode, gen_rtx_PLUS (Pmode, base, reg1));
    }
  /* Uh-oh.  We might have an address for x[n-100000].  This needs
     special handling to avoid creating an indexed memory address
     with x-100000 as the base.

     If the constant part is small enough, then it's still safe because
     there is a guard page at the beginning and end of the data segment.

     Scaled references are common enough that we want to try and rearrange the
     terms so that we can use indexing for these addresses too.  Only
     do the optimization for floating point modes.  */
  if (GET_CODE (x) == PLUS
      && pa_symbolic_expression_p (XEXP (x, 1)))
    {
      /* Ugly.  We modify things here so that the address offset specified
	 by the index expression is computed first, then added to x to form
	 the entire address.  */

      rtx regx1, regx2, regy1, regy2, y;

      /* Strip off any CONST.  */
      y = XEXP (x, 1);
      if (GET_CODE (y) == CONST)
	y = XEXP (y, 0);

      if (GET_CODE (y) == PLUS || GET_CODE (y) == MINUS)
	{
	  /* See if this looks like
	     (plus (mult (reg) (mem_shadd_const))
		   (const (plus (symbol_ref) (const_int))))

	     Where const_int is small.  In that case the const
	     expression is a valid pointer for indexing.

	     If const_int is big, but can be divided evenly by shadd_const
	     and added to (reg).  This allows more scaled indexed addresses.  */
	  if (GET_CODE (XEXP (y, 0)) == SYMBOL_REF
	      && mem_shadd_or_shadd_rtx_p (XEXP (x, 0))
	      && GET_CODE (XEXP (y, 1)) == CONST_INT
	      && INTVAL (XEXP (y, 1)) >= -4096
	      && INTVAL (XEXP (y, 1)) <= 4095)
	    {
	      int shift_val = INTVAL (XEXP (XEXP (x, 0), 1));

	      /* If we were given a MULT, we must fix the constant
		 as we're going to create the ASHIFT form.  */
	      if (GET_CODE (XEXP (x, 0)) == MULT)
		shift_val = exact_log2 (shift_val);

	      rtx reg1, reg2;

	      reg1 = XEXP (x, 1);
	      if (GET_CODE (reg1) != REG)
		reg1 = force_reg (Pmode, force_operand (reg1, 0));

	      reg2 = XEXP (XEXP (x, 0), 0);
	      if (GET_CODE (reg2) != REG)
		reg2 = force_reg (Pmode, force_operand (reg2, 0));

	      return
		force_reg (Pmode,
			   gen_rtx_PLUS (Pmode,
					 gen_rtx_ASHIFT (Pmode,
							 reg2,
							 GEN_INT (shift_val)),
					 reg1));
	    }
	  else if ((mode == DFmode || mode == SFmode)
		   && GET_CODE (XEXP (y, 0)) == SYMBOL_REF
		   && mem_shadd_or_shadd_rtx_p (XEXP (x, 0))
		   && GET_CODE (XEXP (y, 1)) == CONST_INT
		   && INTVAL (XEXP (y, 1)) % (1 << INTVAL (XEXP (XEXP (x, 0), 1))) == 0)
	    {
	      int shift_val = INTVAL (XEXP (XEXP (x, 0), 1));

	      /* If we were given a MULT, we must fix the constant
		 as we're going to create the ASHIFT form.  */
	      if (GET_CODE (XEXP (x, 0)) == MULT)
		shift_val = exact_log2 (shift_val);

	      regx1
		= force_reg (Pmode, GEN_INT (INTVAL (XEXP (y, 1))
					     / INTVAL (XEXP (XEXP (x, 0), 1))));
	      regx2 = XEXP (XEXP (x, 0), 0);
	      if (GET_CODE (regx2) != REG)
		regx2 = force_reg (Pmode, force_operand (regx2, 0));
	      regx2 = force_reg (Pmode, gen_rtx_fmt_ee (GET_CODE (y), Pmode,
							regx2, regx1));
	      return
		force_reg (Pmode,
			   gen_rtx_PLUS (Pmode,
					 gen_rtx_ASHIFT (Pmode, regx2,
							 GEN_INT (shift_val)),
					 force_reg (Pmode, XEXP (y, 0))));
	    }
	  else if (GET_CODE (XEXP (y, 1)) == CONST_INT
		   && INTVAL (XEXP (y, 1)) >= -4096
		   && INTVAL (XEXP (y, 1)) <= 4095)
	    {
	      /* This is safe because of the guard page at the
		 beginning and end of the data space.  Just
		 return the original address.  */
	      return orig;
	    }
	  else
	    {
	      /* Doesn't look like one we can optimize.  */
	      regx1 = force_reg (Pmode, force_operand (XEXP (x, 0), 0));
	      regy1 = force_reg (Pmode, force_operand (XEXP (y, 0), 0));
	      regy2 = force_reg (Pmode, force_operand (XEXP (y, 1), 0));
	      regx1 = force_reg (Pmode,
				 gen_rtx_fmt_ee (GET_CODE (y), Pmode,
						 regx1, regy2));
	      return force_reg (Pmode, gen_rtx_PLUS (Pmode, regx1, regy1));
	    }
	}
    }

  return orig;
}
/* Implement the TARGET_REGISTER_MOVE_COST hook.

   Compute extra cost of moving data between one register class
   and another.

   Make moves from SAR so expensive they should never happen.  We used to
   have 0xffff here, but that generates overflow in rare cases.

   Copies involving a FP register and a non-FP register are relatively
   expensive because they must go through memory.

   Other copies are reasonably cheap.  */

static int
hppa_register_move_cost (machine_mode mode ATTRIBUTE_UNUSED,
			 reg_class_t from, reg_class_t to)
{
  if (from == SHIFT_REGS)
    return 0x100;
  else if (to == SHIFT_REGS && FP_REG_CLASS_P (from))
    return 18;
  else if ((FP_REG_CLASS_P (from) && ! FP_REG_CLASS_P (to))
	   || (FP_REG_CLASS_P (to) && ! FP_REG_CLASS_P (from)))
    return 16;
  else
    return 2;
}
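/* E.g. (illustrative): a move between GENERAL_REGS and FP_REGS in either
   direction returns 16 because the copy must bounce through memory, a
   move between two general registers returns 2, and any move whose
   source class is SHIFT_REGS (the SAR) returns 0x100, effectively
   forbidding it.  */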
/* For the HPPA, REG, REG+CONST and LO_SUM addresses cost 1, while a
   bare HIGH costs 2 and anything else, including symbolic and PIC
   references, costs 4.

   It is no coincidence that this has the same structure
   as pa_legitimate_address_p.  */
static int
hppa_address_cost (rtx X, machine_mode mode ATTRIBUTE_UNUSED,
		   addr_space_t as ATTRIBUTE_UNUSED,
		   bool speed ATTRIBUTE_UNUSED)
{
  switch (GET_CODE (X))
    {
    case REG:
    case PLUS:
    case LO_SUM:
      return 1;
    case HIGH:
      return 2;
    default:
      return 4;
    }
}
/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.  */

static bool
hppa_rtx_costs (rtx x, machine_mode mode, int outer_code,
		int opno ATTRIBUTE_UNUSED,
		int *total, bool speed ATTRIBUTE_UNUSED)
{
  int factor;
  int code = GET_CODE (x);

  switch (code)
    {
    case CONST_INT:
      if (INTVAL (x) == 0)
	*total = 0;
      else if (INT_14_BITS (x))
	*total = 1;
      else
	*total = 2;
      return true;

    case HIGH:
      *total = 2;
      return true;

    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      *total = 4;
      return true;

    case CONST_DOUBLE:
      if ((x == CONST0_RTX (DFmode) || x == CONST0_RTX (SFmode))
	  && outer_code != SET)
	*total = 0;
      else
	*total = 8;
      return true;

    case MULT:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	{
	  *total = COSTS_N_INSNS (3);
	  return true;
	}

      /* A mode size N times larger than SImode needs O(N*N) more insns.  */
      factor = GET_MODE_SIZE (mode) / 4;
      if (factor == 0)
	factor = 1;

      if (TARGET_PA_11 && !TARGET_DISABLE_FPREGS && !TARGET_SOFT_FLOAT)
	*total = factor * factor * COSTS_N_INSNS (8);
      else
	*total = factor * factor * COSTS_N_INSNS (20);
      return true;

    case DIV:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	{
	  *total = COSTS_N_INSNS (14);
	  return true;
	}
      /* FALLTHRU */

    case UDIV:
    case MOD:
    case UMOD:
      /* A mode size N times larger than SImode needs O(N*N) more insns.  */
      factor = GET_MODE_SIZE (mode) / 4;
      if (factor == 0)
	factor = 1;

      *total = factor * factor * COSTS_N_INSNS (60);
      return true;

    case PLUS: /* this includes shNadd insns */
    case MINUS:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	{
	  *total = COSTS_N_INSNS (3);
	  return true;
	}

      /* A size N times larger than UNITS_PER_WORD needs N times as
	 many insns, taking N times as long.  */
      factor = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      if (factor == 0)
	factor = 1;
      *total = factor * COSTS_N_INSNS (1);
      return true;

    case ASHIFT:
    case ASHIFTRT:
    case LSHIFTRT:
      *total = COSTS_N_INSNS (1);
      return true;

    default:
      return false;
    }
}
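/* Cost sketch (illustrative): a DImode multiply on a 32-bit target gets
   factor = 2, so it costs 2 * 2 * COSTS_N_INSNS (8) when PA 1.1 FP
   hardware is usable and 2 * 2 * COSTS_N_INSNS (20) under soft float;
   a SImode add keeps factor = 1 and costs COSTS_N_INSNS (1).  */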
/* Ensure mode of ORIG, a REG rtx, is MODE.  Returns either ORIG or a
   new rtx with the correct mode.  */
static inline rtx
force_mode (machine_mode mode, rtx orig)
{
  if (mode == GET_MODE (orig))
    return orig;

  gcc_assert (REGNO (orig) < FIRST_PSEUDO_REGISTER);

  return gen_rtx_REG (mode, REGNO (orig));
}
/* Implement TARGET_CANNOT_FORCE_CONST_MEM.  */

static bool
pa_cannot_force_const_mem (machine_mode mode ATTRIBUTE_UNUSED, rtx x)
{
  return tls_referenced_p (x);
}
/* Emit insns to move operands[1] into operands[0].

   Return 1 if we have written out everything that needs to be done to
   do the move.  Otherwise, return 0 and the caller will emit the move
   normally.

   Note SCRATCH_REG may not be in the proper mode depending on how it
   will be used.  This routine is responsible for creating a new copy
   of SCRATCH_REG in the proper mode.  */

int
pa_emit_move_sequence (rtx *operands, machine_mode mode, rtx scratch_reg)
{
  register rtx operand0 = operands[0];
  register rtx operand1 = operands[1];
  register rtx tem;

  /* We can only handle indexed addresses in the destination operand
     of floating point stores.  Thus, we need to break out indexed
     addresses from the destination operand.  */
  if (GET_CODE (operand0) == MEM && IS_INDEX_ADDR_P (XEXP (operand0, 0)))
    {
      gcc_assert (can_create_pseudo_p ());

      tem = copy_to_mode_reg (Pmode, XEXP (operand0, 0));
      operand0 = replace_equiv_address (operand0, tem);
    }
  /* On targets with non-equivalent space registers, break out unscaled
     indexed addresses from the source operand before the final CSE.
     We have to do this because the REG_POINTER flag is not correctly
     carried through various optimization passes and CSE may substitute
     a pseudo without the pointer set for one with the pointer set.  As
     a result, we lose various opportunities to create insns with
     unscaled indexed addresses.  */
1662 if (!TARGET_NO_SPACE_REGS
1663 && !cse_not_expected
1664 && GET_CODE (operand1) == MEM
1665 && GET_CODE (XEXP (operand1, 0)) == PLUS
1666 && REG_P (XEXP (XEXP (operand1, 0), 0))
1667 && REG_P (XEXP (XEXP (operand1, 0), 1)))
1668 operand1
1669 = replace_equiv_address (operand1,
1670 copy_to_mode_reg (Pmode, XEXP (operand1, 0)));
1672 if (scratch_reg
1673 && reload_in_progress && GET_CODE (operand0) == REG
1674 && REGNO (operand0) >= FIRST_PSEUDO_REGISTER)
1675 operand0 = reg_equiv_mem (REGNO (operand0));
1676 else if (scratch_reg
1677 && reload_in_progress && GET_CODE (operand0) == SUBREG
1678 && GET_CODE (SUBREG_REG (operand0)) == REG
1679 && REGNO (SUBREG_REG (operand0)) >= FIRST_PSEUDO_REGISTER)
1681 /* We must not alter SUBREG_BYTE (operand0) since that would confuse
1682 the code which tracks sets/uses for delete_output_reload. */
1683 rtx temp = gen_rtx_SUBREG (GET_MODE (operand0),
1684 reg_equiv_mem (REGNO (SUBREG_REG (operand0))),
1685 SUBREG_BYTE (operand0));
1686 operand0 = alter_subreg (&temp, true);
1689 if (scratch_reg
1690 && reload_in_progress && GET_CODE (operand1) == REG
1691 && REGNO (operand1) >= FIRST_PSEUDO_REGISTER)
1692 operand1 = reg_equiv_mem (REGNO (operand1));
1693 else if (scratch_reg
1694 && reload_in_progress && GET_CODE (operand1) == SUBREG
1695 && GET_CODE (SUBREG_REG (operand1)) == REG
1696 && REGNO (SUBREG_REG (operand1)) >= FIRST_PSEUDO_REGISTER)
1698 /* We must not alter SUBREG_BYTE (operand0) since that would confuse
1699 the code which tracks sets/uses for delete_output_reload. */
1700 rtx temp = gen_rtx_SUBREG (GET_MODE (operand1),
1701 reg_equiv_mem (REGNO (SUBREG_REG (operand1))),
1702 SUBREG_BYTE (operand1));
1703 operand1 = alter_subreg (&temp, true);
1706 if (scratch_reg && reload_in_progress && GET_CODE (operand0) == MEM
1707 && ((tem = find_replacement (&XEXP (operand0, 0)))
1708 != XEXP (operand0, 0)))
1709 operand0 = replace_equiv_address (operand0, tem);
1711 if (scratch_reg && reload_in_progress && GET_CODE (operand1) == MEM
1712 && ((tem = find_replacement (&XEXP (operand1, 0)))
1713 != XEXP (operand1, 0)))
1714 operand1 = replace_equiv_address (operand1, tem);
1716 /* Handle secondary reloads for loads/stores of FP registers from
1717 REG+D addresses where D does not fit in 5 or 14 bits, including
1718 (subreg (mem (addr))) cases, and reloads for other unsupported
1719 memory operands. */
1720 if (scratch_reg
1721 && FP_REG_P (operand0)
1722 && (MEM_P (operand1)
1723 || (GET_CODE (operand1) == SUBREG
1724 && MEM_P (XEXP (operand1, 0)))))
1726 rtx op1 = operand1;
1728 if (GET_CODE (op1) == SUBREG)
1729 op1 = XEXP (op1, 0);
1731 if (reg_plus_base_memory_operand (op1, GET_MODE (op1)))
1733 if (!(TARGET_PA_20
1734 && !TARGET_ELF32
1735 && INT_14_BITS (XEXP (XEXP (op1, 0), 1)))
1736 && !INT_5_BITS (XEXP (XEXP (op1, 0), 1)))
1738 /* SCRATCH_REG will hold an address and maybe the actual data.
1739 We want it in WORD_MODE regardless of what mode it was
1740 originally given to us. */
1741 scratch_reg = force_mode (word_mode, scratch_reg);
1743 /* D might not fit in 14 bits either; for such cases load D
1744 into scratch reg. */
1745 if (!INT_14_BITS (XEXP (XEXP (op1, 0), 1)))
1747 emit_move_insn (scratch_reg, XEXP (XEXP (op1, 0), 1));
1748 emit_move_insn (scratch_reg,
1749 gen_rtx_fmt_ee (GET_CODE (XEXP (op1, 0)),
1750 Pmode,
1751 XEXP (XEXP (op1, 0), 0),
1752 scratch_reg));
1754 else
1755 emit_move_insn (scratch_reg, XEXP (op1, 0));
1756 op1 = replace_equiv_address (op1, scratch_reg);
1759 else if ((!INT14_OK_STRICT && symbolic_memory_operand (op1, VOIDmode))
1760 || IS_LO_SUM_DLT_ADDR_P (XEXP (op1, 0))
1761 || IS_INDEX_ADDR_P (XEXP (op1, 0)))
1763 /* Load memory address into SCRATCH_REG. */
1764 scratch_reg = force_mode (word_mode, scratch_reg);
1765 emit_move_insn (scratch_reg, XEXP (op1, 0));
1766 op1 = replace_equiv_address (op1, scratch_reg);
1768 emit_insn (gen_rtx_SET (operand0, op1));
1769 return 1;
1771 else if (scratch_reg
1772 && FP_REG_P (operand1)
1773 && (MEM_P (operand0)
1774 || (GET_CODE (operand0) == SUBREG
1775 && MEM_P (XEXP (operand0, 0)))))
1777 rtx op0 = operand0;
1779 if (GET_CODE (op0) == SUBREG)
1780 op0 = XEXP (op0, 0);
1782 if (reg_plus_base_memory_operand (op0, GET_MODE (op0)))
1784 if (!(TARGET_PA_20
1785 && !TARGET_ELF32
1786 && INT_14_BITS (XEXP (XEXP (op0, 0), 1)))
1787 && !INT_5_BITS (XEXP (XEXP (op0, 0), 1)))
1789 /* SCRATCH_REG will hold an address and maybe the actual data.
1790 We want it in WORD_MODE regardless of what mode it was
1791 originally given to us. */
1792 scratch_reg = force_mode (word_mode, scratch_reg);
1794 /* D might not fit in 14 bits either; for such cases load D
1795 into scratch reg. */
1796 if (!INT_14_BITS (XEXP (XEXP (op0, 0), 1)))
1798 emit_move_insn (scratch_reg, XEXP (XEXP (op0, 0), 1));
1799 emit_move_insn (scratch_reg,
1800 gen_rtx_fmt_ee (GET_CODE (XEXP (op0, 0)),
1801 Pmode,
1802 XEXP (XEXP (op0, 0), 0),
1803 scratch_reg));
1805 else
1806 emit_move_insn (scratch_reg, XEXP (op0, 0));
1807 op0 = replace_equiv_address (op0, scratch_reg);
1810 else if ((!INT14_OK_STRICT && symbolic_memory_operand (op0, VOIDmode))
1811 || IS_LO_SUM_DLT_ADDR_P (XEXP (op0, 0))
1812 || IS_INDEX_ADDR_P (XEXP (op0, 0)))
1814 /* Load memory address into SCRATCH_REG. */
1815 scratch_reg = force_mode (word_mode, scratch_reg);
1816 emit_move_insn (scratch_reg, XEXP (op0, 0));
1817 op0 = replace_equiv_address (op0, scratch_reg);
1819 emit_insn (gen_rtx_SET (op0, operand1));
1820 return 1;
1822 /* Handle secondary reloads for loads of FP registers from constant
1823 expressions by forcing the constant into memory. For the most part,
1824 this is only necessary for SImode and DImode.
1826 Use scratch_reg to hold the address of the memory location. */
1827 else if (scratch_reg
1828 && CONSTANT_P (operand1)
1829 && FP_REG_P (operand0))
1831 rtx const_mem, xoperands[2];
1833 if (operand1 == CONST0_RTX (mode))
1835 emit_insn (gen_rtx_SET (operand0, operand1));
1836 return 1;
1839 /* SCRATCH_REG will hold an address and maybe the actual data. We want
1840 it in WORD_MODE regardless of what mode it was originally given
1841 to us. */
1842 scratch_reg = force_mode (word_mode, scratch_reg);
1844 /* Force the constant into memory and put the address of the
1845 memory location into scratch_reg. */
1846 const_mem = force_const_mem (mode, operand1);
1847 xoperands[0] = scratch_reg;
1848 xoperands[1] = XEXP (const_mem, 0);
1849 pa_emit_move_sequence (xoperands, Pmode, 0);
1851 /* Now load the destination register. */
1852 emit_insn (gen_rtx_SET (operand0,
1853 replace_equiv_address (const_mem, scratch_reg)));
1854 return 1;
1856 /* Handle secondary reloads for SAR. These occur when trying to load
1857 the SAR from memory or a constant. */
1858 else if (scratch_reg
1859 && GET_CODE (operand0) == REG
1860 && REGNO (operand0) < FIRST_PSEUDO_REGISTER
1861 && REGNO_REG_CLASS (REGNO (operand0)) == SHIFT_REGS
1862 && (GET_CODE (operand1) == MEM || GET_CODE (operand1) == CONST_INT))
1864 /* D might not fit in 14 bits either; for such cases load D into
1865 scratch reg. */
1866 if (GET_CODE (operand1) == MEM
1867 && !memory_address_p (GET_MODE (operand0), XEXP (operand1, 0)))
1869 /* We are reloading the address into the scratch register, so we
1870 want to make sure the scratch register is a full register. */
1871 scratch_reg = force_mode (word_mode, scratch_reg);
1873 emit_move_insn (scratch_reg, XEXP (XEXP (operand1, 0), 1));
1874 emit_move_insn (scratch_reg, gen_rtx_fmt_ee (GET_CODE (XEXP (operand1,
1875 0)),
1876 Pmode,
1877 XEXP (XEXP (operand1, 0),
1878 0),
1879 scratch_reg));
1881 /* Now we are going to load the scratch register from memory;
1882 we want to load it in the same width as the original MEM,
1883 which must be the same as the width of the ultimate destination,
1884 OPERAND0. */
1885 scratch_reg = force_mode (GET_MODE (operand0), scratch_reg);
1887 emit_move_insn (scratch_reg,
1888 replace_equiv_address (operand1, scratch_reg));
1890 else
1892 /* We want to load the scratch register using the same mode as
1893 the ultimate destination. */
1894 scratch_reg = force_mode (GET_MODE (operand0), scratch_reg);
1896 emit_move_insn (scratch_reg, operand1);
1899 /* And emit the insn to set the ultimate destination. We know that
1900 the scratch register has the same mode as the destination at this
1901 point. */
1902 emit_move_insn (operand0, scratch_reg);
1903 return 1;
1906 /* Handle the most common case: storing into a register. */
1907 if (register_operand (operand0, mode))
1909 /* Legitimize TLS symbol references. This happens for references
1910 that aren't legitimate constants. */
1911 if (PA_SYMBOL_REF_TLS_P (operand1))
1912 operand1 = legitimize_tls_address (operand1);
1914 if (register_operand (operand1, mode)
1915 || (GET_CODE (operand1) == CONST_INT
1916 && pa_cint_ok_for_move (UINTVAL (operand1)))
1917 || (operand1 == CONST0_RTX (mode))
1918 || (GET_CODE (operand1) == HIGH
1919 && !symbolic_operand (XEXP (operand1, 0), VOIDmode))
1920 /* Only `general_operands' can come here, so MEM is ok. */
1921 || GET_CODE (operand1) == MEM)
1923 /* Various sets are created during RTL generation which don't
1924 have the REG_POINTER flag correctly set. After the CSE pass,
1925 instruction recognition can fail if we don't consistently
1926 set this flag when performing register copies. This should
1927 also improve the opportunities for creating insns that use
1928 unscaled indexing. */
1929 if (REG_P (operand0) && REG_P (operand1))
1931 if (REG_POINTER (operand1)
1932 && !REG_POINTER (operand0)
1933 && !HARD_REGISTER_P (operand0))
1934 copy_reg_pointer (operand0, operand1);
1937 /* When MEMs are broken out, the REG_POINTER flag doesn't
1938 get set. In some cases, we can set the REG_POINTER flag
1939 from the declaration for the MEM. */
1940 if (REG_P (operand0)
1941 && GET_CODE (operand1) == MEM
1942 && !REG_POINTER (operand0))
1944 tree decl = MEM_EXPR (operand1);
1946 /* Set the register pointer flag and register alignment
1947 if the declaration for this memory reference is a
1948 pointer type. */
1949 if (decl)
1951 tree type;
1953 /* If this is a COMPONENT_REF, use the FIELD_DECL from
1954 tree operand 1. */
1955 if (TREE_CODE (decl) == COMPONENT_REF)
1956 decl = TREE_OPERAND (decl, 1);
1958 type = TREE_TYPE (decl);
1959 type = strip_array_types (type);
1961 if (POINTER_TYPE_P (type))
1962 mark_reg_pointer (operand0, BITS_PER_UNIT);
1966 emit_insn (gen_rtx_SET (operand0, operand1));
1967 return 1;
1970 else if (GET_CODE (operand0) == MEM)
1972 if (mode == DFmode && operand1 == CONST0_RTX (mode)
1973 && !(reload_in_progress || reload_completed))
1975 rtx temp = gen_reg_rtx (DFmode);
1977 emit_insn (gen_rtx_SET (temp, operand1));
1978 emit_insn (gen_rtx_SET (operand0, temp));
1979 return 1;
1981 if (register_operand (operand1, mode) || operand1 == CONST0_RTX (mode))
1983 /* Run this case quickly. */
1984 emit_insn (gen_rtx_SET (operand0, operand1));
1985 return 1;
1987 if (! (reload_in_progress || reload_completed))
1989 operands[0] = validize_mem (operand0);
1990 operands[1] = operand1 = force_reg (mode, operand1);
1994 /* Simplify the source if we need to.
1995 Note we do have to handle function labels here, even though we do
1996 not consider them legitimate constants. Loop optimizations can
1997 call the emit_move_xxx routines with one as a source. */
1998 if ((GET_CODE (operand1) != HIGH && immediate_operand (operand1, mode))
1999 || (GET_CODE (operand1) == HIGH
2000 && symbolic_operand (XEXP (operand1, 0), mode))
2001 || function_label_operand (operand1, VOIDmode)
2002 || tls_referenced_p (operand1))
2004 int ishighonly = 0;
2006 if (GET_CODE (operand1) == HIGH)
2008 ishighonly = 1;
2009 operand1 = XEXP (operand1, 0);
2011 if (symbolic_operand (operand1, mode))
2013 /* Argh. The assembler and linker can't handle arithmetic
2014 involving plabels.
2016 So we force the plabel into memory, load operand0 from
2017 the memory location, then add in the constant part. */
2018 if ((GET_CODE (operand1) == CONST
2019 && GET_CODE (XEXP (operand1, 0)) == PLUS
2020 && function_label_operand (XEXP (XEXP (operand1, 0), 0),
2021 VOIDmode))
2022 || function_label_operand (operand1, VOIDmode))
2024 rtx temp, const_part;
2026 /* Figure out what (if any) scratch register to use. */
2027 if (reload_in_progress || reload_completed)
2029 scratch_reg = scratch_reg ? scratch_reg : operand0;
2030 /* SCRATCH_REG will hold an address and maybe the actual
2031 data. We want it in WORD_MODE regardless of what mode it
2032 was originally given to us. */
2033 scratch_reg = force_mode (word_mode, scratch_reg);
2035 else if (flag_pic)
2036 scratch_reg = gen_reg_rtx (Pmode);
2038 if (GET_CODE (operand1) == CONST)
2040 /* Save away the constant part of the expression. */
2041 const_part = XEXP (XEXP (operand1, 0), 1);
2042 gcc_assert (GET_CODE (const_part) == CONST_INT);
2044 /* Force the function label into memory. */
2045 temp = force_const_mem (mode, XEXP (XEXP (operand1, 0), 0));
2047 else
2049 /* No constant part. */
2050 const_part = NULL_RTX;
2052 /* Force the function label into memory. */
2053 temp = force_const_mem (mode, operand1);
2057 /* Get the address of the memory location. PIC-ify it if
2058 necessary. */
2059 temp = XEXP (temp, 0);
2060 if (flag_pic)
2061 temp = legitimize_pic_address (temp, mode, scratch_reg);
2063 /* Put the address of the memory location into our destination
2064 register. */
2065 operands[1] = temp;
2066 pa_emit_move_sequence (operands, mode, scratch_reg);
2068 /* Now load from the memory location into our destination
2069 register. */
2070 operands[1] = gen_rtx_MEM (Pmode, operands[0]);
2071 pa_emit_move_sequence (operands, mode, scratch_reg);
2073 /* And add back in the constant part. */
2074 if (const_part != NULL_RTX)
2075 expand_inc (operand0, const_part);
2077 return 1;
2080 if (flag_pic)
2082 rtx_insn *insn;
2083 rtx temp;
2085 if (reload_in_progress || reload_completed)
2087 temp = scratch_reg ? scratch_reg : operand0;
2088 /* TEMP will hold an address and maybe the actual
2089 data. We want it in WORD_MODE regardless of what mode it
2090 was originally given to us. */
2091 temp = force_mode (word_mode, temp);
2093 else
2094 temp = gen_reg_rtx (Pmode);
2096 /* Force (const (plus (symbol) (const_int))) to memory
2097 if the const_int will not fit in 14 bits. Although
2098 this requires a relocation, the instruction sequence
2099 needed to load the value is shorter. */
2100 if (GET_CODE (operand1) == CONST
2101 && GET_CODE (XEXP (operand1, 0)) == PLUS
2102 && GET_CODE (XEXP (XEXP (operand1, 0), 1)) == CONST_INT
2103 && !INT_14_BITS (XEXP (XEXP (operand1, 0), 1)))
2105 rtx x, m = force_const_mem (mode, operand1);
2107 x = legitimize_pic_address (XEXP (m, 0), mode, temp);
2108 x = replace_equiv_address (m, x);
2109 insn = emit_move_insn (operand0, x);
2111 else
2113 operands[1] = legitimize_pic_address (operand1, mode, temp);
2114 if (REG_P (operand0) && REG_P (operands[1]))
2115 copy_reg_pointer (operand0, operands[1]);
2116 insn = emit_move_insn (operand0, operands[1]);
2119 /* Put a REG_EQUAL note on this insn. */
2120 set_unique_reg_note (insn, REG_EQUAL, operand1);
2122 /* On the HPPA, references to data space are supposed to use dp,
2123 register 27, but showing it in the RTL inhibits various cse
2124 and loop optimizations. */
2125 else
2127 rtx temp, set;
2129 if (reload_in_progress || reload_completed)
2131 temp = scratch_reg ? scratch_reg : operand0;
2132 /* TEMP will hold an address and maybe the actual
2133 data. We want it in WORD_MODE regardless of what mode it
2134 was originally given to us. */
2135 temp = force_mode (word_mode, temp);
2137 else
2138 temp = gen_reg_rtx (mode);
2140 /* Loading a SYMBOL_REF into a register makes that register
2141 safe to be used as the base in an indexed address.
2143 Don't mark hard registers though. That loses. */
2144 if (GET_CODE (operand0) == REG
2145 && REGNO (operand0) >= FIRST_PSEUDO_REGISTER)
2146 mark_reg_pointer (operand0, BITS_PER_UNIT);
2147 if (REGNO (temp) >= FIRST_PSEUDO_REGISTER)
2148 mark_reg_pointer (temp, BITS_PER_UNIT);
2150 if (ishighonly)
2151 set = gen_rtx_SET (operand0, temp);
2152 else
2153 set = gen_rtx_SET (operand0,
2154 gen_rtx_LO_SUM (mode, temp, operand1));
2156 emit_insn (gen_rtx_SET (temp, gen_rtx_HIGH (mode, operand1)));
2157 emit_insn (set);
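/* For example, loading the address of a symbol FOO this way emits
   (set (reg temp) (high (symbol_ref FOO))) followed by
   (set (reg dst) (lo_sum (reg temp) (symbol_ref FOO))), which
   assembles to roughly "ldil L'FOO,temp" / "ldo R'FOO(temp),dst". */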
2160 return 1;
2162 else if (tls_referenced_p (operand1))
2164 rtx tmp = operand1;
2165 rtx addend = NULL;
2167 if (GET_CODE (tmp) == CONST && GET_CODE (XEXP (tmp, 0)) == PLUS)
2169 addend = XEXP (XEXP (tmp, 0), 1);
2170 tmp = XEXP (XEXP (tmp, 0), 0);
2173 gcc_assert (GET_CODE (tmp) == SYMBOL_REF);
2174 tmp = legitimize_tls_address (tmp);
2175 if (addend)
2177 tmp = gen_rtx_PLUS (mode, tmp, addend);
2178 tmp = force_operand (tmp, operands[0]);
2180 operands[1] = tmp;
2182 else if (GET_CODE (operand1) != CONST_INT
2183 || !pa_cint_ok_for_move (UINTVAL (operand1)))
2185 rtx temp;
2186 rtx_insn *insn;
2187 rtx op1 = operand1;
2188 HOST_WIDE_INT value = 0;
2189 HOST_WIDE_INT insv = 0;
2190 int insert = 0;
2192 if (GET_CODE (operand1) == CONST_INT)
2193 value = INTVAL (operand1);
2195 if (TARGET_64BIT
2196 && GET_CODE (operand1) == CONST_INT
2197 && HOST_BITS_PER_WIDE_INT > 32
2198 && GET_MODE_BITSIZE (GET_MODE (operand0)) > 32)
2200 HOST_WIDE_INT nval;
2202 /* Extract the low order 32 bits of the value and sign extend.
2203 If the new value is the same as the original value, we can
2204 use the original value as-is. If the new value is
2205 different, we use it and insert the most-significant 32-bits
2206 of the original value into the final result. */
2207 nval = ((value & (((HOST_WIDE_INT) 2 << 31) - 1))
2208 ^ ((HOST_WIDE_INT) 1 << 31)) - ((HOST_WIDE_INT) 1 << 31);
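/* Worked example: for VALUE = 0x123456789, NVAL sign-extends the
   low-order 32 bits to 0x23456789, which differs from VALUE, so the
   upper 32 bits (INSV = 0x1) must be inserted into the result later. */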
2209 if (value != nval)
2211 #if HOST_BITS_PER_WIDE_INT > 32
2212 insv = value >= 0 ? value >> 32 : ~(~value >> 32);
2213 #endif
2214 insert = 1;
2215 value = nval;
2216 operand1 = GEN_INT (nval);
2220 if (reload_in_progress || reload_completed)
2221 temp = scratch_reg ? scratch_reg : operand0;
2222 else
2223 temp = gen_reg_rtx (mode);
2225 /* We don't directly split DImode constants on 32-bit targets
2226 because PLUS uses an 11-bit immediate and the insn sequence
2227 generated is not as efficient as the one using HIGH/LO_SUM. */
2228 if (GET_CODE (operand1) == CONST_INT
2229 && GET_MODE_BITSIZE (mode) <= BITS_PER_WORD
2230 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2231 && !insert)
2233 /* Directly break constant into high and low parts. This
2234 provides better optimization opportunities because various
2235 passes recognize constants split with PLUS but not LO_SUM.
2236 We use a 14-bit signed low part except when the addition
2237 of 0x4000 to the high part might change the sign of the
2238 high part. */
2239 HOST_WIDE_INT low = value & 0x3fff;
2240 HOST_WIDE_INT high = value & ~ 0x3fff;
2242 if (low >= 0x2000)
2244 if (high == 0x7fffc000 || (mode == HImode && high == 0x4000))
2245 high += 0x2000;
2246 else
2247 high += 0x4000;
2250 low = value - high;
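/* Worked example: VALUE = 0x7fff gives LOW = 0x3fff and HIGH = 0x4000;
   since LOW >= 0x2000, the adjustment above bumps HIGH to 0x8000 and
   the recomputed LOW becomes -1, so the constant is built as
   0x8000 + (-1) with a 14-bit signed displacement. */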
2252 emit_insn (gen_rtx_SET (temp, GEN_INT (high)));
2253 operands[1] = gen_rtx_PLUS (mode, temp, GEN_INT (low));
2255 else
2257 emit_insn (gen_rtx_SET (temp, gen_rtx_HIGH (mode, operand1)));
2258 operands[1] = gen_rtx_LO_SUM (mode, temp, operand1);
2261 insn = emit_move_insn (operands[0], operands[1]);
2263 /* Now insert the most significant 32 bits of the value
2264 into the register. When we don't have a second register
2265 available, it could take up to nine instructions to load
2266 a 64-bit integer constant. Prior to reload, we force
2267 constants that would take more than three instructions
2268 to load to the constant pool. During and after reload,
2269 we have to handle all possible values. */
2270 if (insert)
2272 /* Use a HIGH/LO_SUM/INSV sequence if we have a second
2273 register and the value to be inserted is outside the
2274 range that can be loaded with three depdi instructions. */
2275 if (temp != operand0 && (insv >= 16384 || insv < -16384))
2277 operand1 = GEN_INT (insv);
2279 emit_insn (gen_rtx_SET (temp,
2280 gen_rtx_HIGH (mode, operand1)));
2281 emit_move_insn (temp, gen_rtx_LO_SUM (mode, temp, operand1));
2282 if (mode == DImode)
2283 insn = emit_insn (gen_insvdi (operand0, GEN_INT (32),
2284 const0_rtx, temp));
2285 else
2286 insn = emit_insn (gen_insvsi (operand0, GEN_INT (32),
2287 const0_rtx, temp));
2289 else
2291 int len = 5, pos = 27;
2293 /* Insert the bits using the depdi instruction. */
2294 while (pos >= 0)
2296 HOST_WIDE_INT v5 = ((insv & 31) ^ 16) - 16;
2297 HOST_WIDE_INT sign = v5 < 0;
2299 /* Left extend the insertion. */
2300 insv = (insv >= 0 ? insv >> len : ~(~insv >> len));
2301 while (pos > 0 && (insv & 1) == sign)
2303 insv = (insv >= 0 ? insv >> 1 : ~(~insv >> 1));
2304 len += 1;
2305 pos -= 1;
2308 if (mode == DImode)
2309 insn = emit_insn (gen_insvdi (operand0,
2310 GEN_INT (len),
2311 GEN_INT (pos),
2312 GEN_INT (v5)));
2313 else
2314 insn = emit_insn (gen_insvsi (operand0,
2315 GEN_INT (len),
2316 GEN_INT (pos),
2317 GEN_INT (v5)));
2319 len = pos > 0 && pos < 5 ? pos : 5;
2320 pos -= len;
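/* The loop above deposits INSV five (or more) bits at a time, low-order
   chunk first: each chunk is sign-extended to five bits, runs of bits
   equal to that chunk's sign are folded into a wider deposit, and INSV
   is shifted right for the next iteration. A small value such as
   INSV = 5 therefore needs only a single depdi. */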
2325 set_unique_reg_note (insn, REG_EQUAL, op1);
2327 return 1;
2330 /* Now have insn-emit do whatever it normally does. */
2331 return 0;
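/* A minimal host-side sketch of the 32-bit sign extension used above
   (hypothetical helper, shown only for illustration; the port itself
   uses the XOR/subtract form inline): */

static long long
sign_extend_low32 (long long value)
{
  /* Keep the low-order 32 bits, then subtract twice the sign bit,
     which matches ((x & 0xffffffff) ^ 0x80000000) - 0x80000000. */
  return (value & 0xffffffffLL) - ((value & 0x80000000LL) << 1);
}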
2334 /* Examine EXP and return nonzero if it contains an ADDR_EXPR (meaning
2335 it will need a link/runtime reloc). */
2337 int
2338 pa_reloc_needed (tree exp)
2340 int reloc = 0;
2342 switch (TREE_CODE (exp))
2344 case ADDR_EXPR:
2345 return 1;
2347 case POINTER_PLUS_EXPR:
2348 case PLUS_EXPR:
2349 case MINUS_EXPR:
2350 reloc = pa_reloc_needed (TREE_OPERAND (exp, 0));
2351 reloc |= pa_reloc_needed (TREE_OPERAND (exp, 1));
2352 break;
2354 CASE_CONVERT:
2355 case NON_LVALUE_EXPR:
2356 reloc = pa_reloc_needed (TREE_OPERAND (exp, 0));
2357 break;
2359 case CONSTRUCTOR:
2361 tree value;
2362 unsigned HOST_WIDE_INT ix;
2364 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), ix, value)
2365 if (value)
2366 reloc |= pa_reloc_needed (value);
2368 break;
2370 case ERROR_MARK:
2371 break;
2373 default:
2374 break;
2376 return reloc;
2380 /* Return the best assembler insn template
2381 for moving operands[1] into operands[0] as a fullword. */
2382 const char *
2383 pa_singlemove_string (rtx *operands)
2385 HOST_WIDE_INT intval;
2387 if (GET_CODE (operands[0]) == MEM)
2388 return "stw %r1,%0";
2389 if (GET_CODE (operands[1]) == MEM)
2390 return "ldw %1,%0";
2391 if (GET_CODE (operands[1]) == CONST_DOUBLE)
2393 long i;
2395 gcc_assert (GET_MODE (operands[1]) == SFmode);
2397 /* Translate the CONST_DOUBLE to a CONST_INT with the same target
2398 bit pattern. */
2399 REAL_VALUE_TO_TARGET_SINGLE (*CONST_DOUBLE_REAL_VALUE (operands[1]), i);
2401 operands[1] = GEN_INT (i);
2402 /* Fall through to CONST_INT case. */
2404 if (GET_CODE (operands[1]) == CONST_INT)
2406 intval = INTVAL (operands[1]);
2408 if (VAL_14_BITS_P (intval))
2409 return "ldi %1,%0";
2410 else if ((intval & 0x7ff) == 0)
2411 return "ldil L'%1,%0";
2412 else if (pa_zdepi_cint_p (intval))
2413 return "{zdepi %Z1,%0|depwi,z %Z1,%0}";
2414 else
2415 return "ldil L'%1,%0\n\tldo R'%1(%0),%0";
2417 return "copy %1,%0";
2421 /* Compute position (in OP[1]) and width (in OP[2])
2422 useful for copying IMM to a register using the zdepi
2423 instructions. Store the immediate value to insert in OP[0]. */
2424 static void
2425 compute_zdepwi_operands (unsigned HOST_WIDE_INT imm, unsigned *op)
2427 int lsb, len;
2429 /* Find the least significant set bit in IMM. */
2430 for (lsb = 0; lsb < 32; lsb++)
2432 if ((imm & 1) != 0)
2433 break;
2434 imm >>= 1;
2437 /* Choose variants based on *sign* of the 5-bit field. */
2438 if ((imm & 0x10) == 0)
2439 len = (lsb <= 28) ? 4 : 32 - lsb;
2440 else
2442 /* Find the width of the bitstring in IMM. */
2443 for (len = 5; len < 32 - lsb; len++)
2445 if ((imm & ((unsigned HOST_WIDE_INT) 1 << len)) == 0)
2446 break;
2449 /* Sign extend IMM as a 5-bit value. */
2450 imm = (imm & 0xf) - 0x10;
2453 op[0] = imm;
2454 op[1] = 31 - lsb;
2455 op[2] = len;
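/* Worked example: IMM = 0xff0 shifts down to 0xff with LSB = 4; the
   5-bit field is "negative", the bitstring width comes out as LEN = 8,
   and IMM sign-extends to -1, giving OP = {-1, 27, 8}, i.e. deposit
   eight 1-bits at bit position 27. */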
2458 /* Compute position (in OP[1]) and width (in OP[2])
2459 useful for copying IMM to a register using the depdi,z
2460 instructions. Store the immediate value to insert in OP[0]. */
2462 static void
2463 compute_zdepdi_operands (unsigned HOST_WIDE_INT imm, unsigned *op)
2465 int lsb, len, maxlen;
2467 maxlen = MIN (HOST_BITS_PER_WIDE_INT, 64);
2469 /* Find the least significant set bit in IMM. */
2470 for (lsb = 0; lsb < maxlen; lsb++)
2472 if ((imm & 1) != 0)
2473 break;
2474 imm >>= 1;
2477 /* Choose variants based on *sign* of the 5-bit field. */
2478 if ((imm & 0x10) == 0)
2479 len = (lsb <= maxlen - 4) ? 4 : maxlen - lsb;
2480 else
2482 /* Find the width of the bitstring in IMM. */
2483 for (len = 5; len < maxlen - lsb; len++)
2485 if ((imm & ((unsigned HOST_WIDE_INT) 1 << len)) == 0)
2486 break;
2489 /* Extend length if host is narrow and IMM is negative. */
2490 if (HOST_BITS_PER_WIDE_INT == 32 && len == maxlen - lsb)
2491 len += 32;
2493 /* Sign extend IMM as a 5-bit value. */
2494 imm = (imm & 0xf) - 0x10;
2497 op[0] = imm;
2498 op[1] = 63 - lsb;
2499 op[2] = len;
2502 /* Output assembler code to perform a doubleword move insn
2503 with operands OPERANDS. */
2505 const char *
2506 pa_output_move_double (rtx *operands)
2508 enum { REGOP, OFFSOP, MEMOP, CNSTOP, RNDOP } optype0, optype1;
2509 rtx latehalf[2];
2510 rtx addreg0 = 0, addreg1 = 0;
2511 int highonly = 0;
2513 /* First classify both operands. */
2515 if (REG_P (operands[0]))
2516 optype0 = REGOP;
2517 else if (offsettable_memref_p (operands[0]))
2518 optype0 = OFFSOP;
2519 else if (GET_CODE (operands[0]) == MEM)
2520 optype0 = MEMOP;
2521 else
2522 optype0 = RNDOP;
2524 if (REG_P (operands[1]))
2525 optype1 = REGOP;
2526 else if (CONSTANT_P (operands[1]))
2527 optype1 = CNSTOP;
2528 else if (offsettable_memref_p (operands[1]))
2529 optype1 = OFFSOP;
2530 else if (GET_CODE (operands[1]) == MEM)
2531 optype1 = MEMOP;
2532 else
2533 optype1 = RNDOP;
2535 /* Check for the cases that the operand constraints are not
2536 supposed to allow. */
2537 gcc_assert (optype0 == REGOP || optype1 == REGOP);
2539 /* Handle copies between general and floating registers. */
2541 if (optype0 == REGOP && optype1 == REGOP
2542 && FP_REG_P (operands[0]) ^ FP_REG_P (operands[1]))
2544 if (FP_REG_P (operands[0]))
2546 output_asm_insn ("{stws|stw} %1,-16(%%sp)", operands);
2547 output_asm_insn ("{stws|stw} %R1,-12(%%sp)", operands);
2548 return "{fldds|fldd} -16(%%sp),%0";
2550 else
2552 output_asm_insn ("{fstds|fstd} %1,-16(%%sp)", operands);
2553 output_asm_insn ("{ldws|ldw} -16(%%sp),%0", operands);
2554 return "{ldws|ldw} -12(%%sp),%R0";
2558 /* Handle auto decrementing and incrementing loads and stores
2559 specifically, since the structure of the function doesn't work
2560 for them without major modification. Do it better when we teach
2561 this port about the general inc/dec addressing of the PA.
2562 (This was written by tege. Chide him if it doesn't work.) */
2564 if (optype0 == MEMOP)
2566 /* We have to output the address syntax ourselves, since print_operand
2567 doesn't deal with the addresses we want to use. Fix this later. */
2569 rtx addr = XEXP (operands[0], 0);
2570 if (GET_CODE (addr) == POST_INC || GET_CODE (addr) == POST_DEC)
2572 rtx high_reg = gen_rtx_SUBREG (SImode, operands[1], 0);
2574 operands[0] = XEXP (addr, 0);
2575 gcc_assert (GET_CODE (operands[1]) == REG
2576 && GET_CODE (operands[0]) == REG);
2578 gcc_assert (!reg_overlap_mentioned_p (high_reg, addr));
2580 /* No overlap between high target register and address
2581 register. (We do this in a non-obvious way to
2582 save a register file writeback) */
2583 if (GET_CODE (addr) == POST_INC)
2584 return "{stws|stw},ma %1,8(%0)\n\tstw %R1,-4(%0)";
2585 return "{stws|stw},ma %1,-8(%0)\n\tstw %R1,12(%0)";
2587 else if (GET_CODE (addr) == PRE_INC || GET_CODE (addr) == PRE_DEC)
2589 rtx high_reg = gen_rtx_SUBREG (SImode, operands[1], 0);
2591 operands[0] = XEXP (addr, 0);
2592 gcc_assert (GET_CODE (operands[1]) == REG
2593 && GET_CODE (operands[0]) == REG);
2595 gcc_assert (!reg_overlap_mentioned_p (high_reg, addr));
2596 /* No overlap between high target register and address
2597 register. (We do this in a non-obvious way to save a
2598 register file writeback) */
2599 if (GET_CODE (addr) == PRE_INC)
2600 return "{stws|stw},mb %1,8(%0)\n\tstw %R1,4(%0)";
2601 return "{stws|stw},mb %1,-8(%0)\n\tstw %R1,4(%0)";
2604 if (optype1 == MEMOP)
2606 /* We have to output the address syntax ourselves, since print_operand
2607 doesn't deal with the addresses we want to use. Fix this later. */
2609 rtx addr = XEXP (operands[1], 0);
2610 if (GET_CODE (addr) == POST_INC || GET_CODE (addr) == POST_DEC)
2612 rtx high_reg = gen_rtx_SUBREG (SImode, operands[0], 0);
2614 operands[1] = XEXP (addr, 0);
2615 gcc_assert (GET_CODE (operands[0]) == REG
2616 && GET_CODE (operands[1]) == REG);
2618 if (!reg_overlap_mentioned_p (high_reg, addr))
2620 /* No overlap between high target register and address
2621 register. (We do this in a non-obvious way to
2622 save a register file writeback) */
2623 if (GET_CODE (addr) == POST_INC)
2624 return "{ldws|ldw},ma 8(%1),%0\n\tldw -4(%1),%R0";
2625 return "{ldws|ldw},ma -8(%1),%0\n\tldw 12(%1),%R0";
2627 else
2629 /* This is an undefined situation. We should load into the
2630 address register *and* update that register. Probably
2631 we don't need to handle this at all. */
2632 if (GET_CODE (addr) == POST_INC)
2633 return "ldw 4(%1),%R0\n\t{ldws|ldw},ma 8(%1),%0";
2634 return "ldw 4(%1),%R0\n\t{ldws|ldw},ma -8(%1),%0";
2637 else if (GET_CODE (addr) == PRE_INC || GET_CODE (addr) == PRE_DEC)
2639 rtx high_reg = gen_rtx_SUBREG (SImode, operands[0], 0);
2641 operands[1] = XEXP (addr, 0);
2642 gcc_assert (GET_CODE (operands[0]) == REG
2643 && GET_CODE (operands[1]) == REG);
2645 if (!reg_overlap_mentioned_p (high_reg, addr))
2647 /* No overlap between high target register and address
2648 register. (We do this in a non-obvious way to
2649 save a register file writeback) */
2650 if (GET_CODE (addr) == PRE_INC)
2651 return "{ldws|ldw},mb 8(%1),%0\n\tldw 4(%1),%R0";
2652 return "{ldws|ldw},mb -8(%1),%0\n\tldw 4(%1),%R0";
2654 else
2656 /* This is an undefined situation. We should load into the
2657 address register *and* update that register. Probably
2658 we don't need to handle this at all. */
2659 if (GET_CODE (addr) == PRE_INC)
2660 return "ldw 12(%1),%R0\n\t{ldws|ldw},mb 8(%1),%0";
2661 return "ldw -4(%1),%R0\n\t{ldws|ldw},mb -8(%1),%0";
2664 else if (GET_CODE (addr) == PLUS
2665 && GET_CODE (XEXP (addr, 0)) == MULT)
2667 rtx xoperands[4];
2669 /* Load address into left half of destination register. */
2670 xoperands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
2671 xoperands[1] = XEXP (addr, 1);
2672 xoperands[2] = XEXP (XEXP (addr, 0), 0);
2673 xoperands[3] = XEXP (XEXP (addr, 0), 1);
2674 output_asm_insn ("{sh%O3addl %2,%1,%0|shladd,l %2,%O3,%1,%0}",
2675 xoperands);
2676 return "ldw 4(%0),%R0\n\tldw 0(%0),%0";
2678 else if (GET_CODE (addr) == PLUS
2679 && REG_P (XEXP (addr, 0))
2680 && REG_P (XEXP (addr, 1)))
2682 rtx xoperands[3];
2684 /* Load address into left half of destination register. */
2685 xoperands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
2686 xoperands[1] = XEXP (addr, 0);
2687 xoperands[2] = XEXP (addr, 1);
2688 output_asm_insn ("{addl|add,l} %1,%2,%0",
2689 xoperands);
2690 return "ldw 4(%0),%R0\n\tldw 0(%0),%0";
2694 /* If an operand is an unoffsettable memory ref, find a register
2695 we can increment temporarily to make it refer to the second word. */
2697 if (optype0 == MEMOP)
2698 addreg0 = find_addr_reg (XEXP (operands[0], 0));
2700 if (optype1 == MEMOP)
2701 addreg1 = find_addr_reg (XEXP (operands[1], 0));
2703 /* Ok, we can do one word at a time.
2704 Normally we do the low-numbered word first.
2706 In either case, set up in LATEHALF the operands to use
2707 for the high-numbered word and in some cases alter the
2708 operands in OPERANDS to be suitable for the low-numbered word. */
2710 if (optype0 == REGOP)
2711 latehalf[0] = gen_rtx_REG (SImode, REGNO (operands[0]) + 1);
2712 else if (optype0 == OFFSOP)
2713 latehalf[0] = adjust_address_nv (operands[0], SImode, 4);
2714 else
2715 latehalf[0] = operands[0];
2717 if (optype1 == REGOP)
2718 latehalf[1] = gen_rtx_REG (SImode, REGNO (operands[1]) + 1);
2719 else if (optype1 == OFFSOP)
2720 latehalf[1] = adjust_address_nv (operands[1], SImode, 4);
2721 else if (optype1 == CNSTOP)
2723 if (GET_CODE (operands[1]) == HIGH)
2725 operands[1] = XEXP (operands[1], 0);
2726 highonly = 1;
2728 split_double (operands[1], &operands[1], &latehalf[1]);
2730 else
2731 latehalf[1] = operands[1];
2733 /* If the first move would clobber the source of the second one,
2734 do them in the other order.
2736 This can happen in two cases:
2738 mem -> register where the first half of the destination register
2739 is the same register used in the memory's address. Reload
2740 can create such insns.
2742 mem in this case will be either register indirect or register
2743 indirect plus a valid offset.
2745 register -> register move where REGNO(dst) == REGNO(src + 1)
2746 someone (Tim/Tege?) claimed this can happen for parameter loads.
2748 Handle mem -> register case first. */
2749 if (optype0 == REGOP
2750 && (optype1 == MEMOP || optype1 == OFFSOP)
2751 && refers_to_regno_p (REGNO (operands[0]), operands[1]))
2753 /* Do the late half first. */
2754 if (addreg1)
2755 output_asm_insn ("ldo 4(%0),%0", &addreg1);
2756 output_asm_insn (pa_singlemove_string (latehalf), latehalf);
2758 /* Then clobber. */
2759 if (addreg1)
2760 output_asm_insn ("ldo -4(%0),%0", &addreg1);
2761 return pa_singlemove_string (operands);
2764 /* Now handle register -> register case. */
2765 if (optype0 == REGOP && optype1 == REGOP
2766 && REGNO (operands[0]) == REGNO (operands[1]) + 1)
2768 output_asm_insn (pa_singlemove_string (latehalf), latehalf);
2769 return pa_singlemove_string (operands);
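/* For instance, copying the register pair %r4/%r5 into %r5/%r6 would
   clobber the source's second word if done low word first, so the
   late half (%r5 -> %r6) is emitted before the early half. */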
2772 /* Normal case: do the two words, low-numbered first. */
2774 output_asm_insn (pa_singlemove_string (operands), operands);
2776 /* Make any unoffsettable addresses point at high-numbered word. */
2777 if (addreg0)
2778 output_asm_insn ("ldo 4(%0),%0", &addreg0);
2779 if (addreg1)
2780 output_asm_insn ("ldo 4(%0),%0", &addreg1);
2782 /* Do high-numbered word. */
2783 if (highonly)
2784 output_asm_insn ("ldil L'%1,%0", latehalf);
2785 else
2786 output_asm_insn (pa_singlemove_string (latehalf), latehalf);
2788 /* Undo the adds we just did. */
2789 if (addreg0)
2790 output_asm_insn ("ldo -4(%0),%0", &addreg0);
2791 if (addreg1)
2792 output_asm_insn ("ldo -4(%0),%0", &addreg1);
2794 return "";
2797 const char *
2798 pa_output_fp_move_double (rtx *operands)
2800 if (FP_REG_P (operands[0]))
2802 if (FP_REG_P (operands[1])
2803 || operands[1] == CONST0_RTX (GET_MODE (operands[0])))
2804 output_asm_insn ("fcpy,dbl %f1,%0", operands);
2805 else
2806 output_asm_insn ("fldd%F1 %1,%0", operands);
2808 else if (FP_REG_P (operands[1]))
2810 output_asm_insn ("fstd%F0 %1,%0", operands);
2812 else
2814 rtx xoperands[2];
2816 gcc_assert (operands[1] == CONST0_RTX (GET_MODE (operands[0])));
2818 /* This is a pain. You have to be prepared to deal with an
2819 arbitrary address here including pre/post increment/decrement.
2821 So avoid this in the MD. */
2822 gcc_assert (GET_CODE (operands[0]) == REG);
2824 xoperands[1] = gen_rtx_REG (SImode, REGNO (operands[0]) + 1);
2825 xoperands[0] = operands[0];
2826 output_asm_insn ("copy %%r0,%0\n\tcopy %%r0,%1", xoperands);
2828 return "";
2831 /* Return a REG that occurs in ADDR with coefficient 1.
2832 ADDR can be effectively incremented by incrementing REG. */
2834 static rtx
2835 find_addr_reg (rtx addr)
2837 while (GET_CODE (addr) == PLUS)
2839 if (GET_CODE (XEXP (addr, 0)) == REG)
2840 addr = XEXP (addr, 0);
2841 else if (GET_CODE (XEXP (addr, 1)) == REG)
2842 addr = XEXP (addr, 1);
2843 else if (CONSTANT_P (XEXP (addr, 0)))
2844 addr = XEXP (addr, 1);
2845 else if (CONSTANT_P (XEXP (addr, 1)))
2846 addr = XEXP (addr, 0);
2847 else
2848 gcc_unreachable ();
2850 gcc_assert (GET_CODE (addr) == REG);
2851 return addr;
2854 /* Emit code to perform a block move.
2856 OPERANDS[0] is the destination pointer as a REG, clobbered.
2857 OPERANDS[1] is the source pointer as a REG, clobbered.
2858 OPERANDS[2] is a register for temporary storage.
2859 OPERANDS[3] is a register for temporary storage.
2860 OPERANDS[4] is the size as a CONST_INT.
2861 OPERANDS[5] is the alignment safe to use, as a CONST_INT.
2862 OPERANDS[6] is another temporary register. */
2864 const char *
2865 pa_output_block_move (rtx *operands, int size_is_constant ATTRIBUTE_UNUSED)
2867 int align = INTVAL (operands[5]);
2868 unsigned long n_bytes = INTVAL (operands[4]);
2870 /* We can't move more than a word at a time because the PA
2871 has no integer move insns longer than a word. (Could use fp mem ops?) */
2872 if (align > (TARGET_64BIT ? 8 : 4))
2873 align = (TARGET_64BIT ? 8 : 4);
2875 /* Note that we know each loop below will execute at least twice
2876 (else we would have open-coded the copy). */
2877 switch (align)
2879 case 8:
2880 /* Pre-adjust the loop counter. */
2881 operands[4] = GEN_INT (n_bytes - 16);
2882 output_asm_insn ("ldi %4,%2", operands);
2884 /* Copying loop. */
2885 output_asm_insn ("ldd,ma 8(%1),%3", operands);
2886 output_asm_insn ("ldd,ma 8(%1),%6", operands);
2887 output_asm_insn ("std,ma %3,8(%0)", operands);
2888 output_asm_insn ("addib,>= -16,%2,.-12", operands);
2889 output_asm_insn ("std,ma %6,8(%0)", operands);
2891 /* Handle the residual. There could be up to 7 bytes of
2892 residual to copy! */
2893 if (n_bytes % 16 != 0)
2895 operands[4] = GEN_INT (n_bytes % 8);
2896 if (n_bytes % 16 >= 8)
2897 output_asm_insn ("ldd,ma 8(%1),%3", operands);
2898 if (n_bytes % 8 != 0)
2899 output_asm_insn ("ldd 0(%1),%6", operands);
2900 if (n_bytes % 16 >= 8)
2901 output_asm_insn ("std,ma %3,8(%0)", operands);
2902 if (n_bytes % 8 != 0)
2903 output_asm_insn ("stdby,e %6,%4(%0)", operands);
2905 return "";
2907 case 4:
2908 /* Pre-adjust the loop counter. */
2909 operands[4] = GEN_INT (n_bytes - 8);
2910 output_asm_insn ("ldi %4,%2", operands);
2912 /* Copying loop. */
2913 output_asm_insn ("{ldws|ldw},ma 4(%1),%3", operands);
2914 output_asm_insn ("{ldws|ldw},ma 4(%1),%6", operands);
2915 output_asm_insn ("{stws|stw},ma %3,4(%0)", operands);
2916 output_asm_insn ("addib,>= -8,%2,.-12", operands);
2917 output_asm_insn ("{stws|stw},ma %6,4(%0)", operands);
2919 /* Handle the residual. There could be up to 7 bytes of
2920 residual to copy! */
2921 if (n_bytes % 8 != 0)
2923 operands[4] = GEN_INT (n_bytes % 4);
2924 if (n_bytes % 8 >= 4)
2925 output_asm_insn ("{ldws|ldw},ma 4(%1),%3", operands);
2926 if (n_bytes % 4 != 0)
2927 output_asm_insn ("ldw 0(%1),%6", operands);
2928 if (n_bytes % 8 >= 4)
2929 output_asm_insn ("{stws|stw},ma %3,4(%0)", operands);
2930 if (n_bytes % 4 != 0)
2931 output_asm_insn ("{stbys|stby},e %6,%4(%0)", operands);
2933 return "";
2935 case 2:
2936 /* Pre-adjust the loop counter. */
2937 operands[4] = GEN_INT (n_bytes - 4);
2938 output_asm_insn ("ldi %4,%2", operands);
2940 /* Copying loop. */
2941 output_asm_insn ("{ldhs|ldh},ma 2(%1),%3", operands);
2942 output_asm_insn ("{ldhs|ldh},ma 2(%1),%6", operands);
2943 output_asm_insn ("{sths|sth},ma %3,2(%0)", operands);
2944 output_asm_insn ("addib,>= -4,%2,.-12", operands);
2945 output_asm_insn ("{sths|sth},ma %6,2(%0)", operands);
2947 /* Handle the residual. */
2948 if (n_bytes % 4 != 0)
2950 if (n_bytes % 4 >= 2)
2951 output_asm_insn ("{ldhs|ldh},ma 2(%1),%3", operands);
2952 if (n_bytes % 2 != 0)
2953 output_asm_insn ("ldb 0(%1),%6", operands);
2954 if (n_bytes % 4 >= 2)
2955 output_asm_insn ("{sths|sth},ma %3,2(%0)", operands);
2956 if (n_bytes % 2 != 0)
2957 output_asm_insn ("stb %6,0(%0)", operands);
2959 return "";
2961 case 1:
2962 /* Pre-adjust the loop counter. */
2963 operands[4] = GEN_INT (n_bytes - 2);
2964 output_asm_insn ("ldi %4,%2", operands);
2966 /* Copying loop. */
2967 output_asm_insn ("{ldbs|ldb},ma 1(%1),%3", operands);
2968 output_asm_insn ("{ldbs|ldb},ma 1(%1),%6", operands);
2969 output_asm_insn ("{stbs|stb},ma %3,1(%0)", operands);
2970 output_asm_insn ("addib,>= -2,%2,.-12", operands);
2971 output_asm_insn ("{stbs|stb},ma %6,1(%0)", operands);
2973 /* Handle the residual. */
2974 if (n_bytes % 2 != 0)
2976 output_asm_insn ("ldb 0(%1),%3", operands);
2977 output_asm_insn ("stb %3,0(%0)", operands);
2979 return "";
2981 default:
2982 gcc_unreachable ();
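/* Worked example (sketch): n_bytes = 22 with 4-byte alignment
   pre-loads the loop counter with 14, runs the unrolled copying loop
   twice (16 bytes), then copies a 4-byte residual with ldw/stw and
   the final 2 bytes with ldw plus a partial-word stby,e store. */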
2986 /* Count the number of insns necessary to handle this block move.
2988 Basic structure is the same as pa_output_block_move, except that we
2989 count insns rather than emit them. */
2991 static int
2992 compute_movmem_length (rtx_insn *insn)
2994 rtx pat = PATTERN (insn);
2995 unsigned int align = INTVAL (XEXP (XVECEXP (pat, 0, 7), 0));
2996 unsigned long n_bytes = INTVAL (XEXP (XVECEXP (pat, 0, 6), 0));
2997 unsigned int n_insns = 0;
2999 /* We can't move more than a word at a time because the PA
3000 has no integer move insns longer than a word. (Could use fp mem ops?) */
3001 if (align > (TARGET_64BIT ? 8 : 4))
3002 align = (TARGET_64BIT ? 8 : 4);
3004 /* The basic copying loop. */
3005 n_insns = 6;
3007 /* Residuals. */
3008 if (n_bytes % (2 * align) != 0)
3010 if ((n_bytes % (2 * align)) >= align)
3011 n_insns += 2;
3013 if ((n_bytes % align) != 0)
3014 n_insns += 2;
3017 /* Lengths are expressed in bytes now; each insn is 4 bytes. */
3018 return n_insns * 4;
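/* For the n_bytes = 22, align = 4 example sketched above this gives
   6 + 2 + 2 = 10 insns, i.e. a length of 40 bytes. */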
3021 /* Emit code to perform a block clear.
3023 OPERANDS[0] is the destination pointer as a REG, clobbered.
3024 OPERANDS[1] is a register for temporary storage.
3025 OPERANDS[2] is the size as a CONST_INT.
3026 OPERANDS[3] is the alignment safe to use, as a CONST_INT. */
3028 const char *
3029 pa_output_block_clear (rtx *operands, int size_is_constant ATTRIBUTE_UNUSED)
3031 int align = INTVAL (operands[3]);
3032 unsigned long n_bytes = INTVAL (operands[2]);
3034 /* We can't clear more than a word at a time because the PA
3035 has no integer move insns longer than a word. */
3036 if (align > (TARGET_64BIT ? 8 : 4))
3037 align = (TARGET_64BIT ? 8 : 4);
3039 /* Note that we know each loop below will execute at least twice
3040 (else we would have open-coded the clear). */
3041 switch (align)
3043 case 8:
3044 /* Pre-adjust the loop counter. */
3045 operands[2] = GEN_INT (n_bytes - 16);
3046 output_asm_insn ("ldi %2,%1", operands);
3048 /* Loop. */
3049 output_asm_insn ("std,ma %%r0,8(%0)", operands);
3050 output_asm_insn ("addib,>= -16,%1,.-4", operands);
3051 output_asm_insn ("std,ma %%r0,8(%0)", operands);
3053 /* Handle the residual. There could be up to 7 bytes of
3054 residual to copy! */
3055 if (n_bytes % 16 != 0)
3057 operands[2] = GEN_INT (n_bytes % 8);
3058 if (n_bytes % 16 >= 8)
3059 output_asm_insn ("std,ma %%r0,8(%0)", operands);
3060 if (n_bytes % 8 != 0)
3061 output_asm_insn ("stdby,e %%r0,%2(%0)", operands);
3063 return "";
3065 case 4:
3066 /* Pre-adjust the loop counter. */
3067 operands[2] = GEN_INT (n_bytes - 8);
3068 output_asm_insn ("ldi %2,%1", operands);
3070 /* Loop. */
3071 output_asm_insn ("{stws|stw},ma %%r0,4(%0)", operands);
3072 output_asm_insn ("addib,>= -8,%1,.-4", operands);
3073 output_asm_insn ("{stws|stw},ma %%r0,4(%0)", operands);
3075 /* Handle the residual. There could be up to 7 bytes of
3076 residual to copy! */
3077 if (n_bytes % 8 != 0)
3079 operands[2] = GEN_INT (n_bytes % 4);
3080 if (n_bytes % 8 >= 4)
3081 output_asm_insn ("{stws|stw},ma %%r0,4(%0)", operands);
3082 if (n_bytes % 4 != 0)
3083 output_asm_insn ("{stbys|stby},e %%r0,%2(%0)", operands);
3085 return "";
3087 case 2:
3088 /* Pre-adjust the loop counter. */
3089 operands[2] = GEN_INT (n_bytes - 4);
3090 output_asm_insn ("ldi %2,%1", operands);
3092 /* Loop. */
3093 output_asm_insn ("{sths|sth},ma %%r0,2(%0)", operands);
3094 output_asm_insn ("addib,>= -4,%1,.-4", operands);
3095 output_asm_insn ("{sths|sth},ma %%r0,2(%0)", operands);
3097 /* Handle the residual. */
3098 if (n_bytes % 4 != 0)
3100 if (n_bytes % 4 >= 2)
3101 output_asm_insn ("{sths|sth},ma %%r0,2(%0)", operands);
3102 if (n_bytes % 2 != 0)
3103 output_asm_insn ("stb %%r0,0(%0)", operands);
3105 return "";
3107 case 1:
3108 /* Pre-adjust the loop counter. */
3109 operands[2] = GEN_INT (n_bytes - 2);
3110 output_asm_insn ("ldi %2,%1", operands);
3112 /* Loop. */
3113 output_asm_insn ("{stbs|stb},ma %%r0,1(%0)", operands);
3114 output_asm_insn ("addib,>= -2,%1,.-4", operands);
3115 output_asm_insn ("{stbs|stb},ma %%r0,1(%0)", operands);
3117 /* Handle the residual. */
3118 if (n_bytes % 2 != 0)
3119 output_asm_insn ("stb %%r0,0(%0)", operands);
3121 return "";
3123 default:
3124 gcc_unreachable ();
3128 /* Count the number of insns necessary to handle this block clear.
3130 Basic structure is the same as pa_output_block_clear, except that we
3131 count insns rather than emit them. */
3133 static int
3134 compute_clrmem_length (rtx_insn *insn)
3136 rtx pat = PATTERN (insn);
3137 unsigned int align = INTVAL (XEXP (XVECEXP (pat, 0, 4), 0));
3138 unsigned long n_bytes = INTVAL (XEXP (XVECEXP (pat, 0, 3), 0));
3139 unsigned int n_insns = 0;
3141 /* We can't clear more than a word at a time because the PA
3142 has no integer move insns longer than a word. */
3143 if (align > (TARGET_64BIT ? 8 : 4))
3144 align = (TARGET_64BIT ? 8 : 4);
3146 /* The basic loop. */
3147 n_insns = 4;
3149 /* Residuals. */
3150 if (n_bytes % (2 * align) != 0)
3152 if ((n_bytes % (2 * align)) >= align)
3153 n_insns++;
3155 if ((n_bytes % align) != 0)
3156 n_insns++;
3159 /* Lengths are expressed in bytes now; each insn is 4 bytes. */
3160 return n_insns * 4;
3164 const char *
3165 pa_output_and (rtx *operands)
3167 if (GET_CODE (operands[2]) == CONST_INT && INTVAL (operands[2]) != 0)
3169 unsigned HOST_WIDE_INT mask = INTVAL (operands[2]);
3170 int ls0, ls1, ms0, p, len;
3172 for (ls0 = 0; ls0 < 32; ls0++)
3173 if ((mask & (1 << ls0)) == 0)
3174 break;
3176 for (ls1 = ls0; ls1 < 32; ls1++)
3177 if ((mask & (1 << ls1)) != 0)
3178 break;
3180 for (ms0 = ls1; ms0 < 32; ms0++)
3181 if ((mask & (1 << ms0)) == 0)
3182 break;
3184 gcc_assert (ms0 == 32);
3186 if (ls1 == 32)
3188 len = ls0;
3190 gcc_assert (len);
3192 operands[2] = GEN_INT (len);
3193 return "{extru|extrw,u} %1,31,%2,%0";
3195 else
3197 /* We could use this `depi' for the case above as well, but `depi'
3198 requires one more register file access than an `extru'. */
3200 p = 31 - ls0;
3201 len = ls1 - ls0;
3203 operands[2] = GEN_INT (p);
3204 operands[3] = GEN_INT (len);
3205 return "{depi|depwi} 0,%2,%3,%0";
3208 else
3209 return "and %1,%2,%0";
3212 /* Return a string to perform a bitwise-and of operands[1] with operands[2]
3213 storing the result in operands[0]. */
3214 const char *
3215 pa_output_64bit_and (rtx *operands)
3217 if (GET_CODE (operands[2]) == CONST_INT && INTVAL (operands[2]) != 0)
3219 unsigned HOST_WIDE_INT mask = INTVAL (operands[2]);
3220 int ls0, ls1, ms0, p, len;
3222 for (ls0 = 0; ls0 < HOST_BITS_PER_WIDE_INT; ls0++)
3223 if ((mask & ((unsigned HOST_WIDE_INT) 1 << ls0)) == 0)
3224 break;
3226 for (ls1 = ls0; ls1 < HOST_BITS_PER_WIDE_INT; ls1++)
3227 if ((mask & ((unsigned HOST_WIDE_INT) 1 << ls1)) != 0)
3228 break;
3230 for (ms0 = ls1; ms0 < HOST_BITS_PER_WIDE_INT; ms0++)
3231 if ((mask & ((unsigned HOST_WIDE_INT) 1 << ms0)) == 0)
3232 break;
3234 gcc_assert (ms0 == HOST_BITS_PER_WIDE_INT);
3236 if (ls1 == HOST_BITS_PER_WIDE_INT)
3238 len = ls0;
3240 gcc_assert (len);
3242 operands[2] = GEN_INT (len);
3243 return "extrd,u %1,63,%2,%0";
3245 else
3247 /* We could use this `depdi' for the case above as well, but `depdi'
3248 requires one more register file access than an `extrd,u'. */
3250 p = 63 - ls0;
3251 len = ls1 - ls0;
3253 operands[2] = GEN_INT (p);
3254 operands[3] = GEN_INT (len);
3255 return "depdi 0,%2,%3,%0";
3258 else
3259 return "and %1,%2,%0";
3262 const char *
3263 pa_output_ior (rtx *operands)
3265 unsigned HOST_WIDE_INT mask = INTVAL (operands[2]);
3266 int bs0, bs1, p, len;
3268 if (INTVAL (operands[2]) == 0)
3269 return "copy %1,%0";
3271 for (bs0 = 0; bs0 < 32; bs0++)
3272 if ((mask & (1 << bs0)) != 0)
3273 break;
3275 for (bs1 = bs0; bs1 < 32; bs1++)
3276 if ((mask & (1 << bs1)) == 0)
3277 break;
3279 gcc_assert (bs1 == 32 || ((unsigned HOST_WIDE_INT) 1 << bs1) > mask);
3281 p = 31 - bs0;
3282 len = bs1 - bs0;
3284 operands[2] = GEN_INT (p);
3285 operands[3] = GEN_INT (len);
3286 return "{depi|depwi} -1,%2,%3,%0";
3289 /* Return a string to perform a bitwise inclusive-or of operands[1] with
3290 storing the result in operands[0]. */
3291 const char *
3292 pa_output_64bit_ior (rtx *operands)
3294 unsigned HOST_WIDE_INT mask = INTVAL (operands[2]);
3295 int bs0, bs1, p, len;
3297 if (INTVAL (operands[2]) == 0)
3298 return "copy %1,%0";
3300 for (bs0 = 0; bs0 < HOST_BITS_PER_WIDE_INT; bs0++)
3301 if ((mask & ((unsigned HOST_WIDE_INT) 1 << bs0)) != 0)
3302 break;
3304 for (bs1 = bs0; bs1 < HOST_BITS_PER_WIDE_INT; bs1++)
3305 if ((mask & ((unsigned HOST_WIDE_INT) 1 << bs1)) == 0)
3306 break;
3308 gcc_assert (bs1 == HOST_BITS_PER_WIDE_INT
3309 || ((unsigned HOST_WIDE_INT) 1 << bs1) > mask);
3311 p = 63 - bs0;
3312 len = bs1 - bs0;
3314 operands[2] = GEN_INT (p);
3315 operands[3] = GEN_INT (len);
3316 return "depdi -1,%2,%3,%0";
3319 /* Target hook for assembling integer objects. This code handles
3320 aligned SI and DI integers specially since function references
3321 must be preceded by P%. */
3323 static bool
3324 pa_assemble_integer (rtx x, unsigned int size, int aligned_p)
3326 bool result;
3327 tree decl = NULL;
3329 /* When we have a SYMBOL_REF with a SYMBOL_REF_DECL, we need to
3330 call assemble_external and set the SYMBOL_REF_DECL to NULL before
3331 calling output_addr_const. Otherwise, it may call assemble_external
3332 in the midst of outputting the assembler code for the SYMBOL_REF.
3333 We restore the SYMBOL_REF_DECL after the output is done. */
3334 if (GET_CODE (x) == SYMBOL_REF)
3336 decl = SYMBOL_REF_DECL (x);
3337 if (decl)
3339 assemble_external (decl);
3340 SET_SYMBOL_REF_DECL (x, NULL);
3344 if (size == UNITS_PER_WORD
3345 && aligned_p
3346 && function_label_operand (x, VOIDmode))
3348 fputs (size == 8 ? "\t.dword\t" : "\t.word\t", asm_out_file);
3350 /* We don't want an OPD when generating fast indirect calls. */
3351 if (!TARGET_FAST_INDIRECT_CALLS)
3352 fputs ("P%", asm_out_file);
3354 output_addr_const (asm_out_file, x);
3355 fputc ('\n', asm_out_file);
3356 result = true;
3358 else
3359 result = default_assemble_integer (x, size, aligned_p);
3361 if (decl)
3362 SET_SYMBOL_REF_DECL (x, decl);
3364 return result;
3367 /* Output an ascii string. */
3368 void
3369 pa_output_ascii (FILE *file, const char *p, int size)
3371 int i;
3372 int chars_output;
3373 unsigned char partial_output[16]; /* Max space 4 chars can occupy. */
3375 /* The HP assembler can only take strings of 256 characters at one
3376 time. This is a limitation on input line length, *not* the
3377 length of the string. Sigh. Even worse, it seems that the
3378 restriction is in number of input characters (see \xnn &
3379 \whatever). So we have to do this very carefully. */
3381 fputs ("\t.STRING \"", file);
3383 chars_output = 0;
3384 for (i = 0; i < size; i += 4)
3386 int co = 0;
3387 int io = 0;
3388 for (io = 0, co = 0; io < MIN (4, size - i); io++)
3390 register unsigned int c = (unsigned char) p[i + io];
3392 if (c == '\"' || c == '\\')
3393 partial_output[co++] = '\\';
3394 if (c >= ' ' && c < 0177)
3395 partial_output[co++] = c;
3396 else
3398 unsigned int hexd;
3399 partial_output[co++] = '\\';
3400 partial_output[co++] = 'x';
3401 hexd = c / 16 - 0 + '0';
3402 if (hexd > '9')
3403 hexd -= '9' - 'a' + 1;
3404 partial_output[co++] = hexd;
3405 hexd = c % 16 - 0 + '0';
3406 if (hexd > '9')
3407 hexd -= '9' - 'a' + 1;
3408 partial_output[co++] = hexd;
3411 if (chars_output + co > 243)
3413 fputs ("\"\n\t.STRING \"", file);
3414 chars_output = 0;
3416 fwrite (partial_output, 1, (size_t) co, file);
3417 chars_output += co;
3418 co = 0;
3420 fputs ("\"\n", file);
3423 /* Try to rewrite floating point comparisons & branches to avoid
3424 useless add,tr insns.
3426 CHECK_NOTES is nonzero if we should examine REG_DEAD notes
3427 to see if FPCC is dead. CHECK_NOTES is nonzero for the
3428 first attempt to remove useless add,tr insns. It is zero
3429 for the second pass as reorg sometimes leaves bogus REG_DEAD
3430 notes lying around.
3432 When CHECK_NOTES is zero we can only eliminate add,tr insns
3433 when there's a 1:1 correspondence between fcmp and ftest/fbranch
3434 instructions. */
3435 static void
3436 remove_useless_addtr_insns (int check_notes)
3438 rtx_insn *insn;
3439 static int pass = 0;
3441 /* This is fairly cheap, so always run it when optimizing. */
3442 if (optimize > 0)
3444 int fcmp_count = 0;
3445 int fbranch_count = 0;
3447 /* Walk all the insns in this function looking for fcmp & fbranch
3448 instructions. Keep track of how many of each we find. */
3449 for (insn = get_insns (); insn; insn = next_insn (insn))
3451 rtx tmp;
3453 /* Ignore anything that isn't an INSN or a JUMP_INSN. */
3454 if (! NONJUMP_INSN_P (insn) && ! JUMP_P (insn))
3455 continue;
3457 tmp = PATTERN (insn);
3459 /* It must be a set. */
3460 if (GET_CODE (tmp) != SET)
3461 continue;
3463 /* If the destination is CCFP, then we've found an fcmp insn. */
3464 tmp = SET_DEST (tmp);
3465 if (GET_CODE (tmp) == REG && REGNO (tmp) == 0)
3467 fcmp_count++;
3468 continue;
3471 tmp = PATTERN (insn);
3472 /* If this is an fbranch instruction, bump the fbranch counter. */
3473 if (GET_CODE (tmp) == SET
3474 && SET_DEST (tmp) == pc_rtx
3475 && GET_CODE (SET_SRC (tmp)) == IF_THEN_ELSE
3476 && GET_CODE (XEXP (SET_SRC (tmp), 0)) == NE
3477 && GET_CODE (XEXP (XEXP (SET_SRC (tmp), 0), 0)) == REG
3478 && REGNO (XEXP (XEXP (SET_SRC (tmp), 0), 0)) == 0)
3480 fbranch_count++;
3481 continue;
3486 /* Find all floating point compare + branch insns. If possible,
3487 reverse the comparison & the branch to avoid add,tr insns. */
3488 for (insn = get_insns (); insn; insn = next_insn (insn))
3490 rtx tmp;
3491 rtx_insn *next;
3493 /* Ignore anything that isn't an INSN. */
3494 if (! NONJUMP_INSN_P (insn))
3495 continue;
3497 tmp = PATTERN (insn);
3499 /* It must be a set. */
3500 if (GET_CODE (tmp) != SET)
3501 continue;
3503 /* The destination must be CCFP, which is register zero. */
3504 tmp = SET_DEST (tmp);
3505 if (GET_CODE (tmp) != REG || REGNO (tmp) != 0)
3506 continue;
3508 /* INSN should be a set of CCFP.
3510 See if the result of this insn is used in a reversed FP
3511 conditional branch. If so, reverse our condition and
3512 the branch. Doing so avoids useless add,tr insns. */
3513 next = next_insn (insn);
3514 while (next)
3516 /* Jumps, calls and labels stop our search. */
3517 if (JUMP_P (next) || CALL_P (next) || LABEL_P (next))
3518 break;
3520 /* As does another fcmp insn. */
3521 if (NONJUMP_INSN_P (next)
3522 && GET_CODE (PATTERN (next)) == SET
3523 && GET_CODE (SET_DEST (PATTERN (next))) == REG
3524 && REGNO (SET_DEST (PATTERN (next))) == 0)
3525 break;
3527 next = next_insn (next);
3530 /* Is NEXT a branch? */
3531 if (next && JUMP_P (next))
3533 rtx pattern = PATTERN (next);
3535 /* If it is a reversed fp conditional branch (e.g. uses add,tr)
3536 and CCFP dies, then reverse our conditional and the branch
3537 to avoid the add,tr. */
3538 if (GET_CODE (pattern) == SET
3539 && SET_DEST (pattern) == pc_rtx
3540 && GET_CODE (SET_SRC (pattern)) == IF_THEN_ELSE
3541 && GET_CODE (XEXP (SET_SRC (pattern), 0)) == NE
3542 && GET_CODE (XEXP (XEXP (SET_SRC (pattern), 0), 0)) == REG
3543 && REGNO (XEXP (XEXP (SET_SRC (pattern), 0), 0)) == 0
3544 && GET_CODE (XEXP (SET_SRC (pattern), 1)) == PC
3545 && (fcmp_count == fbranch_count
3546 || (check_notes
3547 && find_regno_note (next, REG_DEAD, 0))))
3549 /* Reverse the branch. */
3550 tmp = XEXP (SET_SRC (pattern), 1);
3551 XEXP (SET_SRC (pattern), 1) = XEXP (SET_SRC (pattern), 2);
3552 XEXP (SET_SRC (pattern), 2) = tmp;
3553 INSN_CODE (next) = -1;
3555 /* Reverse our condition. */
3556 tmp = PATTERN (insn);
3557 PUT_CODE (XEXP (tmp, 1),
3558 (reverse_condition_maybe_unordered
3559 (GET_CODE (XEXP (tmp, 1)))));
3565 pass = !pass;
3569 /* You may have trouble believing this, but this is the 32 bit HP-PA
3570 stack layout. Wow.
3572 Offset Contents
3574 Variable arguments (optional; any number may be allocated)
3576 SP-(4*(N+9)) arg word N
3578 SP-56 arg word 5
3579 SP-52 arg word 4
3581 Fixed arguments (must be allocated; may remain unused)
3583 SP-48 arg word 3
3584 SP-44 arg word 2
3585 SP-40 arg word 1
3586 SP-36 arg word 0
3588 Frame Marker
3590 SP-32 External Data Pointer (DP)
3591 SP-28 External sr4
3592 SP-24 External/stub RP (RP')
3593 SP-20 Current RP
3594 SP-16 Static Link
3595 SP-12 Clean up
3596 SP-8 Calling Stub RP (RP'')
3597 SP-4 Previous SP
3599 Top of Frame
3601 SP-0 Stack Pointer (points to next available address)
3605 /* This function saves registers as follows. Registers marked with ' are
3606 this function's registers (as opposed to the previous function's).
3607 If a frame_pointer isn't needed, r4 is saved as a general register;
3608 the space for the frame pointer is still allocated, though, to keep
3609 things simple.
3612 Top of Frame
3614 SP (FP') Previous FP
3615 SP + 4 Alignment filler (sigh)
3616 SP + 8 Space for locals reserved here.
3620 SP + n All call saved registers used.
3624 SP + o All call saved fp registers used.
3628 SP + p (SP') points to next available address.
3632 /* Global variables set by output_function_prologue(). */
3633 /* Size of frame. Need to know this to emit return insns from
3634 leaf procedures. */
3635 static HOST_WIDE_INT actual_fsize, local_fsize;
3636 static int save_fregs;
3638 /* Emit RTL to store REG at the memory location specified by BASE+DISP.
3639 Handle case where DISP > 8k by using the add_high_const patterns.
3641 Note that in the DISP > 8k case, we will leave the high part of the
3642 address in %r1. There is code in expand_hppa_{prologue,epilogue} that knows this. */
3644 static void
3645 store_reg (int reg, HOST_WIDE_INT disp, int base)
3647 rtx dest, src, basereg;
3648 rtx_insn *insn;
3650 src = gen_rtx_REG (word_mode, reg);
3651 basereg = gen_rtx_REG (Pmode, base);
3652 if (VAL_14_BITS_P (disp))
3654 dest = gen_rtx_MEM (word_mode, plus_constant (Pmode, basereg, disp));
3655 insn = emit_move_insn (dest, src);
3657 else if (TARGET_64BIT && !VAL_32_BITS_P (disp))
3659 rtx delta = GEN_INT (disp);
3660 rtx tmpreg = gen_rtx_REG (Pmode, 1);
3662 emit_move_insn (tmpreg, delta);
3663 insn = emit_move_insn (tmpreg, gen_rtx_PLUS (Pmode, tmpreg, basereg));
3664 if (DO_FRAME_NOTES)
3666 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
3667 gen_rtx_SET (tmpreg,
3668 gen_rtx_PLUS (Pmode, basereg, delta)));
3669 RTX_FRAME_RELATED_P (insn) = 1;
3671 dest = gen_rtx_MEM (word_mode, tmpreg);
3672 insn = emit_move_insn (dest, src);
3674 else
3676 rtx delta = GEN_INT (disp);
3677 rtx high = gen_rtx_PLUS (Pmode, basereg, gen_rtx_HIGH (Pmode, delta));
3678 rtx tmpreg = gen_rtx_REG (Pmode, 1);
3680 emit_move_insn (tmpreg, high);
3681 dest = gen_rtx_MEM (word_mode, gen_rtx_LO_SUM (Pmode, tmpreg, delta));
3682 insn = emit_move_insn (dest, src);
3683 if (DO_FRAME_NOTES)
3684 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
3685 gen_rtx_SET (gen_rtx_MEM (word_mode,
3686 gen_rtx_PLUS (word_mode,
3687 basereg,
3688 delta)),
3689 src));
3692 if (DO_FRAME_NOTES)
3693 RTX_FRAME_RELATED_P (insn) = 1;
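/* For example, store_reg with DISP = 20000 (too large for 14 bits but
   within 32) takes the final branch above: %r1 receives the high part
   of BASE + 20000 and the store goes through a LO_SUM address, roughly
   "addil L'20000,%base" followed by "stw %reg,R'20000(%r1)". */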
3696 /* Emit RTL to store REG at the memory location specified by BASE and then
3697 add MOD to BASE. MOD must be <= 8k. */
3699 static void
3700 store_reg_modify (int base, int reg, HOST_WIDE_INT mod)
3702 rtx basereg, srcreg, delta;
3703 rtx_insn *insn;
3705 gcc_assert (VAL_14_BITS_P (mod));
3707 basereg = gen_rtx_REG (Pmode, base);
3708 srcreg = gen_rtx_REG (word_mode, reg);
3709 delta = GEN_INT (mod);
3711 insn = emit_insn (gen_post_store (basereg, srcreg, delta));
3712 if (DO_FRAME_NOTES)
3714 RTX_FRAME_RELATED_P (insn) = 1;
3716 /* RTX_FRAME_RELATED_P must be set on each frame related set
3717 in a parallel with more than one element. */
3718 RTX_FRAME_RELATED_P (XVECEXP (PATTERN (insn), 0, 0)) = 1;
3719 RTX_FRAME_RELATED_P (XVECEXP (PATTERN (insn), 0, 1)) = 1;
3723 /* Emit RTL to set REG to the value specified by BASE+DISP. Handle case
3724 where DISP > 8k by using the add_high_const patterns. NOTE indicates
3725 whether to add a frame note or not.
3727 In the DISP > 8k case, we leave the high part of the address in %r1.
3728 There is code in expand_hppa_{prologue,epilogue} that knows about this. */
3730 static void
3731 set_reg_plus_d (int reg, int base, HOST_WIDE_INT disp, int note)
3733 rtx_insn *insn;
3735 if (VAL_14_BITS_P (disp))
3737 insn = emit_move_insn (gen_rtx_REG (Pmode, reg),
3738 plus_constant (Pmode,
3739 gen_rtx_REG (Pmode, base), disp));
3741 else if (TARGET_64BIT && !VAL_32_BITS_P (disp))
3743 rtx basereg = gen_rtx_REG (Pmode, base);
3744 rtx delta = GEN_INT (disp);
3745 rtx tmpreg = gen_rtx_REG (Pmode, 1);
3747 emit_move_insn (tmpreg, delta);
3748 insn = emit_move_insn (gen_rtx_REG (Pmode, reg),
3749 gen_rtx_PLUS (Pmode, tmpreg, basereg));
3750 if (DO_FRAME_NOTES)
3751 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
3752 gen_rtx_SET (tmpreg,
3753 gen_rtx_PLUS (Pmode, basereg, delta)));
3755 else
3757 rtx basereg = gen_rtx_REG (Pmode, base);
3758 rtx delta = GEN_INT (disp);
3759 rtx tmpreg = gen_rtx_REG (Pmode, 1);
3761 emit_move_insn (tmpreg,
3762 gen_rtx_PLUS (Pmode, basereg,
3763 gen_rtx_HIGH (Pmode, delta)));
3764 insn = emit_move_insn (gen_rtx_REG (Pmode, reg),
3765 gen_rtx_LO_SUM (Pmode, tmpreg, delta));
3768 if (DO_FRAME_NOTES && note)
3769 RTX_FRAME_RELATED_P (insn) = 1;
3772 HOST_WIDE_INT
3773 pa_compute_frame_size (poly_int64 size, int *fregs_live)
3775 int freg_saved = 0;
3776 int i, j;
3778 /* The code in pa_expand_prologue and pa_expand_epilogue must
3779 be consistent with the rounding and size calculation done here.
3780 Change them at the same time. */
3782 /* We do our own stack alignment. First, round the size of the
3783 stack locals up to a word boundary. */
3784 size = (size + UNITS_PER_WORD - 1) & ~(UNITS_PER_WORD - 1);
3786 /* Space for previous frame pointer + filler. If any frame is
3787 allocated, we need to add in the TARGET_STARTING_FRAME_OFFSET. We
3788 waste some space here for the sake of HP compatibility. The
3789 first slot is only used when the frame pointer is needed. */
3790 if (size || frame_pointer_needed)
3791 size += pa_starting_frame_offset ();
3793 /* If the current function calls __builtin_eh_return, then we need
3794 to allocate stack space for registers that will hold data for
3795 the exception handler. */
3796 if (DO_FRAME_NOTES && crtl->calls_eh_return)
3798 unsigned int i;
3800 for (i = 0; EH_RETURN_DATA_REGNO (i) != INVALID_REGNUM; ++i)
3801 continue;
3802 size += i * UNITS_PER_WORD;
3805 /* Account for space used by the callee general register saves. */
3806 for (i = 18, j = frame_pointer_needed ? 4 : 3; i >= j; i--)
3807 if (df_regs_ever_live_p (i))
3808 size += UNITS_PER_WORD;
3810 /* Account for space used by the callee floating point register saves. */
3811 for (i = FP_SAVED_REG_LAST; i >= FP_SAVED_REG_FIRST; i -= FP_REG_STEP)
3812 if (df_regs_ever_live_p (i)
3813 || (!TARGET_64BIT && df_regs_ever_live_p (i + 1)))
3815 freg_saved = 1;
3817 /* We always save both halves of the FP register, so always
3818 increment the frame size by 8 bytes. */
3819 size += 8;
3822 /* If any of the floating registers are saved, account for the
3823 alignment needed for the floating point register save block. */
3824 if (freg_saved)
3826 size = (size + 7) & ~7;
3827 if (fregs_live)
3828 *fregs_live = 1;
3831 /* The various ABIs include space for the outgoing parameters in the
3832 size of the current function's stack frame. We don't need to align
3833 for the outgoing arguments as their alignment is set by the final
3834 rounding for the frame as a whole. */
3835 size += crtl->outgoing_args_size;
3837 /* Allocate space for the fixed frame marker. This space must be
3838 allocated for any function that makes calls or allocates
3839 stack space. */
3840 if (!crtl->is_leaf || size)
3841 size += TARGET_64BIT ? 48 : 32;
3843 /* Finally, round to the preferred stack boundary. */
3844 return ((size + PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1)
3845 & ~(PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT - 1));
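/* A worked example with assumed values: with UNITS_PER_WORD == 4,
   13 bytes of locals round up to (13 + 3) & ~3 == 16.  After the
   frame marker, callee saves and outgoing args are added, the final
   rounding above snaps the total to the preferred stack boundary,
   e.g. (size + 63) & ~63 for a 64-byte (512-bit) boundary.  */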
3848 /* On HP-PA, move-double insns between fpu and cpu need an 8-byte block
3849 of memory. If any fpu reg is used in the function, we allocate
3850 such a block here, at the bottom of the frame, just in case it's needed.
3852 If this function is a leaf procedure, then we may choose not
3853 to do a "save" insn. The decision about whether or not
3854 to do this is made in regclass.c. */
3856 static void
3857 pa_output_function_prologue (FILE *file)
3859 /* The function's label and associated .PROC must never be
3860 separated and must be output *after* any profiling declarations
3861 to avoid changing spaces/subspaces within a procedure. */
3862 ASM_OUTPUT_LABEL (file, XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0));
3863 fputs ("\t.PROC\n", file);
3865 /* pa_expand_prologue does the dirty work now. We just need
3866 to output the assembler directives which denote the start
3867 of a function. */
3868 fprintf (file, "\t.CALLINFO FRAME=" HOST_WIDE_INT_PRINT_DEC, actual_fsize);
3869 if (crtl->is_leaf)
3870 fputs (",NO_CALLS", file);
3871 else
3872 fputs (",CALLS", file);
3873 if (rp_saved)
3874 fputs (",SAVE_RP", file);
3876 /* The SAVE_SP flag is used to indicate that register %r3 is stored
3877 at the beginning of the frame and that it is used as the frame
3878 pointer for the frame. We do this because our current frame
3879 layout doesn't conform to that specified in the HP runtime
3880 documentation and we need a way to indicate to programs such as
3881 GDB where %r3 is saved. The SAVE_SP flag was chosen because it
3882 isn't used by HP compilers but is supported by the assembler.
3883 However, SAVE_SP is supposed to indicate that the previous stack
3884 pointer has been saved in the frame marker. */
3885 if (frame_pointer_needed)
3886 fputs (",SAVE_SP", file);
3888 /* Pass on information about the number of callee register saves
3889 performed in the prologue.
3891 The compiler is supposed to pass the highest register number
3892 saved, the assembler then has to adjust that number before
3893 entering it into the unwind descriptor (to account for any
3894 caller saved registers with lower register numbers than the
3895 first callee saved register). */
3896 if (gr_saved)
3897 fprintf (file, ",ENTRY_GR=%d", gr_saved + 2);
3899 if (fr_saved)
3900 fprintf (file, ",ENTRY_FR=%d", fr_saved + 11);
3902 fputs ("\n\t.ENTRY\n", file);
3904 remove_useless_addtr_insns (0);
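/* Illustrative output (not captured from a real build): for a
   non-leaf function with a 128-byte frame that saves %r2 and three
   callee GRs (gr_saved == 3, hence ENTRY_GR == 5), the directives
   emitted above would look roughly like:

       .PROC
       .CALLINFO FRAME=128,CALLS,SAVE_RP,ENTRY_GR=5
       .ENTRY                                          */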
3907 void
3908 pa_expand_prologue (void)
3910 int merge_sp_adjust_with_store = 0;
3911 HOST_WIDE_INT size = get_frame_size ();
3912 HOST_WIDE_INT offset;
3913 int i;
3914 rtx tmpreg;
3915 rtx_insn *insn;
3917 gr_saved = 0;
3918 fr_saved = 0;
3919 save_fregs = 0;
3921 /* Compute total size for frame pointer, filler, locals and rounding to
3922 the next word boundary. Similar code appears in pa_compute_frame_size
3923 and must be changed in tandem with this code. */
3924 local_fsize = (size + UNITS_PER_WORD - 1) & ~(UNITS_PER_WORD - 1);
3925 if (local_fsize || frame_pointer_needed)
3926 local_fsize += pa_starting_frame_offset ();
3928 actual_fsize = pa_compute_frame_size (size, &save_fregs);
3929 if (flag_stack_usage_info)
3930 current_function_static_stack_size = actual_fsize;
3932 /* Compute a few things we will use often. */
3933 tmpreg = gen_rtx_REG (word_mode, 1);
3935 /* Save RP first. The calling conventions manual states RP will
3936 always be stored into the caller's frame at sp - 20 or sp - 16
3937 depending on which ABI is in use. */
3938 if (df_regs_ever_live_p (2) || crtl->calls_eh_return)
3940 store_reg (2, TARGET_64BIT ? -16 : -20, STACK_POINTER_REGNUM);
3941 rp_saved = true;
3943 else
3944 rp_saved = false;
3946 /* Allocate the local frame and set up the frame pointer if needed. */
3947 if (actual_fsize != 0)
3949 if (frame_pointer_needed)
3951 /* Copy the old frame pointer temporarily into %r1. Set up the
3952 new stack pointer, then store away the saved old frame pointer
3953 into the stack at sp and at the same time update the stack
3954 pointer by actual_fsize bytes. Two versions: the first
3955 handles small (<8k) frames, the second handles large (>=8k)
3956 frames. */
3957 insn = emit_move_insn (tmpreg, hard_frame_pointer_rtx);
3958 if (DO_FRAME_NOTES)
3959 RTX_FRAME_RELATED_P (insn) = 1;
3961 insn = emit_move_insn (hard_frame_pointer_rtx, stack_pointer_rtx);
3962 if (DO_FRAME_NOTES)
3963 RTX_FRAME_RELATED_P (insn) = 1;
3965 if (VAL_14_BITS_P (actual_fsize))
3966 store_reg_modify (STACK_POINTER_REGNUM, 1, actual_fsize);
3967 else
3969 /* It is incorrect to store the saved frame pointer at *sp,
3970 then increment sp (writes beyond the current stack boundary).
3972 So instead use stwm to store at *sp and post-increment the
3973 stack pointer as an atomic operation. Then increment sp to
3974 finish allocating the new frame. */
3975 HOST_WIDE_INT adjust1 = 8192 - 64;
3976 HOST_WIDE_INT adjust2 = actual_fsize - adjust1;
3978 store_reg_modify (STACK_POINTER_REGNUM, 1, adjust1);
3979 set_reg_plus_d (STACK_POINTER_REGNUM, STACK_POINTER_REGNUM,
3980 adjust2, 1);
3983 /* We set SAVE_SP in frames that need a frame pointer. Thus,
3984 we need to store the previous stack pointer (frame pointer)
3985 into the frame marker on targets that use the HP unwind
3986 library. This allows the HP unwind library to be used to
3987 unwind GCC frames. However, we are not fully compatible
3988 with the HP library because our frame layout differs from
3989 that specified in the HP runtime specification.
3991 We don't want a frame note on this instruction as the frame
3992 marker moves during dynamic stack allocation.
3994 This instruction also serves as a blockage to prevent
3995 register spills from being scheduled before the stack
3996 pointer is raised. This is necessary as we store
3997 registers using the frame pointer as a base register,
3998 and the frame pointer is set before sp is raised. */
3999 if (TARGET_HPUX_UNWIND_LIBRARY)
4001 rtx addr = gen_rtx_PLUS (word_mode, stack_pointer_rtx,
4002 GEN_INT (TARGET_64BIT ? -8 : -4));
4004 emit_move_insn (gen_rtx_MEM (word_mode, addr),
4005 hard_frame_pointer_rtx);
4007 else
4008 emit_insn (gen_blockage ());
4010 /* no frame pointer needed. */
4011 else
4013 /* In some cases we can perform the first callee register save
4014 and allocating the stack frame at the same time. If so, just
4015 make a note of it and defer allocating the frame until saving
4016 the callee registers. */
4017 if (VAL_14_BITS_P (actual_fsize) && local_fsize == 0)
4018 merge_sp_adjust_with_store = 1;
4019 /* Cannot optimize. Adjust the stack frame by actual_fsize
4020 bytes. */
4021 else
4022 set_reg_plus_d (STACK_POINTER_REGNUM, STACK_POINTER_REGNUM,
4023 actual_fsize, 1);
4027 /* Normal register save.
4029 Do not save the frame pointer in the frame_pointer_needed case. It
4030 was done earlier. */
4031 if (frame_pointer_needed)
4033 offset = local_fsize;
4035 /* Saving the EH return data registers in the frame is the simplest
4036 way to get the frame unwind information emitted. We put them
4037 just before the general registers. */
4038 if (DO_FRAME_NOTES && crtl->calls_eh_return)
4040 unsigned int i, regno;
4042 for (i = 0; ; ++i)
4044 regno = EH_RETURN_DATA_REGNO (i);
4045 if (regno == INVALID_REGNUM)
4046 break;
4048 store_reg (regno, offset, HARD_FRAME_POINTER_REGNUM);
4049 offset += UNITS_PER_WORD;
4053 for (i = 18; i >= 4; i--)
4054 if (df_regs_ever_live_p (i) && ! call_used_regs[i])
4056 store_reg (i, offset, HARD_FRAME_POINTER_REGNUM);
4057 offset += UNITS_PER_WORD;
4058 gr_saved++;
4060 /* Account for %r3 which is saved in a special place. */
4061 gr_saved++;
4063 /* No frame pointer needed. */
4064 else
4066 offset = local_fsize - actual_fsize;
4068 /* Saving the EH return data registers in the frame is the simplest
4069 way to get the frame unwind information emitted. */
4070 if (DO_FRAME_NOTES && crtl->calls_eh_return)
4072 unsigned int i, regno;
4074 for (i = 0; ; ++i)
4076 regno = EH_RETURN_DATA_REGNO (i);
4077 if (regno == INVALID_REGNUM)
4078 break;
4080 /* If merge_sp_adjust_with_store is nonzero, then we can
4081 optimize the first save. */
4082 if (merge_sp_adjust_with_store)
4084 store_reg_modify (STACK_POINTER_REGNUM, regno, -offset);
4085 merge_sp_adjust_with_store = 0;
4087 else
4088 store_reg (regno, offset, STACK_POINTER_REGNUM);
4089 offset += UNITS_PER_WORD;
4093 for (i = 18; i >= 3; i--)
4094 if (df_regs_ever_live_p (i) && ! call_used_regs[i])
4096 /* If merge_sp_adjust_with_store is nonzero, then we can
4097 optimize the first GR save. */
4098 if (merge_sp_adjust_with_store)
4100 store_reg_modify (STACK_POINTER_REGNUM, i, -offset);
4101 merge_sp_adjust_with_store = 0;
4103 else
4104 store_reg (i, offset, STACK_POINTER_REGNUM);
4105 offset += UNITS_PER_WORD;
4106 gr_saved++;
4109 /* If we wanted to merge the SP adjustment with a GR save, but we never
4110 did any GR saves, then just emit the adjustment here. */
4111 if (merge_sp_adjust_with_store)
4112 set_reg_plus_d (STACK_POINTER_REGNUM, STACK_POINTER_REGNUM,
4113 actual_fsize, 1);
4116 /* The hppa calling conventions say that %r19, the pic offset
4117 register, is saved at sp - 32 (in this function's frame)
4118 when generating PIC code. FIXME: What is the correct thing
4119 to do for functions which make no calls and allocate no
4120 frame? Do we need to allocate a frame, or can we just omit
4121 the save? For now we'll just omit the save.
4123 We don't want a note on this insn as the frame marker can
4124 move if there is a dynamic stack allocation. */
4125 if (flag_pic && actual_fsize != 0 && !TARGET_64BIT)
4127 rtx addr = gen_rtx_PLUS (word_mode, stack_pointer_rtx, GEN_INT (-32));
4129 emit_move_insn (gen_rtx_MEM (word_mode, addr), pic_offset_table_rtx);
4133 /* Align pointer properly (doubleword boundary). */
4134 offset = (offset + 7) & ~7;
4136 /* Floating point register store. */
4137 if (save_fregs)
4139 rtx base;
4141 /* First get the frame or stack pointer to the start of the FP register
4142 save area. */
4143 if (frame_pointer_needed)
4145 set_reg_plus_d (1, HARD_FRAME_POINTER_REGNUM, offset, 0);
4146 base = hard_frame_pointer_rtx;
4148 else
4150 set_reg_plus_d (1, STACK_POINTER_REGNUM, offset, 0);
4151 base = stack_pointer_rtx;
4154 /* Now actually save the FP registers. */
4155 for (i = FP_SAVED_REG_LAST; i >= FP_SAVED_REG_FIRST; i -= FP_REG_STEP)
4157 if (df_regs_ever_live_p (i)
4158 || (! TARGET_64BIT && df_regs_ever_live_p (i + 1)))
4160 rtx addr, reg;
4161 rtx_insn *insn;
4162 addr = gen_rtx_MEM (DFmode,
4163 gen_rtx_POST_INC (word_mode, tmpreg));
4164 reg = gen_rtx_REG (DFmode, i);
4165 insn = emit_move_insn (addr, reg);
4166 if (DO_FRAME_NOTES)
4168 RTX_FRAME_RELATED_P (insn) = 1;
4169 if (TARGET_64BIT)
4171 rtx mem = gen_rtx_MEM (DFmode,
4172 plus_constant (Pmode, base,
4173 offset));
4174 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
4175 gen_rtx_SET (mem, reg));
4177 else
4179 rtx meml = gen_rtx_MEM (SFmode,
4180 plus_constant (Pmode, base,
4181 offset));
4182 rtx memr = gen_rtx_MEM (SFmode,
4183 plus_constant (Pmode, base,
4184 offset + 4));
4185 rtx regl = gen_rtx_REG (SFmode, i);
4186 rtx regr = gen_rtx_REG (SFmode, i + 1);
4187 rtx setl = gen_rtx_SET (meml, regl);
4188 rtx setr = gen_rtx_SET (memr, regr);
4189 rtvec vec;
4191 RTX_FRAME_RELATED_P (setl) = 1;
4192 RTX_FRAME_RELATED_P (setr) = 1;
4193 vec = gen_rtvec (2, setl, setr);
4194 add_reg_note (insn, REG_FRAME_RELATED_EXPR,
4195 gen_rtx_SEQUENCE (VOIDmode, vec));
4198 offset += GET_MODE_SIZE (DFmode);
4199 fr_saved++;
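/* Worked example for the large-frame prologue path above (values
   assumed): with actual_fsize == 20000, adjust1 == 8192 - 64 == 8128
   stores the old frame pointer and bumps %sp atomically, and
   adjust2 == 20000 - 8128 == 11872 finishes the allocation.  8128
   appears to be the largest 64-byte-aligned value that still fits
   the 14-bit displacement field.  */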
4205 /* Emit RTL to load REG from the memory location specified by BASE+DISP.
4206 Handle case where DISP > 8k by using the add_high_const patterns. */
4208 static void
4209 load_reg (int reg, HOST_WIDE_INT disp, int base)
4211 rtx dest = gen_rtx_REG (word_mode, reg);
4212 rtx basereg = gen_rtx_REG (Pmode, base);
4213 rtx src;
4215 if (VAL_14_BITS_P (disp))
4216 src = gen_rtx_MEM (word_mode, plus_constant (Pmode, basereg, disp));
4217 else if (TARGET_64BIT && !VAL_32_BITS_P (disp))
4219 rtx delta = GEN_INT (disp);
4220 rtx tmpreg = gen_rtx_REG (Pmode, 1);
4222 emit_move_insn (tmpreg, delta);
4223 if (TARGET_DISABLE_INDEXING)
4225 emit_move_insn (tmpreg, gen_rtx_PLUS (Pmode, tmpreg, basereg));
4226 src = gen_rtx_MEM (word_mode, tmpreg);
4228 else
4229 src = gen_rtx_MEM (word_mode, gen_rtx_PLUS (Pmode, tmpreg, basereg));
4231 else
4233 rtx delta = GEN_INT (disp);
4234 rtx high = gen_rtx_PLUS (Pmode, basereg, gen_rtx_HIGH (Pmode, delta));
4235 rtx tmpreg = gen_rtx_REG (Pmode, 1);
4237 emit_move_insn (tmpreg, high);
4238 src = gen_rtx_MEM (word_mode, gen_rtx_LO_SUM (Pmode, tmpreg, delta));
4241 emit_move_insn (dest, src);
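/* Roughly, the cases above correspond to (illustrative asm in GAS
   syntax, not captured from a real build):

       ldw disp(%base),%reg           ; disp fits in 14 bits
       addil L%disp,%base
       ldw R%disp(%r1),%reg           ; generic high/lo_sum pair

   with the 64-bit !VAL_32_BITS_P case instead materializing the
   full displacement in %r1 and using an indexed (or pre-added)
   address.  */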
4244 /* Update the total code bytes output to the text section. */
4246 static void
4247 update_total_code_bytes (unsigned int nbytes)
4249 if ((TARGET_PORTABLE_RUNTIME || !TARGET_GAS || !TARGET_SOM)
4250 && !IN_NAMED_SECTION_P (cfun->decl))
4252 unsigned int old_total = total_code_bytes;
4254 total_code_bytes += nbytes;
4256 /* Be prepared to handle overflows. */
4257 if (old_total > total_code_bytes)
4258 total_code_bytes = UINT_MAX;
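/* Since total_code_bytes is unsigned, wraparound shows up as the
   sum becoming smaller than the old value; e.g. (illustrative)
   0xfffffff0 + 0x20 wraps to 0x10, so the counter is pinned at
   UINT_MAX instead of silently restarting from a small value.  */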
4262 /* This function generates the assembly code for function exit.
4263 Args are as for output_function_prologue ().
4265 The function epilogue should not depend on the current stack
4266 pointer! It should use the frame pointer only. This is mandatory
4267 because of alloca; we also take advantage of it to omit stack
4268 adjustments before returning. */
4270 static void
4271 pa_output_function_epilogue (FILE *file)
4273 rtx_insn *insn = get_last_insn ();
4274 bool extra_nop;
4276 /* pa_expand_epilogue does the dirty work now. We just need
4277 to output the assembler directives which denote the end
4278 of a function.
4280 To make debuggers happy, emit a nop if the epilogue was completely
4281 eliminated due to a volatile call as the last insn in the
4282 current function. That way the return address (in %r2) will
4283 always point to a valid instruction in the current function. */
4285 /* Get the last real insn. */
4286 if (NOTE_P (insn))
4287 insn = prev_real_insn (insn);
4289 /* If it is a sequence, then look inside. */
4290 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
4291 insn = as_a <rtx_sequence *> (PATTERN (insn))-> insn (0);
4293 /* If insn is a CALL_INSN, then it must be a call to a volatile
4294 function (otherwise there would be epilogue insns). */
4295 if (insn && CALL_P (insn))
4297 fputs ("\tnop\n", file);
4298 extra_nop = true;
4300 else
4301 extra_nop = false;
4303 fputs ("\t.EXIT\n\t.PROCEND\n", file);
4305 if (TARGET_SOM && TARGET_GAS)
4307 /* We are done with this subspace except possibly for some additional
4308 debug information. Forget that we are in this subspace to ensure
4309 that the next function is output in its own subspace. */
4310 in_section = NULL;
4311 cfun->machine->in_nsubspa = 2;
4314 /* Thunks do their own insn accounting. */
4315 if (cfun->is_thunk)
4316 return;
4318 if (INSN_ADDRESSES_SET_P ())
4320 last_address = extra_nop ? 4 : 0;
4321 insn = get_last_nonnote_insn ();
4322 if (insn)
4324 last_address += INSN_ADDRESSES (INSN_UID (insn));
4325 if (INSN_P (insn))
4326 last_address += insn_default_length (insn);
4328 last_address = ((last_address + FUNCTION_BOUNDARY / BITS_PER_UNIT - 1)
4329 & ~(FUNCTION_BOUNDARY / BITS_PER_UNIT - 1));
4331 else
4332 last_address = UINT_MAX;
4334 /* Finally, update the total number of code bytes output so far. */
4335 update_total_code_bytes (last_address);
4338 void
4339 pa_expand_epilogue (void)
4341 rtx tmpreg;
4342 HOST_WIDE_INT offset;
4343 HOST_WIDE_INT ret_off = 0;
4344 int i;
4345 int merge_sp_adjust_with_load = 0;
4347 /* We will use this often. */
4348 tmpreg = gen_rtx_REG (word_mode, 1);
4350 /* Try to restore RP early to avoid load/use interlocks when
4351 RP gets used in the return (bv) instruction. This appears to still
4352 be necessary even when we schedule the prologue and epilogue. */
4353 if (rp_saved)
4355 ret_off = TARGET_64BIT ? -16 : -20;
4356 if (frame_pointer_needed)
4358 load_reg (2, ret_off, HARD_FRAME_POINTER_REGNUM);
4359 ret_off = 0;
4361 else
4363 /* No frame pointer, and stack is smaller than 8k. */
4364 if (VAL_14_BITS_P (ret_off - actual_fsize))
4366 load_reg (2, ret_off - actual_fsize, STACK_POINTER_REGNUM);
4367 ret_off = 0;
4372 /* General register restores. */
4373 if (frame_pointer_needed)
4375 offset = local_fsize;
4377 /* If the current function calls __builtin_eh_return, then we need
4378 to restore the saved EH data registers. */
4379 if (DO_FRAME_NOTES && crtl->calls_eh_return)
4381 unsigned int i, regno;
4383 for (i = 0; ; ++i)
4385 regno = EH_RETURN_DATA_REGNO (i);
4386 if (regno == INVALID_REGNUM)
4387 break;
4389 load_reg (regno, offset, HARD_FRAME_POINTER_REGNUM);
4390 offset += UNITS_PER_WORD;
4394 for (i = 18; i >= 4; i--)
4395 if (df_regs_ever_live_p (i) && ! call_used_regs[i])
4397 load_reg (i, offset, HARD_FRAME_POINTER_REGNUM);
4398 offset += UNITS_PER_WORD;
4401 else
4403 offset = local_fsize - actual_fsize;
4405 /* If the current function calls __builtin_eh_return, then we need
4406 to restore the saved EH data registers. */
4407 if (DO_FRAME_NOTES && crtl->calls_eh_return)
4409 unsigned int i, regno;
4411 for (i = 0; ; ++i)
4413 regno = EH_RETURN_DATA_REGNO (i);
4414 if (regno == INVALID_REGNUM)
4415 break;
4417 /* Only for the first load.
4418 merge_sp_adjust_with_load holds the register load
4419 with which we will merge the sp adjustment. */
4420 if (merge_sp_adjust_with_load == 0
4421 && local_fsize == 0
4422 && VAL_14_BITS_P (-actual_fsize))
4423 merge_sp_adjust_with_load = regno;
4424 else
4425 load_reg (regno, offset, STACK_POINTER_REGNUM);
4426 offset += UNITS_PER_WORD;
4430 for (i = 18; i >= 3; i--)
4432 if (df_regs_ever_live_p (i) && ! call_used_regs[i])
4434 /* Only for the first load.
4435 merge_sp_adjust_with_load holds the register load
4436 with which we will merge the sp adjustment. */
4437 if (merge_sp_adjust_with_load == 0
4438 && local_fsize == 0
4439 && VAL_14_BITS_P (-actual_fsize))
4440 merge_sp_adjust_with_load = i;
4441 else
4442 load_reg (i, offset, STACK_POINTER_REGNUM);
4443 offset += UNITS_PER_WORD;
4448 /* Align pointer properly (doubleword boundary). */
4449 offset = (offset + 7) & ~7;
4451 /* FP register restores. */
4452 if (save_fregs)
4454 /* Adjust the register to index off of. */
4455 if (frame_pointer_needed)
4456 set_reg_plus_d (1, HARD_FRAME_POINTER_REGNUM, offset, 0);
4457 else
4458 set_reg_plus_d (1, STACK_POINTER_REGNUM, offset, 0);
4460 /* Actually do the restores now. */
4461 for (i = FP_SAVED_REG_LAST; i >= FP_SAVED_REG_FIRST; i -= FP_REG_STEP)
4462 if (df_regs_ever_live_p (i)
4463 || (! TARGET_64BIT && df_regs_ever_live_p (i + 1)))
4465 rtx src = gen_rtx_MEM (DFmode,
4466 gen_rtx_POST_INC (word_mode, tmpreg));
4467 rtx dest = gen_rtx_REG (DFmode, i);
4468 emit_move_insn (dest, src);
4472 /* Emit a blockage insn here to keep these insns from being moved to
4473 an earlier spot in the epilogue, or into the main instruction stream.
4475 This is necessary as we must not cut the stack back before all the
4476 restores are finished. */
4477 emit_insn (gen_blockage ());
4479 /* Reset stack pointer (and possibly frame pointer). The stack
4480 pointer is initially set to fp + 64 to avoid a race condition. */
4481 if (frame_pointer_needed)
4483 rtx delta = GEN_INT (-64);
4485 set_reg_plus_d (STACK_POINTER_REGNUM, HARD_FRAME_POINTER_REGNUM, 64, 0);
4486 emit_insn (gen_pre_load (hard_frame_pointer_rtx,
4487 stack_pointer_rtx, delta));
4489 /* If we were deferring a callee register restore, do it now. */
4490 else if (merge_sp_adjust_with_load)
4492 rtx delta = GEN_INT (-actual_fsize);
4493 rtx dest = gen_rtx_REG (word_mode, merge_sp_adjust_with_load);
4495 emit_insn (gen_pre_load (dest, stack_pointer_rtx, delta));
4497 else if (actual_fsize != 0)
4498 set_reg_plus_d (STACK_POINTER_REGNUM, STACK_POINTER_REGNUM,
4499 - actual_fsize, 0);
4501 /* If we haven't restored %r2 yet (no frame pointer, and a stack
4502 frame greater than 8k), do so now. */
4503 if (ret_off != 0)
4504 load_reg (2, ret_off, STACK_POINTER_REGNUM);
4506 if (DO_FRAME_NOTES && crtl->calls_eh_return)
4508 rtx sa = EH_RETURN_STACKADJ_RTX;
4510 emit_insn (gen_blockage ());
4511 emit_insn (TARGET_64BIT
4512 ? gen_subdi3 (stack_pointer_rtx, stack_pointer_rtx, sa)
4513 : gen_subsi3 (stack_pointer_rtx, stack_pointer_rtx, sa));
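/* Sketch of the merged restore above (illustrative): rather than a
   plain load followed by a separate "ldo -fsize(%sp),%sp", the
   pre_load pattern performs one pre-modifying load that both
   restores the deferred register and cuts the stack back by
   actual_fsize in a single insn.  */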
4517 bool
4518 pa_can_use_return_insn (void)
4520 if (!reload_completed)
4521 return false;
4523 if (frame_pointer_needed)
4524 return false;
4526 if (df_regs_ever_live_p (2))
4527 return false;
4529 if (crtl->profile)
4530 return false;
4532 return pa_compute_frame_size (get_frame_size (), 0) == 0;
4536 rtx hppa_pic_save_rtx (void)
4538 return get_hard_reg_initial_val (word_mode, PIC_OFFSET_TABLE_REGNUM);
4541 #ifndef NO_DEFERRED_PROFILE_COUNTERS
4542 #define NO_DEFERRED_PROFILE_COUNTERS 0
4543 #endif
4546 /* Vector of funcdef numbers. */
4547 static vec<int> funcdef_nos;
4549 /* Output deferred profile counters. */
4550 static void
4551 output_deferred_profile_counters (void)
4553 unsigned int i;
4554 int align, n;
4556 if (funcdef_nos.is_empty ())
4557 return;
4559 switch_to_section (data_section);
4560 align = MIN (BIGGEST_ALIGNMENT, LONG_TYPE_SIZE);
4561 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (align / BITS_PER_UNIT));
4563 for (i = 0; funcdef_nos.iterate (i, &n); i++)
4565 targetm.asm_out.internal_label (asm_out_file, "LP", n);
4566 assemble_integer (const0_rtx, LONG_TYPE_SIZE / BITS_PER_UNIT, align, 1);
4569 funcdef_nos.release ();
4572 void
4573 hppa_profile_hook (int label_no)
4575 /* We use SImode for the address of the function in both 32 and
4576 64-bit code to avoid having to provide DImode versions of the
4577 lcla2 and load_offset_label_address insn patterns. */
4578 rtx reg = gen_reg_rtx (SImode);
4579 rtx_code_label *label_rtx = gen_label_rtx ();
4580 int reg_parm_stack_space = REG_PARM_STACK_SPACE (NULL_TREE);
4581 rtx arg_bytes, begin_label_rtx, mcount, sym;
4582 rtx_insn *call_insn;
4583 char begin_label_name[16];
4584 bool use_mcount_pcrel_call;
4586 /* Set up call destination. */
4587 sym = gen_rtx_SYMBOL_REF (Pmode, "_mcount");
4588 pa_encode_label (sym);
4589 mcount = gen_rtx_MEM (Pmode, sym);
4591 /* If we can reach _mcount with a pc-relative call, we can optimize
4592 loading the address of the current function. This requires linker
4593 long branch stub support. */
4594 if (!TARGET_PORTABLE_RUNTIME
4595 && !TARGET_LONG_CALLS
4596 && (TARGET_SOM || flag_function_sections))
4597 use_mcount_pcrel_call = true;
4598 else
4599 use_mcount_pcrel_call = false;
4601 ASM_GENERATE_INTERNAL_LABEL (begin_label_name, FUNC_BEGIN_PROLOG_LABEL,
4602 label_no);
4603 begin_label_rtx = gen_rtx_SYMBOL_REF (SImode, ggc_strdup (begin_label_name));
4605 emit_move_insn (gen_rtx_REG (word_mode, 26), gen_rtx_REG (word_mode, 2));
4607 if (!use_mcount_pcrel_call)
4609 /* The address of the function is loaded into %r25 with an instruction-
4610 relative sequence that avoids the use of relocations. The sequence
4611 is split so that the load_offset_label_address instruction can
4612 occupy the delay slot of the call to _mcount. */
4613 if (TARGET_PA_20)
4614 emit_insn (gen_lcla2 (reg, label_rtx));
4615 else
4616 emit_insn (gen_lcla1 (reg, label_rtx));
4618 emit_insn (gen_load_offset_label_address (gen_rtx_REG (SImode, 25),
4619 reg,
4620 begin_label_rtx,
4621 label_rtx));
4624 if (!NO_DEFERRED_PROFILE_COUNTERS)
4626 rtx count_label_rtx, addr, r24;
4627 char count_label_name[16];
4629 funcdef_nos.safe_push (label_no);
4630 ASM_GENERATE_INTERNAL_LABEL (count_label_name, "LP", label_no);
4631 count_label_rtx = gen_rtx_SYMBOL_REF (Pmode,
4632 ggc_strdup (count_label_name));
4634 addr = force_reg (Pmode, count_label_rtx);
4635 r24 = gen_rtx_REG (Pmode, 24);
4636 emit_move_insn (r24, addr);
4638 arg_bytes = GEN_INT (TARGET_64BIT ? 24 : 12);
4639 if (use_mcount_pcrel_call)
4640 call_insn = emit_call_insn (gen_call_mcount (mcount, arg_bytes,
4641 begin_label_rtx));
4642 else
4643 call_insn = emit_call_insn (gen_call (mcount, arg_bytes));
4645 use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), r24);
4647 else
4649 arg_bytes = GEN_INT (TARGET_64BIT ? 16 : 8);
4650 if (use_mcount_pcrel_call)
4651 call_insn = emit_call_insn (gen_call_mcount (mcount, arg_bytes,
4652 begin_label_rtx));
4653 else
4654 call_insn = emit_call_insn (gen_call (mcount, arg_bytes));
4657 use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), gen_rtx_REG (SImode, 25));
4658 use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), gen_rtx_REG (SImode, 26));
4660 /* Indicate the _mcount call cannot throw, nor will it execute a
4661 non-local goto. */
4662 make_reg_eh_region_note_nothrow_nononlocal (call_insn);
4664 /* Allocate space for fixed arguments. */
4665 if (reg_parm_stack_space > crtl->outgoing_args_size)
4666 crtl->outgoing_args_size = reg_parm_stack_space;
4669 /* Fetch the return address for the frame COUNT steps up from
4670 the current frame, after the prologue. FRAMEADDR is the
4671 frame pointer of the COUNT frame.
4673 We want to ignore any export stub remnants here. To handle this,
4674 we examine the code at the return address, and if it is an export
4675 stub, we return a memory rtx for the stub return address stored
4676 at frame-24.
4678 The value returned is used in two different ways:
4680 1. To find a function's caller.
4682 2. To change the return address for a function.
4684 This function handles most instances of case 1; however, it will
4685 fail if there are two levels of stubs to execute on the return
4686 path. The only way I believe that can happen is if the return value
4687 needs a parameter relocation, which never happens for C code.
4689 This function handles most instances of case 2; however, it will
4690 fail if we did not originally have stub code on the return path
4691 but will need stub code on the new return path. This can happen if
4692 the caller & callee are both in the main program, but the new
4693 return location is in a shared library. */
4696 rtx pa_return_addr_rtx (int count, rtx frameaddr)
4698 rtx label;
4699 rtx rp;
4700 rtx saved_rp;
4701 rtx ins;
4703 /* The instruction stream at the return address of a PA1.X export stub is:
4705 0x4bc23fd1 | stub+8: ldw -18(sr0,sp),rp
4706 0x004010a1 | stub+12: ldsid (sr0,rp),r1
4707 0x00011820 | stub+16: mtsp r1,sr0
4708 0xe0400002 | stub+20: be,n 0(sr0,rp)
4710 0xe0400002 must be specified as -532676606 so that it won't be
4711 rejected as an invalid immediate operand on 64-bit hosts.
4713 The instruction stream at the return address of a PA2.0 export stub is:
4715 0x4bc23fd1 | stub+8: ldw -18(sr0,sp),rp
4716 0xe840d002 | stub+12: bve,n (rp)
4719 HOST_WIDE_INT insns[4];
4720 int i, len;
4722 if (count != 0)
4723 return NULL_RTX;
4725 rp = get_hard_reg_initial_val (Pmode, 2);
4727 if (TARGET_64BIT || TARGET_NO_SPACE_REGS)
4728 return rp;
4730 /* If there is no export stub then just use the value saved from
4731 the return pointer register. */
4733 saved_rp = gen_reg_rtx (Pmode);
4734 emit_move_insn (saved_rp, rp);
4736 /* Get pointer to the instruction stream. We have to mask out the
4737 privilege level from the two low order bits of the return address
4738 pointer here so that ins will point to the start of the first
4739 instruction that would have been executed if we returned. */
4740 ins = copy_to_reg (gen_rtx_AND (Pmode, rp, MASK_RETURN_ADDR));
4741 label = gen_label_rtx ();
4743 if (TARGET_PA_20)
4745 insns[0] = 0x4bc23fd1;
4746 insns[1] = -398405630;
4747 len = 2;
4749 else
4751 insns[0] = 0x4bc23fd1;
4752 insns[1] = 0x004010a1;
4753 insns[2] = 0x00011820;
4754 insns[3] = -532676606;
4755 len = 4;
4758 /* Check the instruction stream at the normal return address for the
4759 export stub. If it is an export stub, then our return address is
4760 really in -24[frameaddr]. */
4762 for (i = 0; i < len; i++)
4764 rtx op0 = gen_rtx_MEM (SImode, plus_constant (Pmode, ins, i * 4));
4765 rtx op1 = GEN_INT (insns[i]);
4766 emit_cmp_and_jump_insns (op0, op1, NE, NULL, SImode, 0, label);
4769 /* Here we know that our return address points to an export
4770 stub. We don't want to return the address of the export stub,
4771 but rather the return address of the export stub. That return
4772 address is stored at -24[frameaddr]. */
4774 emit_move_insn (saved_rp,
4775 gen_rtx_MEM (Pmode,
4776 memory_address (Pmode,
4777 plus_constant (Pmode, frameaddr,
4778 -24))));
4780 emit_label (label);
4782 return saved_rp;
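/* Note on the magic constants above: -398405630 is the signed
   32-bit value of 0xe840d002 (the PA2.0 "bve,n (rp)"), just as
   -532676606 is 0xe0400002.  The compare-and-jump loop exits to
   LABEL on the first word that differs, so falling through to the
   load from -24[frameaddr] means every word matched and an export
   stub was found.  */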
4785 void
4786 pa_emit_bcond_fp (rtx operands[])
4788 enum rtx_code code = GET_CODE (operands[0]);
4789 rtx operand0 = operands[1];
4790 rtx operand1 = operands[2];
4791 rtx label = operands[3];
4793 emit_insn (gen_rtx_SET (gen_rtx_REG (CCFPmode, 0),
4794 gen_rtx_fmt_ee (code, CCFPmode, operand0, operand1)));
4796 emit_jump_insn (gen_rtx_SET (pc_rtx,
4797 gen_rtx_IF_THEN_ELSE (VOIDmode,
4798 gen_rtx_fmt_ee (NE,
4799 VOIDmode,
4800 gen_rtx_REG (CCFPmode, 0),
4801 const0_rtx),
4802 gen_rtx_LABEL_REF (VOIDmode, label),
4803 pc_rtx)));
4807 /* Adjust the cost of a scheduling dependency. Return the new cost of
4808 a dependency LINK or INSN on DEP_INSN. COST is the current cost. */
4810 static int
4811 pa_adjust_cost (rtx_insn *insn, int dep_type, rtx_insn *dep_insn, int cost,
4812 unsigned int)
4814 enum attr_type attr_type;
4816 /* Don't adjust costs for a pa8000 chip, also do not adjust any
4817 true dependencies as they are described with bypasses now. */
4818 if (pa_cpu >= PROCESSOR_8000 || dep_type == 0)
4819 return cost;
4821 if (! recog_memoized (insn))
4822 return 0;
4824 attr_type = get_attr_type (insn);
4826 switch (dep_type)
4828 case REG_DEP_ANTI:
4829 /* Anti dependency; DEP_INSN reads a register that INSN writes some
4830 cycles later. */
4832 if (attr_type == TYPE_FPLOAD)
4834 rtx pat = PATTERN (insn);
4835 rtx dep_pat = PATTERN (dep_insn);
4836 if (GET_CODE (pat) == PARALLEL)
4838 /* This happens for the fldXs,mb patterns. */
4839 pat = XVECEXP (pat, 0, 0);
4841 if (GET_CODE (pat) != SET || GET_CODE (dep_pat) != SET)
4842 /* If this happens, we have to extend this to schedule
4843 optimally. Return 0 for now. */
4844 return 0;
4846 if (reg_mentioned_p (SET_DEST (pat), SET_SRC (dep_pat)))
4848 if (! recog_memoized (dep_insn))
4849 return 0;
4850 switch (get_attr_type (dep_insn))
4852 case TYPE_FPALU:
4853 case TYPE_FPMULSGL:
4854 case TYPE_FPMULDBL:
4855 case TYPE_FPDIVSGL:
4856 case TYPE_FPDIVDBL:
4857 case TYPE_FPSQRTSGL:
4858 case TYPE_FPSQRTDBL:
4859 /* A fpload can't be issued until one cycle before a
4860 preceding arithmetic operation has finished if
4861 the target of the fpload is any of the sources
4862 (or destination) of the arithmetic operation. */
4863 return insn_default_latency (dep_insn) - 1;
4865 default:
4866 return 0;
4870 else if (attr_type == TYPE_FPALU)
4872 rtx pat = PATTERN (insn);
4873 rtx dep_pat = PATTERN (dep_insn);
4874 if (GET_CODE (pat) == PARALLEL)
4876 /* This happens for the fldXs,mb patterns. */
4877 pat = XVECEXP (pat, 0, 0);
4879 if (GET_CODE (pat) != SET || GET_CODE (dep_pat) != SET)
4880 /* If this happens, we have to extend this to schedule
4881 optimally. Return 0 for now. */
4882 return 0;
4884 if (reg_mentioned_p (SET_DEST (pat), SET_SRC (dep_pat)))
4886 if (! recog_memoized (dep_insn))
4887 return 0;
4888 switch (get_attr_type (dep_insn))
4890 case TYPE_FPDIVSGL:
4891 case TYPE_FPDIVDBL:
4892 case TYPE_FPSQRTSGL:
4893 case TYPE_FPSQRTDBL:
4894 /* An ALU flop can't be issued until two cycles before a
4895 preceding divide or sqrt operation has finished if
4896 the target of the ALU flop is any of the sources
4897 (or destination) of the divide or sqrt operation. */
4898 return insn_default_latency (dep_insn) - 2;
4900 default:
4901 return 0;
4906 /* For other anti dependencies, the cost is 0. */
4907 return 0;
4909 case REG_DEP_OUTPUT:
4910 /* Output dependency; DEP_INSN writes a register that INSN writes some
4911 cycles later. */
4912 if (attr_type == TYPE_FPLOAD)
4914 rtx pat = PATTERN (insn);
4915 rtx dep_pat = PATTERN (dep_insn);
4916 if (GET_CODE (pat) == PARALLEL)
4918 /* This happens for the fldXs,mb patterns. */
4919 pat = XVECEXP (pat, 0, 0);
4921 if (GET_CODE (pat) != SET || GET_CODE (dep_pat) != SET)
4922 /* If this happens, we have to extend this to schedule
4923 optimally. Return 0 for now. */
4924 return 0;
4926 if (reg_mentioned_p (SET_DEST (pat), SET_DEST (dep_pat)))
4928 if (! recog_memoized (dep_insn))
4929 return 0;
4930 switch (get_attr_type (dep_insn))
4932 case TYPE_FPALU:
4933 case TYPE_FPMULSGL:
4934 case TYPE_FPMULDBL:
4935 case TYPE_FPDIVSGL:
4936 case TYPE_FPDIVDBL:
4937 case TYPE_FPSQRTSGL:
4938 case TYPE_FPSQRTDBL:
4939 /* A fpload can't be issued until one cycle before a
4940 preceding arithmetic operation has finished if
4941 the target of the fpload is the destination of the
4942 arithmetic operation.
4944 Exception: For PA7100LC, PA7200 and PA7300, the cost
4945 is 3 cycles, unless they bundle together. We also
4946 pay the penalty if the second insn is a fpload. */
4947 return insn_default_latency (dep_insn) - 1;
4949 default:
4950 return 0;
4954 else if (attr_type == TYPE_FPALU)
4956 rtx pat = PATTERN (insn);
4957 rtx dep_pat = PATTERN (dep_insn);
4958 if (GET_CODE (pat) == PARALLEL)
4960 /* This happens for the fldXs,mb patterns. */
4961 pat = XVECEXP (pat, 0, 0);
4963 if (GET_CODE (pat) != SET || GET_CODE (dep_pat) != SET)
4964 /* If this happens, we have to extend this to schedule
4965 optimally. Return 0 for now. */
4966 return 0;
4968 if (reg_mentioned_p (SET_DEST (pat), SET_DEST (dep_pat)))
4970 if (! recog_memoized (dep_insn))
4971 return 0;
4972 switch (get_attr_type (dep_insn))
4974 case TYPE_FPDIVSGL:
4975 case TYPE_FPDIVDBL:
4976 case TYPE_FPSQRTSGL:
4977 case TYPE_FPSQRTDBL:
4978 /* An ALU flop can't be issued until two cycles before a
4979 preceding divide or sqrt operation has finished if
4980 the target of the ALU flop is also the target of
4981 the divide or sqrt operation. */
4982 return insn_default_latency (dep_insn) - 2;
4984 default:
4985 return 0;
4990 /* For other output dependencies, the cost is 0. */
4991 return 0;
4993 default:
4994 gcc_unreachable ();
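/* Worked example (latency value assumed, not taken from the md
   files): if a fpload anti-depends on an FP multiply whose default
   latency is 3 cycles, the code above yields a cost of 3 - 1 == 2,
   letting the load issue one cycle before the multiply retires.  */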
4998 /* Adjust scheduling priorities. We use this to try and keep addil
4999 and the next use of %r1 close together. */
5000 static int
5001 pa_adjust_priority (rtx_insn *insn, int priority)
5003 rtx set = single_set (insn);
5004 rtx src, dest;
5005 if (set)
5007 src = SET_SRC (set);
5008 dest = SET_DEST (set);
5009 if (GET_CODE (src) == LO_SUM
5010 && symbolic_operand (XEXP (src, 1), VOIDmode)
5011 && ! read_only_operand (XEXP (src, 1), VOIDmode))
5012 priority >>= 3;
5014 else if (GET_CODE (src) == MEM
5015 && GET_CODE (XEXP (src, 0)) == LO_SUM
5016 && symbolic_operand (XEXP (XEXP (src, 0), 1), VOIDmode)
5017 && ! read_only_operand (XEXP (XEXP (src, 0), 1), VOIDmode))
5018 priority >>= 1;
5020 else if (GET_CODE (dest) == MEM
5021 && GET_CODE (XEXP (dest, 0)) == LO_SUM
5022 && symbolic_operand (XEXP (XEXP (dest, 0), 1), VOIDmode)
5023 && ! read_only_operand (XEXP (XEXP (dest, 0), 1), VOIDmode))
5024 priority >>= 3;
5026 return priority;
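/* Illustrative effect: a symbolic store through a LO_SUM address
   that entered with priority 32 drops to 32 >> 3 == 4, so the
   scheduler tends to place it right after the addil that set up
   %r1 rather than letting another use of %r1 intervene.  */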
5029 /* The 700 can only issue a single insn at a time.
5030 The 7XXX processors can issue two insns at a time.
5031 The 8000 can issue 4 insns at a time. */
5032 static int
5033 pa_issue_rate (void)
5035 switch (pa_cpu)
5037 case PROCESSOR_700: return 1;
5038 case PROCESSOR_7100: return 2;
5039 case PROCESSOR_7100LC: return 2;
5040 case PROCESSOR_7200: return 2;
5041 case PROCESSOR_7300: return 2;
5042 case PROCESSOR_8000: return 4;
5044 default:
5045 gcc_unreachable ();
5051 /* Return any length plus adjustment needed by INSN which already has
5052 its length computed as LENGTH. Return LENGTH if no adjustment is
5053 necessary.
5055 Also compute the length of an inline block move here as it is too
5056 complicated to express as a length attribute in pa.md. */
5058 int pa_adjust_insn_length (rtx_insn *insn, int length)
5060 rtx pat = PATTERN (insn);
5062 /* If length is negative or undefined, provide initial length. */
5063 if ((unsigned int) length >= INT_MAX)
5065 if (GET_CODE (pat) == SEQUENCE)
5066 insn = as_a <rtx_insn *> (XVECEXP (pat, 0, 0));
5068 switch (get_attr_type (insn))
5070 case TYPE_MILLI:
5071 length = pa_attr_length_millicode_call (insn);
5072 break;
5073 case TYPE_CALL:
5074 length = pa_attr_length_call (insn, 0);
5075 break;
5076 case TYPE_SIBCALL:
5077 length = pa_attr_length_call (insn, 1);
5078 break;
5079 case TYPE_DYNCALL:
5080 length = pa_attr_length_indirect_call (insn);
5081 break;
5082 case TYPE_SH_FUNC_ADRS:
5083 length = pa_attr_length_millicode_call (insn) + 20;
5084 break;
5085 default:
5086 gcc_unreachable ();
5090 /* Block move pattern. */
5091 if (NONJUMP_INSN_P (insn)
5092 && GET_CODE (pat) == PARALLEL
5093 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
5094 && GET_CODE (XEXP (XVECEXP (pat, 0, 0), 0)) == MEM
5095 && GET_CODE (XEXP (XVECEXP (pat, 0, 0), 1)) == MEM
5096 && GET_MODE (XEXP (XVECEXP (pat, 0, 0), 0)) == BLKmode
5097 && GET_MODE (XEXP (XVECEXP (pat, 0, 0), 1)) == BLKmode)
5098 length += compute_movmem_length (insn) - 4;
5099 /* Block clear pattern. */
5100 else if (NONJUMP_INSN_P (insn)
5101 && GET_CODE (pat) == PARALLEL
5102 && GET_CODE (XVECEXP (pat, 0, 0)) == SET
5103 && GET_CODE (XEXP (XVECEXP (pat, 0, 0), 0)) == MEM
5104 && XEXP (XVECEXP (pat, 0, 0), 1) == const0_rtx
5105 && GET_MODE (XEXP (XVECEXP (pat, 0, 0), 0)) == BLKmode)
5106 length += compute_clrmem_length (insn) - 4;
5107 /* Conditional branch with an unfilled delay slot. */
5108 else if (JUMP_P (insn) && ! simplejump_p (insn))
5110 /* Adjust a short backwards conditional with an unfilled delay slot. */
5111 if (GET_CODE (pat) == SET
5112 && length == 4
5113 && JUMP_LABEL (insn) != NULL_RTX
5114 && ! forward_branch_p (insn))
5115 length += 4;
5116 else if (GET_CODE (pat) == PARALLEL
5117 && get_attr_type (insn) == TYPE_PARALLEL_BRANCH
5118 && length == 4)
5119 length += 4;
5120 /* Adjust dbra insn with short backwards conditional branch with
5121 unfilled delay slot -- only for case where counter is in a
5122 general register. */
5123 else if (GET_CODE (pat) == PARALLEL
5124 && GET_CODE (XVECEXP (pat, 0, 1)) == SET
5125 && GET_CODE (XEXP (XVECEXP (pat, 0, 1), 0)) == REG
5126 && ! FP_REG_P (XEXP (XVECEXP (pat, 0, 1), 0))
5127 && length == 4
5128 && ! forward_branch_p (insn))
5129 length += 4;
5131 return length;
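/* Example of the branch adjustments above (illustrative): a short
   backwards conditional branch with an unfilled delay slot reports
   length 4 and is widened to 8 bytes here, so later passes budget
   for the nop that will occupy the slot.  */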
5134 /* Implement the TARGET_PRINT_OPERAND_PUNCT_VALID_P hook. */
5136 static bool
5137 pa_print_operand_punct_valid_p (unsigned char code)
5139 if (code == '@'
5140 || code == '#'
5141 || code == '*'
5142 || code == '^')
5143 return true;
5145 return false;
5148 /* Print operand X (an rtx) in assembler syntax to file FILE.
5149 CODE is a letter or dot (`z' in `%z0') or 0 if no letter was specified.
5150 For `%' followed by punctuation, CODE is the punctuation and X is null. */
5152 void
5153 pa_print_operand (FILE *file, rtx x, int code)
5155 switch (code)
5157 case '#':
5158 /* Output a 'nop' if there's nothing for the delay slot. */
5159 if (dbr_sequence_length () == 0)
5160 fputs ("\n\tnop", file);
5161 return;
5162 case '*':
5163 /* Output a nullification completer if there's nothing for the
5164 delay slot or nullification is requested. */
5165 if (dbr_sequence_length () == 0
5166 || (final_sequence
5167 && INSN_ANNULLED_BRANCH_P (XVECEXP (final_sequence, 0, 0))))
5168 fputs (",n", file);
5169 return;
5170 case 'R':
5171 /* Print out the second register name of a register pair.
5172 I.e., R (6) => 7. */
5173 fputs (reg_names[REGNO (x) + 1], file);
5174 return;
5175 case 'r':
5176 /* A register or zero. */
5177 if (x == const0_rtx
5178 || (x == CONST0_RTX (DFmode))
5179 || (x == CONST0_RTX (SFmode)))
5181 fputs ("%r0", file);
5182 return;
5184 else
5185 break;
5186 case 'f':
5187 /* A register or zero (floating point). */
5188 if (x == const0_rtx
5189 || (x == CONST0_RTX (DFmode))
5190 || (x == CONST0_RTX (SFmode)))
5192 fputs ("%fr0", file);
5193 return;
5195 else
5196 break;
5197 case 'A':
5199 rtx xoperands[2];
5201 xoperands[0] = XEXP (XEXP (x, 0), 0);
5202 xoperands[1] = XVECEXP (XEXP (XEXP (x, 0), 1), 0, 0);
5203 pa_output_global_address (file, xoperands[1], 0);
5204 fprintf (file, "(%s)", reg_names [REGNO (xoperands[0])]);
5205 return;
5208 case 'C': /* Plain (C)ondition */
5209 case 'X':
5210 switch (GET_CODE (x))
5212 case EQ:
5213 fputs ("=", file); break;
5214 case NE:
5215 fputs ("<>", file); break;
5216 case GT:
5217 fputs (">", file); break;
5218 case GE:
5219 fputs (">=", file); break;
5220 case GEU:
5221 fputs (">>=", file); break;
5222 case GTU:
5223 fputs (">>", file); break;
5224 case LT:
5225 fputs ("<", file); break;
5226 case LE:
5227 fputs ("<=", file); break;
5228 case LEU:
5229 fputs ("<<=", file); break;
5230 case LTU:
5231 fputs ("<<", file); break;
5232 default:
5233 gcc_unreachable ();
5235 return;
5236 case 'N': /* Condition, (N)egated */
5237 switch (GET_CODE (x))
5239 case EQ:
5240 fputs ("<>", file); break;
5241 case NE:
5242 fputs ("=", file); break;
5243 case GT:
5244 fputs ("<=", file); break;
5245 case GE:
5246 fputs ("<", file); break;
5247 case GEU:
5248 fputs ("<<", file); break;
5249 case GTU:
5250 fputs ("<<=", file); break;
5251 case LT:
5252 fputs (">=", file); break;
5253 case LE:
5254 fputs (">", file); break;
5255 case LEU:
5256 fputs (">>", file); break;
5257 case LTU:
5258 fputs (">>=", file); break;
5259 default:
5260 gcc_unreachable ();
5262 return;
5263 /* For floating point comparisons. Note that the output
5264 predicates are the complement of the desired mode. The
5265 conditions for GT, GE, LT, LE and LTGT cause an invalid
5266 operation exception if the result is unordered and this
5267 exception is enabled in the floating-point status register. */
5268 case 'Y':
5269 switch (GET_CODE (x))
5271 case EQ:
5272 fputs ("!=", file); break;
5273 case NE:
5274 fputs ("=", file); break;
5275 case GT:
5276 fputs ("!>", file); break;
5277 case GE:
5278 fputs ("!>=", file); break;
5279 case LT:
5280 fputs ("!<", file); break;
5281 case LE:
5282 fputs ("!<=", file); break;
5283 case LTGT:
5284 fputs ("!<>", file); break;
5285 case UNLE:
5286 fputs ("!?<=", file); break;
5287 case UNLT:
5288 fputs ("!?<", file); break;
5289 case UNGE:
5290 fputs ("!?>=", file); break;
5291 case UNGT:
5292 fputs ("!?>", file); break;
5293 case UNEQ:
5294 fputs ("!?=", file); break;
5295 case UNORDERED:
5296 fputs ("!?", file); break;
5297 case ORDERED:
5298 fputs ("?", file); break;
5299 default:
5300 gcc_unreachable ();
5302 return;
5303 case 'S': /* Condition, operands are (S)wapped. */
5304 switch (GET_CODE (x))
5306 case EQ:
5307 fputs ("=", file); break;
5308 case NE:
5309 fputs ("<>", file); break;
5310 case GT:
5311 fputs ("<", file); break;
5312 case GE:
5313 fputs ("<=", file); break;
5314 case GEU:
5315 fputs ("<<=", file); break;
5316 case GTU:
5317 fputs ("<<", file); break;
5318 case LT:
5319 fputs (">", file); break;
5320 case LE:
5321 fputs (">=", file); break;
5322 case LEU:
5323 fputs (">>=", file); break;
5324 case LTU:
5325 fputs (">>", file); break;
5326 default:
5327 gcc_unreachable ();
5329 return;
5330 case 'B': /* Condition, (B)oth swapped and negate. */
5331 switch (GET_CODE (x))
5333 case EQ:
5334 fputs ("<>", file); break;
5335 case NE:
5336 fputs ("=", file); break;
5337 case GT:
5338 fputs (">=", file); break;
5339 case GE:
5340 fputs (">", file); break;
5341 case GEU:
5342 fputs (">>", file); break;
5343 case GTU:
5344 fputs (">>=", file); break;
5345 case LT:
5346 fputs ("<=", file); break;
5347 case LE:
5348 fputs ("<", file); break;
5349 case LEU:
5350 fputs ("<<", file); break;
5351 case LTU:
5352 fputs ("<<=", file); break;
5353 default:
5354 gcc_unreachable ();
5356 return;
5357 case 'k':
5358 gcc_assert (GET_CODE (x) == CONST_INT);
5359 fprintf (file, HOST_WIDE_INT_PRINT_DEC, ~INTVAL (x));
5360 return;
5361 case 'Q':
5362 gcc_assert (GET_CODE (x) == CONST_INT);
5363 fprintf (file, HOST_WIDE_INT_PRINT_DEC, 64 - (INTVAL (x) & 63));
5364 return;
5365 case 'L':
5366 gcc_assert (GET_CODE (x) == CONST_INT);
5367 fprintf (file, HOST_WIDE_INT_PRINT_DEC, 32 - (INTVAL (x) & 31));
5368 return;
5369 case 'o':
5370 gcc_assert (GET_CODE (x) == CONST_INT
5371 && (INTVAL (x) == 1 || INTVAL (x) == 2 || INTVAL (x) == 3));
5372 fprintf (file, HOST_WIDE_INT_PRINT_DEC, INTVAL (x));
5373 return;
5374 case 'O':
5375 gcc_assert (GET_CODE (x) == CONST_INT && exact_log2 (INTVAL (x)) >= 0);
5376 fprintf (file, "%d", exact_log2 (INTVAL (x)));
5377 return;
5378 case 'p':
5379 gcc_assert (GET_CODE (x) == CONST_INT);
5380 fprintf (file, HOST_WIDE_INT_PRINT_DEC, 63 - (INTVAL (x) & 63));
5381 return;
5382 case 'P':
5383 gcc_assert (GET_CODE (x) == CONST_INT);
5384 fprintf (file, HOST_WIDE_INT_PRINT_DEC, 31 - (INTVAL (x) & 31));
5385 return;
5386 case 'I':
5387 if (GET_CODE (x) == CONST_INT)
5388 fputs ("i", file);
5389 return;
5390 case 'M':
5391 case 'F':
5392 switch (GET_CODE (XEXP (x, 0)))
5394 case PRE_DEC:
5395 case PRE_INC:
5396 if (ASSEMBLER_DIALECT == 0)
5397 fputs ("s,mb", file);
5398 else
5399 fputs (",mb", file);
5400 break;
5401 case POST_DEC:
5402 case POST_INC:
5403 if (ASSEMBLER_DIALECT == 0)
5404 fputs ("s,ma", file);
5405 else
5406 fputs (",ma", file);
5407 break;
5408 case PLUS:
5409 if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
5410 && GET_CODE (XEXP (XEXP (x, 0), 1)) == REG)
5412 if (ASSEMBLER_DIALECT == 0)
5413 fputs ("x", file);
5415 else if (GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT
5416 || GET_CODE (XEXP (XEXP (x, 0), 1)) == MULT)
5418 if (ASSEMBLER_DIALECT == 0)
5419 fputs ("x,s", file);
5420 else
5421 fputs (",s", file);
5423 else if (code == 'F' && ASSEMBLER_DIALECT == 0)
5424 fputs ("s", file);
5425 break;
5426 default:
5427 if (code == 'F' && ASSEMBLER_DIALECT == 0)
5428 fputs ("s", file);
5429 break;
5431 return;
5432 case 'G':
5433 pa_output_global_address (file, x, 0);
5434 return;
5435 case 'H':
5436 pa_output_global_address (file, x, 1);
5437 return;
5438 case 0: /* Don't do anything special */
5439 break;
5440 case 'Z':
5442 unsigned op[3];
5443 compute_zdepwi_operands (INTVAL (x), op);
5444 fprintf (file, "%d,%d,%d", op[0], op[1], op[2]);
5445 return;
5447 case 'z':
5449 unsigned op[3];
5450 compute_zdepdi_operands (INTVAL (x), op);
5451 fprintf (file, "%d,%d,%d", op[0], op[1], op[2]);
5452 return;
5454 case 'c':
5455 /* We can get here from a .vtable_inherit due to our
5456 CONSTANT_ADDRESS_P rejecting perfectly good constant
5457 addresses. */
5458 break;
5459 default:
5460 gcc_unreachable ();
5462 if (GET_CODE (x) == REG)
5464 fputs (reg_names [REGNO (x)], file);
5465 if (TARGET_64BIT && FP_REG_P (x) && GET_MODE_SIZE (GET_MODE (x)) <= 4)
5467 fputs ("R", file);
5468 return;
5470 if (FP_REG_P (x)
5471 && GET_MODE_SIZE (GET_MODE (x)) <= 4
5472 && (REGNO (x) & 1) == 0)
5473 fputs ("L", file);
5475 else if (GET_CODE (x) == MEM)
5477 int size = GET_MODE_SIZE (GET_MODE (x));
5478 rtx base = NULL_RTX;
5479 switch (GET_CODE (XEXP (x, 0)))
5481 case PRE_DEC:
5482 case POST_DEC:
5483 base = XEXP (XEXP (x, 0), 0);
5484 fprintf (file, "-%d(%s)", size, reg_names [REGNO (base)]);
5485 break;
5486 case PRE_INC:
5487 case POST_INC:
5488 base = XEXP (XEXP (x, 0), 0);
5489 fprintf (file, "%d(%s)", size, reg_names [REGNO (base)]);
5490 break;
5491 case PLUS:
5492 if (GET_CODE (XEXP (XEXP (x, 0), 0)) == MULT)
5493 fprintf (file, "%s(%s)",
5494 reg_names [REGNO (XEXP (XEXP (XEXP (x, 0), 0), 0))],
5495 reg_names [REGNO (XEXP (XEXP (x, 0), 1))]);
5496 else if (GET_CODE (XEXP (XEXP (x, 0), 1)) == MULT)
5497 fprintf (file, "%s(%s)",
5498 reg_names [REGNO (XEXP (XEXP (XEXP (x, 0), 1), 0))],
5499 reg_names [REGNO (XEXP (XEXP (x, 0), 0))]);
5500 else if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
5501 && GET_CODE (XEXP (XEXP (x, 0), 1)) == REG)
5503 /* Because the REG_POINTER flag can get lost during reload,
5504 pa_legitimate_address_p canonicalizes the order of the
5505 index and base registers in the combined move patterns. */
5506 rtx base = XEXP (XEXP (x, 0), 1);
5507 rtx index = XEXP (XEXP (x, 0), 0);
5509 fprintf (file, "%s(%s)",
5510 reg_names [REGNO (index)], reg_names [REGNO (base)]);
5512 else
5513 output_address (GET_MODE (x), XEXP (x, 0));
5514 break;
5515 default:
5516 output_address (GET_MODE (x), XEXP (x, 0));
5517 break;
5520 else
5521 output_addr_const (file, x);
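/* Usage sketch (hypothetical template, not quoted from pa.md):
   given operands[3] == (ge ...), an output template such as
   "comb,%S3 %2,%1,%0" prints the swapped condition "<=", while
   "%N3" would print the negated form "<".  */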
5524 /* output a SYMBOL_REF or a CONST expression involving a SYMBOL_REF. */
5526 void
5527 pa_output_global_address (FILE *file, rtx x, int round_constant)
5530 /* Imagine (high (const (plus ...))). */
5531 if (GET_CODE (x) == HIGH)
5532 x = XEXP (x, 0);
5534 if (GET_CODE (x) == SYMBOL_REF && read_only_operand (x, VOIDmode))
5535 output_addr_const (file, x);
5536 else if (GET_CODE (x) == SYMBOL_REF && !flag_pic)
5538 output_addr_const (file, x);
5539 fputs ("-$global$", file);
5541 else if (GET_CODE (x) == CONST)
5543 const char *sep = "";
5544 int offset = 0; /* assembler wants -$global$ at end */
5545 rtx base = NULL_RTX;
5547 switch (GET_CODE (XEXP (XEXP (x, 0), 0)))
5549 case LABEL_REF:
5550 case SYMBOL_REF:
5551 base = XEXP (XEXP (x, 0), 0);
5552 output_addr_const (file, base);
5553 break;
5554 case CONST_INT:
5555 offset = INTVAL (XEXP (XEXP (x, 0), 0));
5556 break;
5557 default:
5558 gcc_unreachable ();
5561 switch (GET_CODE (XEXP (XEXP (x, 0), 1)))
5563 case LABEL_REF:
5564 case SYMBOL_REF:
5565 base = XEXP (XEXP (x, 0), 1);
5566 output_addr_const (file, base);
5567 break;
5568 case CONST_INT:
5569 offset = INTVAL (XEXP (XEXP (x, 0), 1));
5570 break;
5571 default:
5572 gcc_unreachable ();
5575 /* How bogus. The compiler is apparently responsible for
5576 rounding the constant if it uses an LR field selector.
5578 The linker and/or assembler seem a better place since
5579 they have to do this kind of thing already.
5581 If we fail to do this, HP's optimizing linker may eliminate
5582 an addil, but not update the ldw/stw/ldo instruction that
5583 uses the result of the addil. */
5584 if (round_constant)
5585 offset = ((offset + 0x1000) & ~0x1fff);
5587 switch (GET_CODE (XEXP (x, 0)))
5589 case PLUS:
5590 if (offset < 0)
5592 offset = -offset;
5593 sep = "-";
5595 else
5596 sep = "+";
5597 break;
5599 case MINUS:
5600 gcc_assert (GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF);
5601 sep = "-";
5602 break;
5604 default:
5605 gcc_unreachable ();
5608 if (!read_only_operand (base, VOIDmode) && !flag_pic)
5609 fputs ("-$global$", file);
5610 if (offset)
5611 fprintf (file, "%s%d", sep, offset);
5613 else
5614 output_addr_const (file, x);
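/* Worked example of the LR-field rounding above: an offset of
   0x1800 becomes (0x1800 + 0x1000) & ~0x1fff == 0x2000, i.e. the
   nearest multiple of 0x2000, keeping the residual for the low
   part within reach of the ldw/stw/ldo displacement field.  */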
5617 /* Output boilerplate text to appear at the beginning of the file.
5618 There are several possible versions. */
5619 #define aputs(x) fputs(x, asm_out_file)
5620 static inline void
5621 pa_file_start_level (void)
5623 if (TARGET_64BIT)
5624 aputs ("\t.LEVEL 2.0w\n");
5625 else if (TARGET_PA_20)
5626 aputs ("\t.LEVEL 2.0\n");
5627 else if (TARGET_PA_11)
5628 aputs ("\t.LEVEL 1.1\n");
5629 else
5630 aputs ("\t.LEVEL 1.0\n");
5633 static inline void
5634 pa_file_start_space (int sortspace)
5636 aputs ("\t.SPACE $PRIVATE$");
5637 if (sortspace)
5638 aputs (",SORT=16");
5639 aputs ("\n\t.SUBSPA $DATA$,QUAD=1,ALIGN=8,ACCESS=31");
5640 if (flag_tm)
5641 aputs ("\n\t.SUBSPA $TM_CLONE_TABLE$,QUAD=1,ALIGN=8,ACCESS=31");
5642 aputs ("\n\t.SUBSPA $BSS$,QUAD=1,ALIGN=8,ACCESS=31,ZERO,SORT=82"
5643 "\n\t.SPACE $TEXT$");
5644 if (sortspace)
5645 aputs (",SORT=8");
5646 aputs ("\n\t.SUBSPA $LIT$,QUAD=0,ALIGN=8,ACCESS=44"
5647 "\n\t.SUBSPA $CODE$,QUAD=0,ALIGN=8,ACCESS=44,CODE_ONLY\n");
5650 static inline void
5651 pa_file_start_file (int want_version)
5653 if (write_symbols != NO_DEBUG)
5655 output_file_directive (asm_out_file, main_input_filename);
5656 if (want_version)
5657 aputs ("\t.version\t\"01.01\"\n");
5661 static inline void
5662 pa_file_start_mcount (const char *aswhat)
5664 if (profile_flag)
5665 fprintf (asm_out_file, "\t.IMPORT _mcount,%s\n", aswhat);
5668 static void
5669 pa_elf_file_start (void)
5671 pa_file_start_level ();
5672 pa_file_start_mcount ("ENTRY");
5673 pa_file_start_file (0);
5676 static void
5677 pa_som_file_start (void)
5679 pa_file_start_level ();
5680 pa_file_start_space (0);
5681 aputs ("\t.IMPORT $global$,DATA\n"
5682 "\t.IMPORT $$dyncall,MILLICODE\n");
5683 pa_file_start_mcount ("CODE");
5684 pa_file_start_file (0);
5687 static void
5688 pa_linux_file_start (void)
5690 pa_file_start_file (1);
5691 pa_file_start_level ();
5692 pa_file_start_mcount ("CODE");
5695 static void
5696 pa_hpux64_gas_file_start (void)
5698 pa_file_start_level ();
5699 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
5700 if (profile_flag)
5701 ASM_OUTPUT_TYPE_DIRECTIVE (asm_out_file, "_mcount", "function");
5702 #endif
5703 pa_file_start_file (1);
5706 static void
5707 pa_hpux64_hpas_file_start (void)
5709 pa_file_start_level ();
5710 pa_file_start_space (1);
5711 pa_file_start_mcount ("CODE");
5712 pa_file_start_file (0);
5714 #undef aputs
5716 /* Search the deferred plabel list for SYMBOL and return its internal
5717 label. If an entry for SYMBOL is not found, a new entry is created. */
5720 pa_get_deferred_plabel (rtx symbol)
5722 const char *fname = XSTR (symbol, 0);
5723 size_t i;
5725 /* See if we have already put this function on the list of deferred
5726 plabels. This list is generally small, so a linear search is not
5727 too ugly. If it proves too slow replace it with something faster. */
5728 for (i = 0; i < n_deferred_plabels; i++)
5729 if (strcmp (fname, XSTR (deferred_plabels[i].symbol, 0)) == 0)
5730 break;
5732 /* If the deferred plabel list is empty, or this entry was not found
5733 on the list, create a new entry on the list. */
5734 if (deferred_plabels == NULL || i == n_deferred_plabels)
5736 tree id;
5738 if (deferred_plabels == 0)
5739 deferred_plabels = ggc_alloc<deferred_plabel> ();
5740 else
5741 deferred_plabels = GGC_RESIZEVEC (struct deferred_plabel,
5742 deferred_plabels,
5743 n_deferred_plabels + 1);
5745 i = n_deferred_plabels++;
5746 deferred_plabels[i].internal_label = gen_label_rtx ();
5747 deferred_plabels[i].symbol = symbol;
5749 /* Gross. We have just implicitly taken the address of this
5750 function. Mark it in the same manner as assemble_name. */
5751 id = maybe_get_identifier (targetm.strip_name_encoding (fname));
5752 if (id)
5753 mark_referenced (id);
5756 return deferred_plabels[i].internal_label;
5759 static void
5760 output_deferred_plabels (void)
5762 size_t i;
5764 /* If we have some deferred plabels, then we need to switch into the
5765 data or readonly data section, and align it to a 4 byte boundary
5766 before outputting the deferred plabels. */
5767 if (n_deferred_plabels)
5769 switch_to_section (flag_pic ? data_section : readonly_data_section);
5770 ASM_OUTPUT_ALIGN (asm_out_file, TARGET_64BIT ? 3 : 2);
5773 /* Now output the deferred plabels. */
5774 for (i = 0; i < n_deferred_plabels; i++)
5776 targetm.asm_out.internal_label (asm_out_file, "L",
5777 CODE_LABEL_NUMBER (deferred_plabels[i].internal_label));
5778 assemble_integer (deferred_plabels[i].symbol,
5779 TARGET_64BIT ? 8 : 4, TARGET_64BIT ? 64 : 32, 1);
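/* Editor's sketch (illustration only): each deferred plabel emitted
   above is an internal label followed by one pointer-wide word naming
   the function, after aligning the section to that word size.  */
#if 0
static void
plabel_layout_sketch (int is_64bit, int *entry_bytes, int *align_log2)
{
  *entry_bytes = is_64bit ? 8 : 4;  /* size passed to assemble_integer */
  *align_log2 = is_64bit ? 3 : 2;   /* argument to ASM_OUTPUT_ALIGN */
}
#endif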
5783 /* Initialize optabs to point to emulation routines. */
5785 static void
5786 pa_init_libfuncs (void)
5788 if (HPUX_LONG_DOUBLE_LIBRARY)
5790 set_optab_libfunc (add_optab, TFmode, "_U_Qfadd");
5791 set_optab_libfunc (sub_optab, TFmode, "_U_Qfsub");
5792 set_optab_libfunc (smul_optab, TFmode, "_U_Qfmpy");
5793 set_optab_libfunc (sdiv_optab, TFmode, "_U_Qfdiv");
5794 set_optab_libfunc (smin_optab, TFmode, "_U_Qmin");
5795 set_optab_libfunc (smax_optab, TFmode, "_U_Qfmax");
5796 set_optab_libfunc (sqrt_optab, TFmode, "_U_Qfsqrt");
5797 set_optab_libfunc (abs_optab, TFmode, "_U_Qfabs");
5798 set_optab_libfunc (neg_optab, TFmode, "_U_Qfneg");
5800 set_optab_libfunc (eq_optab, TFmode, "_U_Qfeq");
5801 set_optab_libfunc (ne_optab, TFmode, "_U_Qfne");
5802 set_optab_libfunc (gt_optab, TFmode, "_U_Qfgt");
5803 set_optab_libfunc (ge_optab, TFmode, "_U_Qfge");
5804 set_optab_libfunc (lt_optab, TFmode, "_U_Qflt");
5805 set_optab_libfunc (le_optab, TFmode, "_U_Qfle");
5806 set_optab_libfunc (unord_optab, TFmode, "_U_Qfunord");
5808 set_conv_libfunc (sext_optab, TFmode, SFmode, "_U_Qfcnvff_sgl_to_quad");
5809 set_conv_libfunc (sext_optab, TFmode, DFmode, "_U_Qfcnvff_dbl_to_quad");
5810 set_conv_libfunc (trunc_optab, SFmode, TFmode, "_U_Qfcnvff_quad_to_sgl");
5811 set_conv_libfunc (trunc_optab, DFmode, TFmode, "_U_Qfcnvff_quad_to_dbl");
5813 set_conv_libfunc (sfix_optab, SImode, TFmode,
5814 TARGET_64BIT ? "__U_Qfcnvfxt_quad_to_sgl"
5815 : "_U_Qfcnvfxt_quad_to_sgl");
5816 set_conv_libfunc (sfix_optab, DImode, TFmode,
5817 "_U_Qfcnvfxt_quad_to_dbl");
5818 set_conv_libfunc (ufix_optab, SImode, TFmode,
5819 "_U_Qfcnvfxt_quad_to_usgl");
5820 set_conv_libfunc (ufix_optab, DImode, TFmode,
5821 "_U_Qfcnvfxt_quad_to_udbl");
5823 set_conv_libfunc (sfloat_optab, TFmode, SImode,
5824 "_U_Qfcnvxf_sgl_to_quad");
5825 set_conv_libfunc (sfloat_optab, TFmode, DImode,
5826 "_U_Qfcnvxf_dbl_to_quad");
5827 set_conv_libfunc (ufloat_optab, TFmode, SImode,
5828 "_U_Qfcnvxf_usgl_to_quad");
5829 set_conv_libfunc (ufloat_optab, TFmode, DImode,
5830 "_U_Qfcnvxf_udbl_to_quad");
5833 if (TARGET_SYNC_LIBCALL)
5834 init_sync_libfuncs (8);
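/* Editor's sketch (an assumption for illustration, not code from the
   port): once the optab entries above are installed, an ordinary
   HP-UX long double operation like this one is expanded as a libcall
   to _U_Qfadd instead of inline floating-point code.  */
#if 0
long double
quad_add_example (long double x, long double y)
{
  return x + y;		/* lowered to a call to _U_Qfadd */
}
#endif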
5837 /* HP's millicode routines mean something special to the assembler.
5838 Keep track of which ones we have used. */
5840 enum millicodes { remI, remU, divI, divU, mulI, end1000 };
5841 static void import_milli (enum millicodes);
5842 static char imported[(int) end1000];
5843 static const char * const milli_names[] = {"remI", "remU", "divI", "divU", "mulI"};
5844 static const char import_string[] = ".IMPORT $$....,MILLICODE";
5845 #define MILLI_START 10
5847 static void
5848 import_milli (enum millicodes code)
5850 char str[sizeof (import_string)];
5852 if (!imported[(int) code])
5854 imported[(int) code] = 1;
5855 strcpy (str, import_string);
5856 strncpy (str + MILLI_START, milli_names[(int) code], 4);
5857 output_asm_insn (str, 0);
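/* Editor's sketch: a standalone demonstration of the string surgery
   done by import_milli above.  The four-character millicode name is
   written over the "...." placeholder at offset MILLI_START (10) in
   the template.  Illustration only.  */
#if 0
#include <stdio.h>
#include <string.h>

int
main (void)
{
  static const char tmpl[] = ".IMPORT $$....,MILLICODE";
  char str[sizeof (tmpl)];

  strcpy (str, tmpl);
  strncpy (str + 10, "mulI", 4);	/* MILLI_START == 10 */
  puts (str);				/* .IMPORT $$mulI,MILLICODE */
  return 0;
}
#endif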
5861 /* The register constraints have put the operands and return value in
5862 the proper registers. */
5864 const char *
5865 pa_output_mul_insn (int unsignedp ATTRIBUTE_UNUSED, rtx_insn *insn)
5867 import_milli (mulI);
5868 return pa_output_millicode_call (insn, gen_rtx_SYMBOL_REF (Pmode, "$$mulI"));
5871 /* Emit the rtl for doing a division by a constant. */
5873 /* Do magic division millicodes exist for this value? */
5874 const int pa_magic_milli[]= {0, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1};
5876 /* We'll use an array to keep track of the magic millicodes and
5877 whether or not we've used them already. [n][0] is signed, [n][1] is
5878 unsigned. */
5880 static int div_milli[16][2];
5882 int
5883 pa_emit_hpdiv_const (rtx *operands, int unsignedp)
5885 if (GET_CODE (operands[2]) == CONST_INT
5886 && INTVAL (operands[2]) > 0
5887 && INTVAL (operands[2]) < 16
5888 && pa_magic_milli[INTVAL (operands[2])])
5890 rtx ret = gen_rtx_REG (SImode, TARGET_64BIT ? 2 : 31);
5892 emit_move_insn (gen_rtx_REG (SImode, 26), operands[1]);
5893 emit
5894 (gen_rtx_PARALLEL
5895 (VOIDmode,
5896 gen_rtvec (6, gen_rtx_SET (gen_rtx_REG (SImode, 29),
5897 gen_rtx_fmt_ee (unsignedp ? UDIV : DIV,
5898 SImode,
5899 gen_rtx_REG (SImode, 26),
5900 operands[2])),
5901 gen_rtx_CLOBBER (VOIDmode, operands[4]),
5902 gen_rtx_CLOBBER (VOIDmode, operands[3]),
5903 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 26)),
5904 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (SImode, 25)),
5905 gen_rtx_CLOBBER (VOIDmode, ret))));
5906 emit_move_insn (operands[0], gen_rtx_REG (SImode, 29));
5907 return 1;
5909 return 0;
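/* Editor's sketch: the magic-divisor test above as a standalone
   predicate.  Divisors 3, 5, 6, 7, 9, 10, 12, 14 and 15 have dedicated
   millicode entry points; all other divisors fall back to the generic
   $$divI/$$divU routines.  Illustration only.  */
#if 0
static int
has_magic_millicode (long divisor)
{
  static const int magic[16]
    = {0, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 1};

  return divisor > 0 && divisor < 16 && magic[divisor];
}
#endif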
5912 const char *
5913 pa_output_div_insn (rtx *operands, int unsignedp, rtx_insn *insn)
5915 int divisor;
5917 /* If the divisor is a constant, try to use one of the special
5918 opcodes. */
5919 if (GET_CODE (operands[0]) == CONST_INT)
5921 static char buf[100];
5922 divisor = INTVAL (operands[0]);
5923 if (!div_milli[divisor][unsignedp])
5925 div_milli[divisor][unsignedp] = 1;
5926 if (unsignedp)
5927 output_asm_insn (".IMPORT $$divU_%0,MILLICODE", operands);
5928 else
5929 output_asm_insn (".IMPORT $$divI_%0,MILLICODE", operands);
5931 if (unsignedp)
5933 sprintf (buf, "$$divU_" HOST_WIDE_INT_PRINT_DEC,
5934 INTVAL (operands[0]));
5935 return pa_output_millicode_call (insn,
5936 gen_rtx_SYMBOL_REF (SImode, buf));
5938 else
5940 sprintf (buf, "$$divI_" HOST_WIDE_INT_PRINT_DEC,
5941 INTVAL (operands[0]));
5942 return pa_output_millicode_call (insn,
5943 gen_rtx_SYMBOL_REF (SImode, buf));
5946 /* Divisor isn't a special constant. */
5947 else
5949 if (unsignedp)
5951 import_milli (divU);
5952 return pa_output_millicode_call (insn,
5953 gen_rtx_SYMBOL_REF (SImode, "$$divU"));
5955 else
5957 import_milli (divI);
5958 return pa_output_millicode_call (insn,
5959 gen_rtx_SYMBOL_REF (SImode, "$$divI"));
5964 /* Output a $$rem millicode to do mod. */
5966 const char *
5967 pa_output_mod_insn (int unsignedp, rtx_insn *insn)
5969 if (unsignedp)
5971 import_milli (remU);
5972 return pa_output_millicode_call (insn,
5973 gen_rtx_SYMBOL_REF (SImode, "$$remU"));
5975 else
5977 import_milli (remI);
5978 return pa_output_millicode_call (insn,
5979 gen_rtx_SYMBOL_REF (SImode, "$$remI"));
5983 void
5984 pa_output_arg_descriptor (rtx_insn *call_insn)
5986 const char *arg_regs[4];
5987 machine_mode arg_mode;
5988 rtx link;
5989 int i, output_flag = 0;
5990 int regno;
5992 /* We neither need nor want argument location descriptors for the
5993 64-bit runtime environment or the ELF32 environment. */
5994 if (TARGET_64BIT || TARGET_ELF32)
5995 return;
5997 for (i = 0; i < 4; i++)
5998 arg_regs[i] = 0;
6000 /* Specify explicitly that no argument relocations should take place
6001 if using the portable runtime calling conventions. */
6002 if (TARGET_PORTABLE_RUNTIME)
6004 fputs ("\t.CALL ARGW0=NO,ARGW1=NO,ARGW2=NO,ARGW3=NO,RETVAL=NO\n",
6005 asm_out_file);
6006 return;
6009 gcc_assert (CALL_P (call_insn));
6010 for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
6011 link; link = XEXP (link, 1))
6013 rtx use = XEXP (link, 0);
6015 if (! (GET_CODE (use) == USE
6016 && GET_CODE (XEXP (use, 0)) == REG
6017 && FUNCTION_ARG_REGNO_P (REGNO (XEXP (use, 0)))))
6018 continue;
6020 arg_mode = GET_MODE (XEXP (use, 0));
6021 regno = REGNO (XEXP (use, 0));
6022 if (regno >= 23 && regno <= 26)
6024 arg_regs[26 - regno] = "GR";
6025 if (arg_mode == DImode)
6026 arg_regs[25 - regno] = "GR";
6028 else if (regno >= 32 && regno <= 39)
6030 if (arg_mode == SFmode)
6031 arg_regs[(regno - 32) / 2] = "FR";
6032 else
6034 #ifndef HP_FP_ARG_DESCRIPTOR_REVERSED
6035 arg_regs[(regno - 34) / 2] = "FR";
6036 arg_regs[(regno - 34) / 2 + 1] = "FU";
6037 #else
6038 arg_regs[(regno - 34) / 2] = "FU";
6039 arg_regs[(regno - 34) / 2 + 1] = "FR";
6040 #endif
6044 fputs ("\t.CALL ", asm_out_file);
6045 for (i = 0; i < 4; i++)
6047 if (arg_regs[i])
6049 if (output_flag++)
6050 fputc (',', asm_out_file);
6051 fprintf (asm_out_file, "ARGW%d=%s", i, arg_regs[i]);
6054 fputc ('\n', asm_out_file);
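/* Editor's sketch: the general-register half of the mapping above.
   Argument word N of the 32-bit runtime lives in register %r(26 - N),
   so %r26 fills ARGW0, %r25 fills ARGW1, and a DImode argument also
   claims the next word.  Illustration only.  */
#if 0
static void
note_gr_arg_sketch (const char *arg_regs[4], int regno, int is_dimode)
{
  if (regno >= 23 && regno <= 26)
    {
      arg_regs[26 - regno] = "GR";
      if (is_dimode)
	arg_regs[25 - regno] = "GR";
    }
}
#endif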
6057 /* Inform reload about cases where moving X with a mode MODE to or from
6058 a register in RCLASS requires an extra scratch or immediate register.
6059 Return the class needed for the immediate register. */
6061 static reg_class_t
6062 pa_secondary_reload (bool in_p, rtx x, reg_class_t rclass_i,
6063 machine_mode mode, secondary_reload_info *sri)
6065 int regno;
6066 enum reg_class rclass = (enum reg_class) rclass_i;
6068 /* Handle the easy stuff first. */
6069 if (rclass == R1_REGS)
6070 return NO_REGS;
6072 if (REG_P (x))
6074 regno = REGNO (x);
6075 if (rclass == BASE_REG_CLASS && regno < FIRST_PSEUDO_REGISTER)
6076 return NO_REGS;
6078 else
6079 regno = -1;
6081 /* If we have something like (mem (mem (...)), we can safely assume the
6082 inner MEM will end up in a general register after reloading, so there's
6083 no need for a secondary reload. */
6084 if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == MEM)
6085 return NO_REGS;
6087 /* Trying to load a constant into a FP register during PIC code
6088 generation requires %r1 as a scratch register. For float modes,
6089 the only legitimate constant is CONST0_RTX. However, there are
6090 a few patterns that accept constant double operands. */
6091 if (flag_pic
6092 && FP_REG_CLASS_P (rclass)
6093 && (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE))
6095 switch (mode)
6097 case E_SImode:
6098 sri->icode = CODE_FOR_reload_insi_r1;
6099 break;
6101 case E_DImode:
6102 sri->icode = CODE_FOR_reload_indi_r1;
6103 break;
6105 case E_SFmode:
6106 sri->icode = CODE_FOR_reload_insf_r1;
6107 break;
6109 case E_DFmode:
6110 sri->icode = CODE_FOR_reload_indf_r1;
6111 break;
6113 default:
6114 gcc_unreachable ();
6116 return NO_REGS;
6119 /* Secondary reloads of symbolic expressions require %r1 as a scratch
6120 register when we're generating PIC code or when the operand isn't
6121 readonly. */
6122 if (pa_symbolic_expression_p (x))
6124 if (GET_CODE (x) == HIGH)
6125 x = XEXP (x, 0);
6127 if (flag_pic || !read_only_operand (x, VOIDmode))
6129 switch (mode)
6131 case E_SImode:
6132 sri->icode = CODE_FOR_reload_insi_r1;
6133 break;
6135 case E_DImode:
6136 sri->icode = CODE_FOR_reload_indi_r1;
6137 break;
6139 default:
6140 gcc_unreachable ();
6142 return NO_REGS;
6146 /* Profiling showed the PA port spends about 1.3% of its compilation
6147 time in true_regnum from calls inside pa_secondary_reload_class. */
6148 if (regno >= FIRST_PSEUDO_REGISTER || GET_CODE (x) == SUBREG)
6149 regno = true_regnum (x);
6151 /* Handle reloads for floating point loads and stores. */
6152 if ((regno >= FIRST_PSEUDO_REGISTER || regno == -1)
6153 && FP_REG_CLASS_P (rclass))
6155 if (MEM_P (x))
6157 x = XEXP (x, 0);
6159 /* We don't need a secondary reload for indexed memory addresses.
6161 When INT14_OK_STRICT is true, it might appear that we could
6162 directly allow register indirect memory addresses. However,
6163 this doesn't work because we don't support SUBREGs in
6164 floating-point register copies and reload doesn't tell us
6165 when it's going to use a SUBREG. */
6166 if (IS_INDEX_ADDR_P (x))
6167 return NO_REGS;
6170 /* Request a secondary reload with a general scratch register
6171 for everything else. ??? Could symbolic operands be handled
6172 directly when generating non-pic PA 2.0 code? */
6173 sri->icode = (in_p
6174 ? direct_optab_handler (reload_in_optab, mode)
6175 : direct_optab_handler (reload_out_optab, mode));
6176 return NO_REGS;
6179 /* A SAR<->FP register copy requires an intermediate general register
6180 and secondary memory. We need a secondary reload with a general
6181 scratch register for spills. */
6182 if (rclass == SHIFT_REGS)
6184 /* Handle spill. */
6185 if (regno >= FIRST_PSEUDO_REGISTER || regno < 0)
6187 sri->icode = (in_p
6188 ? direct_optab_handler (reload_in_optab, mode)
6189 : direct_optab_handler (reload_out_optab, mode));
6190 return NO_REGS;
6193 /* Handle FP copy. */
6194 if (FP_REG_CLASS_P (REGNO_REG_CLASS (regno)))
6195 return GENERAL_REGS;
6198 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER
6199 && REGNO_REG_CLASS (regno) == SHIFT_REGS
6200 && FP_REG_CLASS_P (rclass))
6201 return GENERAL_REGS;
6203 return NO_REGS;
6206 /* Implement TARGET_SECONDARY_MEMORY_NEEDED. */
6208 static bool
6209 pa_secondary_memory_needed (machine_mode mode ATTRIBUTE_UNUSED,
6210 reg_class_t class1 ATTRIBUTE_UNUSED,
6211 reg_class_t class2 ATTRIBUTE_UNUSED)
6213 #ifdef PA_SECONDARY_MEMORY_NEEDED
6214 return PA_SECONDARY_MEMORY_NEEDED (mode, class1, class2);
6215 #else
6216 return false;
6217 #endif
6220 /* Implement TARGET_EXTRA_LIVE_ON_ENTRY. The argument pointer
6221 is only marked as live on entry by df-scan when it is a fixed
6222 register. It isn't a fixed register in the 64-bit runtime,
6223 so we need to mark it here. */
6225 static void
6226 pa_extra_live_on_entry (bitmap regs)
6228 if (TARGET_64BIT)
6229 bitmap_set_bit (regs, ARG_POINTER_REGNUM);
6232 /* Implement EH_RETURN_HANDLER_RTX. The MEM needs to be volatile
6233 to prevent it from being deleted. */
6235 rtx
6236 pa_eh_return_handler_rtx (void)
6238 rtx tmp;
6240 tmp = gen_rtx_PLUS (word_mode, hard_frame_pointer_rtx,
6241 TARGET_64BIT ? GEN_INT (-16) : GEN_INT (-20));
6242 tmp = gen_rtx_MEM (word_mode, tmp);
6243 tmp->volatil = 1;
6244 return tmp;
6247 /* In the 32-bit runtime, arguments larger than eight bytes are passed
6248 by invisible reference. As a GCC extension, we also pass anything
6249 with a zero or variable size by reference.
6251 The 64-bit runtime does not describe passing any types by invisible
6252 reference. The internals of GCC can't currently handle passing
6253 empty structures, and zero or variable length arrays when they are
6254 not passed entirely on the stack or by reference. Thus, as a GCC
6255 extension, we pass these types by reference. The HP compiler doesn't
6256 support these types, so hopefully there shouldn't be any compatibility
6257 issues. This may have to be revisited when HP releases a C99 compiler
6258 or updates the ABI. */
6260 static bool
6261 pa_pass_by_reference (cumulative_args_t ca ATTRIBUTE_UNUSED,
6262 machine_mode mode, const_tree type,
6263 bool named ATTRIBUTE_UNUSED)
6265 HOST_WIDE_INT size;
6267 if (type)
6268 size = int_size_in_bytes (type);
6269 else
6270 size = GET_MODE_SIZE (mode);
6272 if (TARGET_64BIT)
6273 return size <= 0;
6274 else
6275 return size <= 0 || size > 8;
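/* Editor's sketch: the rule above as a standalone predicate.  Both
   runtimes pass zero and variable-sized objects by invisible
   reference; the 32-bit runtime additionally passes anything wider
   than eight bytes that way.  Illustration only.  */
#if 0
static int
by_invisible_reference_sketch (long size, int is_64bit)
{
  if (size <= 0)
    return 1;			/* zero or variable size */
  return is_64bit ? 0 : size > 8;
}
#endif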
6278 /* Implement TARGET_FUNCTION_ARG_PADDING. */
6280 static pad_direction
6281 pa_function_arg_padding (machine_mode mode, const_tree type)
6283 if (mode == BLKmode
6284 || (TARGET_64BIT
6285 && type
6286 && (AGGREGATE_TYPE_P (type)
6287 || TREE_CODE (type) == COMPLEX_TYPE
6288 || TREE_CODE (type) == VECTOR_TYPE)))
6290 /* Return PAD_NONE if justification is not required. */
6291 if (type
6292 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6293 && (int_size_in_bytes (type) * BITS_PER_UNIT) % PARM_BOUNDARY == 0)
6294 return PAD_NONE;
6296 /* The directions set here are ignored when a BLKmode argument larger
6297 than a word is placed in a register. Different code is used for
6298 the stack and registers. This makes it difficult to have a
6299 consistent data representation for both the stack and registers.
6300 For both runtimes, the justification and padding for arguments on
6301 the stack and in registers should be identical. */
6302 if (TARGET_64BIT)
6303 /* The 64-bit runtime specifies left justification for aggregates. */
6304 return PAD_UPWARD;
6305 else
6306 /* The 32-bit runtime architecture specifies right justification.
6307 When the argument is passed on the stack, the argument is padded
6308 with garbage on the left. The HP compiler pads with zeros. */
6309 return PAD_DOWNWARD;
6312 if (GET_MODE_BITSIZE (mode) < PARM_BOUNDARY)
6313 return PAD_DOWNWARD;
6314 else
6315 return PAD_NONE;
6319 /* Do what is necessary for `va_start'. We look at the current function
6320 to determine if stdargs or varargs is used and fill in an initial
6321 va_list. A pointer to this constructor is returned. */
6323 static rtx
6324 hppa_builtin_saveregs (void)
6326 rtx offset, dest;
6327 tree fntype = TREE_TYPE (current_function_decl);
6328 int argadj = ((!stdarg_p (fntype))
6329 ? UNITS_PER_WORD : 0);
6331 if (argadj)
6332 offset = plus_constant (Pmode, crtl->args.arg_offset_rtx, argadj);
6333 else
6334 offset = crtl->args.arg_offset_rtx;
6336 if (TARGET_64BIT)
6338 int i, off;
6340 /* Adjust for varargs/stdarg differences. */
6341 if (argadj)
6342 offset = plus_constant (Pmode, crtl->args.arg_offset_rtx, -argadj);
6343 else
6344 offset = crtl->args.arg_offset_rtx;
6346 /* We need to save %r26 .. %r19 inclusive starting at offset -64
6347 from the incoming arg pointer and growing to larger addresses. */
6348 for (i = 26, off = -64; i >= 19; i--, off += 8)
6349 emit_move_insn (gen_rtx_MEM (word_mode,
6350 plus_constant (Pmode,
6351 arg_pointer_rtx, off)),
6352 gen_rtx_REG (word_mode, i));
6354 /* The incoming args pointer points just beyond the flushback area;
6355 normally this is not a serious concern. However, when we are doing
6356 varargs/stdargs we want to make the arg pointer point to the start
6357 of the incoming argument area. */
6358 emit_move_insn (virtual_incoming_args_rtx,
6359 plus_constant (Pmode, arg_pointer_rtx, -64));
6361 /* Now return a pointer to the first anonymous argument. */
6362 return copy_to_reg (expand_binop (Pmode, add_optab,
6363 virtual_incoming_args_rtx,
6364 offset, 0, 0, OPTAB_LIB_WIDEN));
6367 /* Store general registers on the stack. */
6368 dest = gen_rtx_MEM (BLKmode,
6369 plus_constant (Pmode, crtl->args.internal_arg_pointer,
6370 -16));
6371 set_mem_alias_set (dest, get_varargs_alias_set ());
6372 set_mem_align (dest, BITS_PER_WORD);
6373 move_block_from_reg (23, dest, 4);
6375 /* move_block_from_reg will emit code to store the argument registers
6376 individually as scalar stores.
6378 However, other insns may later load from the same addresses for
6379 a structure load (passing a struct to a varargs routine).
6381 The alias code assumes that such aliasing can never happen, so we
6382 have to keep memory referencing insns from moving up beyond the
6383 last argument register store. So we emit a blockage insn here. */
6384 emit_insn (gen_blockage ());
6386 return copy_to_reg (expand_binop (Pmode, add_optab,
6387 crtl->args.internal_arg_pointer,
6388 offset, 0, 0, OPTAB_LIB_WIDEN));
6391 static void
6392 hppa_va_start (tree valist, rtx nextarg)
6394 nextarg = expand_builtin_saveregs ();
6395 std_expand_builtin_va_start (valist, nextarg);
6398 static tree
6399 hppa_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
6400 gimple_seq *post_p)
6402 if (TARGET_64BIT)
6404 /* Args grow upward. We can use the generic routines. */
6405 return std_gimplify_va_arg_expr (valist, type, pre_p, post_p);
6407 else /* !TARGET_64BIT */
6409 tree ptr = build_pointer_type (type);
6410 tree valist_type;
6411 tree t, u;
6412 unsigned int size, ofs;
6413 bool indirect;
6415 indirect = pass_by_reference (NULL, TYPE_MODE (type), type, 0);
6416 if (indirect)
6418 type = ptr;
6419 ptr = build_pointer_type (type);
6421 size = int_size_in_bytes (type);
6422 valist_type = TREE_TYPE (valist);
6424 /* Args grow down. Not handled by generic routines. */
6426 u = fold_convert (sizetype, size_in_bytes (type));
6427 u = fold_build1 (NEGATE_EXPR, sizetype, u);
6428 t = fold_build_pointer_plus (valist, u);
6430 /* Align to 4 or 8 byte boundary depending on argument size. */
6432 u = build_int_cst (TREE_TYPE (t), (HOST_WIDE_INT)(size > 4 ? -8 : -4));
6433 t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t, u);
6434 t = fold_convert (valist_type, t);
6436 t = build2 (MODIFY_EXPR, valist_type, valist, t);
6438 ofs = (8 - size) % 4;
6439 if (ofs != 0)
6440 t = fold_build_pointer_plus_hwi (t, ofs);
6442 t = fold_convert (ptr, t);
6443 t = build_va_arg_indirect_ref (t);
6445 if (indirect)
6446 t = build_va_arg_indirect_ref (t);
6448 return t;
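/* Editor's sketch of the 32-bit address arithmetic built above: the
   argument area grows downward, the new valist is aligned to 4 or 8
   bytes according to the argument size, and arguments narrower than a
   word are right-justified within it.  Only sizes of at most eight
   bytes reach this point; larger objects were passed by reference.
   Illustration only.  */
#if 0
static char *
next_va_arg_address_sketch (char *valist, unsigned int size)
{
  unsigned long a = (unsigned long) valist - size;

  a &= size > 4 ? ~7UL : ~3UL;		/* align the new valist */
  return (char *) a + (8 - size) % 4;	/* right-justify small args */
}
#endif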
6452 /* True if MODE is valid for the target. By "valid", we mean able to
6453 be manipulated in non-trivial ways. In particular, this means all
6454 the arithmetic is supported.
6456 Currently, TImode is not valid as the HP 64-bit runtime documentation
6457 doesn't document the alignment and calling conventions for this type.
6458 Thus, we return false when PRECISION is 2 * BITS_PER_WORD and
6459 2 * BITS_PER_WORD isn't equal to LONG_LONG_TYPE_SIZE. */
6461 static bool
6462 pa_scalar_mode_supported_p (scalar_mode mode)
6464 int precision = GET_MODE_PRECISION (mode);
6466 switch (GET_MODE_CLASS (mode))
6468 case MODE_PARTIAL_INT:
6469 case MODE_INT:
6470 if (precision == CHAR_TYPE_SIZE)
6471 return true;
6472 if (precision == SHORT_TYPE_SIZE)
6473 return true;
6474 if (precision == INT_TYPE_SIZE)
6475 return true;
6476 if (precision == LONG_TYPE_SIZE)
6477 return true;
6478 if (precision == LONG_LONG_TYPE_SIZE)
6479 return true;
6480 return false;
6482 case MODE_FLOAT:
6483 if (precision == FLOAT_TYPE_SIZE)
6484 return true;
6485 if (precision == DOUBLE_TYPE_SIZE)
6486 return true;
6487 if (precision == LONG_DOUBLE_TYPE_SIZE)
6488 return true;
6489 return false;
6491 case MODE_DECIMAL_FLOAT:
6492 return false;
6494 default:
6495 gcc_unreachable ();
6499 /* Return TRUE if INSN, a jump insn, has an unfilled delay slot and
6500 it branches into the delay slot. Otherwise, return FALSE. */
6502 static bool
6503 branch_to_delay_slot_p (rtx_insn *insn)
6505 rtx_insn *jump_insn;
6507 if (dbr_sequence_length ())
6508 return FALSE;
6510 jump_insn = next_active_insn (JUMP_LABEL_AS_INSN (insn));
6511 while (insn)
6513 insn = next_active_insn (insn);
6514 if (jump_insn == insn)
6515 return TRUE;
6517 /* We can't rely on the length of asms. So, we return FALSE when
6518 the branch is followed by an asm. */
6519 if (!insn
6520 || GET_CODE (PATTERN (insn)) == ASM_INPUT
6521 || asm_noperands (PATTERN (insn)) >= 0
6522 || get_attr_length (insn) > 0)
6523 break;
6526 return FALSE;
6529 /* Return TRUE if INSN, a forward jump insn, needs a nop in its delay slot.
6531 This occurs when INSN has an unfilled delay slot and is followed
6532 by an asm. Disaster can occur if the asm is empty and the jump
6533 branches into the delay slot. So, we add a nop in the delay slot
6534 when this occurs. */
6536 static bool
6537 branch_needs_nop_p (rtx_insn *insn)
6539 rtx_insn *jump_insn;
6541 if (dbr_sequence_length ())
6542 return FALSE;
6544 jump_insn = next_active_insn (JUMP_LABEL_AS_INSN (insn));
6545 while (insn)
6547 insn = next_active_insn (insn);
6548 if (!insn || jump_insn == insn)
6549 return TRUE;
6551 if (!(GET_CODE (PATTERN (insn)) == ASM_INPUT
6552 || asm_noperands (PATTERN (insn)) >= 0)
6553 && get_attr_length (insn) > 0)
6554 break;
6557 return FALSE;
6560 /* Return TRUE if INSN, a forward jump insn, can use nullification
6561 to skip the following instruction. This avoids an extra cycle due
6562 to a mis-predicted branch when we fall through. */
6564 static bool
6565 use_skip_p (rtx_insn *insn)
6567 rtx_insn *jump_insn = next_active_insn (JUMP_LABEL_AS_INSN (insn));
6569 while (insn)
6571 insn = next_active_insn (insn);
6573 /* We can't rely on the length of asms, so we can't skip asms. */
6574 if (!insn
6575 || GET_CODE (PATTERN (insn)) == ASM_INPUT
6576 || asm_noperands (PATTERN (insn)) >= 0)
6577 break;
6578 if (get_attr_length (insn) == 4
6579 && jump_insn == next_active_insn (insn))
6580 return TRUE;
6581 if (get_attr_length (insn) > 0)
6582 break;
6585 return FALSE;
6588 /* This routine handles all the normal conditional branch sequences we
6589 might need to generate. It handles compare immediate vs compare
6590 register, nullification of delay slots, varying length branches,
6591 negated branches, and all combinations of the above. It returns the
6592 output appropriate to emit the branch corresponding to all given
6593 parameters. */
6595 const char *
6596 pa_output_cbranch (rtx *operands, int negated, rtx_insn *insn)
6598 static char buf[100];
6599 bool useskip;
6600 int nullify = INSN_ANNULLED_BRANCH_P (insn);
6601 int length = get_attr_length (insn);
6602 int xdelay;
6604 /* A conditional branch to the following instruction (e.g. the delay slot)
6605 is asking for a disaster. This can happen when not optimizing and
6606 when jump optimization fails.
6608 While it is usually safe to emit nothing, this can fail if the
6609 preceding instruction is a nullified branch with an empty delay
6610 slot and the same branch target as this branch. We could check
6611 for this but jump optimization should eliminate nop jumps. It
6612 is always safe to emit a nop. */
6613 if (branch_to_delay_slot_p (insn))
6614 return "nop";
6616 /* The doubleword form of the cmpib instruction doesn't have the LEU
6617 and GTU conditions while the cmpb instruction does. Since we accept
6618 zero for cmpb, we must ensure that we use cmpb for the comparison. */
6619 if (GET_MODE (operands[1]) == DImode && operands[2] == const0_rtx)
6620 operands[2] = gen_rtx_REG (DImode, 0);
6621 if (GET_MODE (operands[2]) == DImode && operands[1] == const0_rtx)
6622 operands[1] = gen_rtx_REG (DImode, 0);
6624 /* If this is a long branch with its delay slot unfilled, set `nullify'
6625 as it can nullify the delay slot and save a nop. */
6626 if (length == 8 && dbr_sequence_length () == 0)
6627 nullify = 1;
6629 /* If this is a short forward conditional branch which did not get
6630 its delay slot filled, the delay slot can still be nullified. */
6631 if (! nullify && length == 4 && dbr_sequence_length () == 0)
6632 nullify = forward_branch_p (insn);
6634 /* A forward branch over a single nullified insn can be done with a
6635 comclr instruction. This avoids a single cycle penalty due to a
6636 mis-predicted branch if we fall through (branch not taken). */
6637 useskip = (length == 4 && nullify) ? use_skip_p (insn) : FALSE;
6639 switch (length)
6641 /* All short conditional branches except backwards with an unfilled
6642 delay slot. */
6643 case 4:
6644 if (useskip)
6645 strcpy (buf, "{com%I2clr,|cmp%I2clr,}");
6646 else
6647 strcpy (buf, "{com%I2b,|cmp%I2b,}");
6648 if (GET_MODE (operands[1]) == DImode)
6649 strcat (buf, "*");
6650 if (negated)
6651 strcat (buf, "%B3");
6652 else
6653 strcat (buf, "%S3");
6654 if (useskip)
6655 strcat (buf, " %2,%r1,%%r0");
6656 else if (nullify)
6658 if (branch_needs_nop_p (insn))
6659 strcat (buf, ",n %2,%r1,%0%#");
6660 else
6661 strcat (buf, ",n %2,%r1,%0");
6663 else
6664 strcat (buf, " %2,%r1,%0");
6665 break;
6667 /* All long conditionals. Note a short backward branch with an
6668 unfilled delay slot is treated just like a long backward branch
6669 with an unfilled delay slot. */
6670 case 8:
6671 /* Handle weird backwards branch with a filled delay slot
6672 which is nullified. */
6673 if (dbr_sequence_length () != 0
6674 && ! forward_branch_p (insn)
6675 && nullify)
6677 strcpy (buf, "{com%I2b,|cmp%I2b,}");
6678 if (GET_MODE (operands[1]) == DImode)
6679 strcat (buf, "*");
6680 if (negated)
6681 strcat (buf, "%S3");
6682 else
6683 strcat (buf, "%B3");
6684 strcat (buf, ",n %2,%r1,.+12\n\tb %0");
6686 /* Handle short backwards branch with an unfilled delay slot.
6687 Using a comb;nop rather than comiclr;bl saves 1 cycle for both
6688 taken and untaken branches. */
6689 else if (dbr_sequence_length () == 0
6690 && ! forward_branch_p (insn)
6691 && INSN_ADDRESSES_SET_P ()
6692 && VAL_14_BITS_P (INSN_ADDRESSES (INSN_UID (JUMP_LABEL (insn)))
6693 - INSN_ADDRESSES (INSN_UID (insn)) - 8))
6695 strcpy (buf, "{com%I2b,|cmp%I2b,}");
6696 if (GET_MODE (operands[1]) == DImode)
6697 strcat (buf, "*");
6698 if (negated)
6699 strcat (buf, "%B3 %2,%r1,%0%#");
6700 else
6701 strcat (buf, "%S3 %2,%r1,%0%#");
6703 else
6705 strcpy (buf, "{com%I2clr,|cmp%I2clr,}");
6706 if (GET_MODE (operands[1]) == DImode)
6707 strcat (buf, "*");
6708 if (negated)
6709 strcat (buf, "%S3");
6710 else
6711 strcat (buf, "%B3");
6712 if (nullify)
6713 strcat (buf, " %2,%r1,%%r0\n\tb,n %0");
6714 else
6715 strcat (buf, " %2,%r1,%%r0\n\tb %0");
6717 break;
6719 default:
6720 /* The reversed conditional branch must branch over one additional
6721 instruction if the delay slot is filled and needs to be extracted
6722 by pa_output_lbranch. If the delay slot is empty or this is a
6723 nullified forward branch, the instruction after the reversed
6724 condition branch must be nullified. */
6725 if (dbr_sequence_length () == 0
6726 || (nullify && forward_branch_p (insn)))
6728 nullify = 1;
6729 xdelay = 0;
6730 operands[4] = GEN_INT (length);
6732 else
6734 xdelay = 1;
6735 operands[4] = GEN_INT (length + 4);
6738 /* Create a reversed conditional branch which branches around
6739 the following insns. */
6740 if (GET_MODE (operands[1]) != DImode)
6742 if (nullify)
6744 if (negated)
6745 strcpy (buf,
6746 "{com%I2b,%S3,n %2,%r1,.+%4|cmp%I2b,%S3,n %2,%r1,.+%4}");
6747 else
6748 strcpy (buf,
6749 "{com%I2b,%B3,n %2,%r1,.+%4|cmp%I2b,%B3,n %2,%r1,.+%4}");
6751 else
6753 if (negated)
6754 strcpy (buf,
6755 "{com%I2b,%S3 %2,%r1,.+%4|cmp%I2b,%S3 %2,%r1,.+%4}");
6756 else
6757 strcpy (buf,
6758 "{com%I2b,%B3 %2,%r1,.+%4|cmp%I2b,%B3 %2,%r1,.+%4}");
6761 else
6763 if (nullify)
6765 if (negated)
6766 strcpy (buf,
6767 "{com%I2b,*%S3,n %2,%r1,.+%4|cmp%I2b,*%S3,n %2,%r1,.+%4}");
6768 else
6769 strcpy (buf,
6770 "{com%I2b,*%B3,n %2,%r1,.+%4|cmp%I2b,*%B3,n %2,%r1,.+%4}");
6772 else
6774 if (negated)
6775 strcpy (buf,
6776 "{com%I2b,*%S3 %2,%r1,.+%4|cmp%I2b,*%S3 %2,%r1,.+%4}");
6777 else
6778 strcpy (buf,
6779 "{com%I2b,*%B3 %2,%r1,.+%4|cmp%I2b,*%B3 %2,%r1,.+%4}");
6783 output_asm_insn (buf, operands);
6784 return pa_output_lbranch (operands[0], insn, xdelay);
6786 return buf;
6789 /* Output a PIC pc-relative instruction sequence to load the address of
6790 OPERANDS[0] to register OPERANDS[2]. OPERANDS[0] is a symbol ref
6791 or a code label. OPERANDS[1] specifies the register to use to load
6792 the program counter. OPERANDS[3] may be used for label generation.
6793 The sequence is always three instructions in length. The program
6794 counter recorded for PA 1.X is eight bytes more than that for PA 2.0.
6795 Register %r1 is clobbered. */
6797 static void
6798 pa_output_pic_pcrel_sequence (rtx *operands)
6800 gcc_assert (SYMBOL_REF_P (operands[0]) || LABEL_P (operands[0]));
6801 if (TARGET_PA_20)
6803 /* We can use mfia to determine the current program counter. */
6804 if (TARGET_SOM || !TARGET_GAS)
6806 operands[3] = gen_label_rtx ();
6807 targetm.asm_out.internal_label (asm_out_file, "L",
6808 CODE_LABEL_NUMBER (operands[3]));
6809 output_asm_insn ("mfia %1", operands);
6810 output_asm_insn ("addil L'%0-%l3,%1", operands);
6811 output_asm_insn ("ldo R'%0-%l3(%%r1),%2", operands);
6813 else
6815 output_asm_insn ("mfia %1", operands);
6816 output_asm_insn ("addil L'%0-$PIC_pcrel$0+12,%1", operands);
6817 output_asm_insn ("ldo R'%0-$PIC_pcrel$0+16(%%r1),%2", operands);
6820 else
6822 /* We need to use a branch to determine the current program counter. */
6823 output_asm_insn ("{bl|b,l} .+8,%1", operands);
6824 if (TARGET_SOM || !TARGET_GAS)
6826 operands[3] = gen_label_rtx ();
6827 output_asm_insn ("addil L'%0-%l3,%1", operands);
6828 targetm.asm_out.internal_label (asm_out_file, "L",
6829 CODE_LABEL_NUMBER (operands[3]));
6830 output_asm_insn ("ldo R'%0-%l3(%%r1),%2", operands);
6832 else
6834 output_asm_insn ("addil L'%0-$PIC_pcrel$0+4,%1", operands);
6835 output_asm_insn ("ldo R'%0-$PIC_pcrel$0+8(%%r1),%2", operands);
6840 /* This routine handles output of long unconditional branches that
6841 exceed the maximum range of a simple branch instruction. Since
6842 we don't have a register available for the branch, we save register
6843 %r1 in the frame marker, load the branch destination DEST into %r1,
6844 execute the branch, and restore %r1 in the delay slot of the branch.
6846 Since long branches may have an insn in the delay slot and the
6847 delay slot is used to restore %r1, we in general need to extract
6848 this insn and execute it before the branch. However, to facilitate
6849 use of this function by conditional branches, we also provide an
6850 option to not extract the delay insn so that it will be emitted
6851 after the long branch. So, if there is an insn in the delay slot,
6852 it is extracted if XDELAY is nonzero.
6854 The lengths of the various long-branch sequences are 20, 16 and 24
6855 bytes for the portable runtime, non-PIC and PIC cases, respectively. */
6857 const char *
6858 pa_output_lbranch (rtx dest, rtx_insn *insn, int xdelay)
6860 rtx xoperands[4];
6862 xoperands[0] = dest;
6864 /* First, free up the delay slot. */
6865 if (xdelay && dbr_sequence_length () != 0)
6867 /* We can't handle a jump in the delay slot. */
6868 gcc_assert (! JUMP_P (NEXT_INSN (insn)));
6870 final_scan_insn (NEXT_INSN (insn), asm_out_file,
6871 optimize, 0, NULL);
6873 /* Now delete the delay insn. */
6874 SET_INSN_DELETED (NEXT_INSN (insn));
6877 /* Output an insn to save %r1. The runtime documentation doesn't
6878 specify whether the "Clean Up" slot in the caller's frame can
6879 be clobbered by the callee. It isn't copied by HP's builtin
6880 alloca, so this suggests that it can be clobbered if necessary.
6881 The "Static Link" location is copied by HP builtin alloca, so
6882 we avoid using it. Using the cleanup slot might be a problem
6883 if we have to interoperate with languages that pass cleanup
6884 information. However, it should be possible to handle these
6885 situations with GCC's asm feature.
6887 The "Current RP" slot is reserved for the called procedure, so
6888 we try to use it when we don't have a frame of our own. It's
6889 rather unlikely that we won't have a frame when we need to emit
6890 a very long branch.
6892 Really the way to go long term is a register scavenger; go to
6893 the target of the jump and find a register which we can use
6894 as a scratch to hold the value in %r1. Then, we wouldn't have
6895 to free up the delay slot or clobber a slot that may be needed
6896 for other purposes. */
6897 if (TARGET_64BIT)
6899 if (actual_fsize == 0 && !df_regs_ever_live_p (2))
6900 /* Use the return pointer slot in the frame marker. */
6901 output_asm_insn ("std %%r1,-16(%%r30)", xoperands);
6902 else
6903 /* Use the slot at -40 in the frame marker since HP builtin
6904 alloca doesn't copy it. */
6905 output_asm_insn ("std %%r1,-40(%%r30)", xoperands);
6907 else
6909 if (actual_fsize == 0 && !df_regs_ever_live_p (2))
6910 /* Use the return pointer slot in the frame marker. */
6911 output_asm_insn ("stw %%r1,-20(%%r30)", xoperands);
6912 else
6913 /* Use the "Clean Up" slot in the frame marker. In GCC,
6914 the only other use of this location is for copying a
6915 floating point double argument from a floating-point
6916 register to two general registers. The copy is done
6917 as an "atomic" operation when outputting a call, so it
6918 won't interfere with our using the location here. */
6919 output_asm_insn ("stw %%r1,-12(%%r30)", xoperands);
6922 if (TARGET_PORTABLE_RUNTIME)
6924 output_asm_insn ("ldil L'%0,%%r1", xoperands);
6925 output_asm_insn ("ldo R'%0(%%r1),%%r1", xoperands);
6926 output_asm_insn ("bv %%r0(%%r1)", xoperands);
6928 else if (flag_pic)
6930 xoperands[1] = gen_rtx_REG (Pmode, 1);
6931 xoperands[2] = xoperands[1];
6932 pa_output_pic_pcrel_sequence (xoperands);
6933 output_asm_insn ("bv %%r0(%%r1)", xoperands);
6935 else
6936 /* Now output a very long branch to the original target. */
6937 output_asm_insn ("ldil L'%l0,%%r1\n\tbe R'%l0(%%sr4,%%r1)", xoperands);
6939 /* Now restore the value of %r1 in the delay slot. */
6940 if (TARGET_64BIT)
6942 if (actual_fsize == 0 && !df_regs_ever_live_p (2))
6943 return "ldd -16(%%r30),%%r1";
6944 else
6945 return "ldd -40(%%r30),%%r1";
6947 else
6949 if (actual_fsize == 0 && !df_regs_ever_live_p (2))
6950 return "ldw -20(%%r30),%%r1";
6951 else
6952 return "ldw -12(%%r30),%%r1";
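/* Editor's sketch: the %r1 scratch-slot selection above in one table.
   A function with no frame of its own and an unsaved return pointer
   may borrow the "Current RP" slot; otherwise we pick a frame marker
   slot that HP's builtin alloca does not copy.  Illustration only.  */
#if 0
static int
r1_save_slot_offset_sketch (int is_64bit, int frameless)
{
  if (is_64bit)
    return frameless ? -16 : -40;	/* std/ldd offset from %r30 */
  else
    return frameless ? -20 : -12;	/* stw/ldw offset from %r30 */
}
#endif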
6956 /* This routine handles all the branch-on-bit conditional branch sequences we
6957 might need to generate. It handles nullification of delay slots,
6958 varying length branches, negated branches and all combinations of the
6959 above. It returns the appropriate output template to emit the branch. */
6961 const char *
6962 pa_output_bb (rtx *operands ATTRIBUTE_UNUSED, int negated, rtx_insn *insn, int which)
6964 static char buf[100];
6965 bool useskip;
6966 int nullify = INSN_ANNULLED_BRANCH_P (insn);
6967 int length = get_attr_length (insn);
6968 int xdelay;
6970 /* A conditional branch to the following instruction (e.g. the delay slot) is
6971 asking for a disaster. I do not think this can happen as this pattern
6972 is only used when optimizing; jump optimization should eliminate the
6973 jump. But be prepared just in case. */
6975 if (branch_to_delay_slot_p (insn))
6976 return "nop";
6978 /* If this is a long branch with its delay slot unfilled, set `nullify'
6979 as it can nullify the delay slot and save a nop. */
6980 if (length == 8 && dbr_sequence_length () == 0)
6981 nullify = 1;
6983 /* If this is a short forward conditional branch which did not get
6984 its delay slot filled, the delay slot can still be nullified. */
6985 if (! nullify && length == 4 && dbr_sequence_length () == 0)
6986 nullify = forward_branch_p (insn);
6988 /* A forward branch over a single nullified insn can be done with an
6989 extrs instruction. This avoids a single cycle penalty due to a
6990 mis-predicted branch if we fall through (branch not taken). */
6991 useskip = (length == 4 && nullify) ? use_skip_p (insn) : FALSE;
6993 switch (length)
6996 /* All short conditional branches except backwards with an unfilled
6997 delay slot. */
6998 case 4:
6999 if (useskip)
7000 strcpy (buf, "{extrs,|extrw,s,}");
7001 else
7002 strcpy (buf, "bb,");
7003 if (useskip && GET_MODE (operands[0]) == DImode)
7004 strcpy (buf, "extrd,s,*");
7005 else if (GET_MODE (operands[0]) == DImode)
7006 strcpy (buf, "bb,*");
7007 if ((which == 0 && negated)
7008 || (which == 1 && ! negated))
7009 strcat (buf, ">=");
7010 else
7011 strcat (buf, "<");
7012 if (useskip)
7013 strcat (buf, " %0,%1,1,%%r0");
7014 else if (nullify && negated)
7016 if (branch_needs_nop_p (insn))
7017 strcat (buf, ",n %0,%1,%3%#");
7018 else
7019 strcat (buf, ",n %0,%1,%3");
7021 else if (nullify && ! negated)
7023 if (branch_needs_nop_p (insn))
7024 strcat (buf, ",n %0,%1,%2%#");
7025 else
7026 strcat (buf, ",n %0,%1,%2");
7028 else if (! nullify && negated)
7029 strcat (buf, " %0,%1,%3");
7030 else if (! nullify && ! negated)
7031 strcat (buf, " %0,%1,%2");
7032 break;
7034 /* All long conditionals. Note a short backward branch with an
7035 unfilled delay slot is treated just like a long backward branch
7036 with an unfilled delay slot. */
7037 case 8:
7038 /* Handle weird backwards branch with a filled delay slot
7039 which is nullified. */
7040 if (dbr_sequence_length () != 0
7041 && ! forward_branch_p (insn)
7042 && nullify)
7044 strcpy (buf, "bb,");
7045 if (GET_MODE (operands[0]) == DImode)
7046 strcat (buf, "*");
7047 if ((which == 0 && negated)
7048 || (which == 1 && ! negated))
7049 strcat (buf, "<");
7050 else
7051 strcat (buf, ">=");
7052 if (negated)
7053 strcat (buf, ",n %0,%1,.+12\n\tb %3");
7054 else
7055 strcat (buf, ",n %0,%1,.+12\n\tb %2");
7057 /* Handle short backwards branch with an unfilled delay slot.
7058 Using a bb;nop rather than extrs;bl saves 1 cycle for both
7059 taken and untaken branches. */
7060 else if (dbr_sequence_length () == 0
7061 && ! forward_branch_p (insn)
7062 && INSN_ADDRESSES_SET_P ()
7063 && VAL_14_BITS_P (INSN_ADDRESSES (INSN_UID (JUMP_LABEL (insn)))
7064 - INSN_ADDRESSES (INSN_UID (insn)) - 8))
7066 strcpy (buf, "bb,");
7067 if (GET_MODE (operands[0]) == DImode)
7068 strcat (buf, "*");
7069 if ((which == 0 && negated)
7070 || (which == 1 && ! negated))
7071 strcat (buf, ">=");
7072 else
7073 strcat (buf, "<");
7074 if (negated)
7075 strcat (buf, " %0,%1,%3%#");
7076 else
7077 strcat (buf, " %0,%1,%2%#");
7079 else
7081 if (GET_MODE (operands[0]) == DImode)
7082 strcpy (buf, "extrd,s,*");
7083 else
7084 strcpy (buf, "{extrs,|extrw,s,}");
7085 if ((which == 0 && negated)
7086 || (which == 1 && ! negated))
7087 strcat (buf, "<");
7088 else
7089 strcat (buf, ">=");
7090 if (nullify && negated)
7091 strcat (buf, " %0,%1,1,%%r0\n\tb,n %3");
7092 else if (nullify && ! negated)
7093 strcat (buf, " %0,%1,1,%%r0\n\tb,n %2");
7094 else if (negated)
7095 strcat (buf, " %0,%1,1,%%r0\n\tb %3");
7096 else
7097 strcat (buf, " %0,%1,1,%%r0\n\tb %2");
7099 break;
7101 default:
7102 /* The reversed conditional branch must branch over one additional
7103 instruction if the delay slot is filled and needs to be extracted
7104 by pa_output_lbranch. If the delay slot is empty or this is a
7105 nullified forward branch, the instruction after the reversed
7106 condition branch must be nullified. */
7107 if (dbr_sequence_length () == 0
7108 || (nullify && forward_branch_p (insn)))
7110 nullify = 1;
7111 xdelay = 0;
7112 operands[4] = GEN_INT (length);
7114 else
7116 xdelay = 1;
7117 operands[4] = GEN_INT (length + 4);
7120 if (GET_MODE (operands[0]) == DImode)
7121 strcpy (buf, "bb,*");
7122 else
7123 strcpy (buf, "bb,");
7124 if ((which == 0 && negated)
7125 || (which == 1 && !negated))
7126 strcat (buf, "<");
7127 else
7128 strcat (buf, ">=");
7129 if (nullify)
7130 strcat (buf, ",n %0,%1,.+%4");
7131 else
7132 strcat (buf, " %0,%1,.+%4");
7133 output_asm_insn (buf, operands);
7134 return pa_output_lbranch (negated ? operands[3] : operands[2],
7135 insn, xdelay);
7137 return buf;
7140 /* This routine handles all the branch-on-variable-bit conditional branch
7141 sequences we might need to generate. It handles nullification of delay
7142 slots, varying length branches, negated branches and all combinations
7143 of the above. it returns the appropriate output template to emit the
7144 branch. */
7146 const char *
7147 pa_output_bvb (rtx *operands ATTRIBUTE_UNUSED, int negated, rtx_insn *insn,
7148 int which)
7150 static char buf[100];
7151 bool useskip;
7152 int nullify = INSN_ANNULLED_BRANCH_P (insn);
7153 int length = get_attr_length (insn);
7154 int xdelay;
7156 /* A conditional branch to the following instruction (e.g. the delay slot) is
7157 asking for a disaster. I do not think this can happen as this pattern
7158 is only used when optimizing; jump optimization should eliminate the
7159 jump. But be prepared just in case. */
7161 if (branch_to_delay_slot_p (insn))
7162 return "nop";
7164 /* If this is a long branch with its delay slot unfilled, set `nullify'
7165 as it can nullify the delay slot and save a nop. */
7166 if (length == 8 && dbr_sequence_length () == 0)
7167 nullify = 1;
7169 /* If this is a short forward conditional branch which did not get
7170 its delay slot filled, the delay slot can still be nullified. */
7171 if (! nullify && length == 4 && dbr_sequence_length () == 0)
7172 nullify = forward_branch_p (insn);
7174 /* A forward branch over a single nullified insn can be done with an
7175 extrs instruction. This avoids a single cycle penalty due to a
7176 mis-predicted branch if we fall through (branch not taken). */
7177 useskip = (length == 4 && nullify) ? use_skip_p (insn) : FALSE;
7179 switch (length)
7182 /* All short conditional branches except backwards with an unfilled
7183 delay slot. */
7184 case 4:
7185 if (useskip)
7186 strcpy (buf, "{vextrs,|extrw,s,}");
7187 else
7188 strcpy (buf, "{bvb,|bb,}");
7189 if (useskip && GET_MODE (operands[0]) == DImode)
7190 strcpy (buf, "extrd,s,*");
7191 else if (GET_MODE (operands[0]) == DImode)
7192 strcpy (buf, "bb,*");
7193 if ((which == 0 && negated)
7194 || (which == 1 && ! negated))
7195 strcat (buf, ">=");
7196 else
7197 strcat (buf, "<");
7198 if (useskip)
7199 strcat (buf, "{ %0,1,%%r0| %0,%%sar,1,%%r0}");
7200 else if (nullify && negated)
7202 if (branch_needs_nop_p (insn))
7203 strcat (buf, "{,n %0,%3%#|,n %0,%%sar,%3%#}");
7204 else
7205 strcat (buf, "{,n %0,%3|,n %0,%%sar,%3}");
7207 else if (nullify && ! negated)
7209 if (branch_needs_nop_p (insn))
7210 strcat (buf, "{,n %0,%2%#|,n %0,%%sar,%2%#}");
7211 else
7212 strcat (buf, "{,n %0,%2|,n %0,%%sar,%2}");
7214 else if (! nullify && negated)
7215 strcat (buf, "{ %0,%3| %0,%%sar,%3}");
7216 else if (! nullify && ! negated)
7217 strcat (buf, "{ %0,%2| %0,%%sar,%2}");
7218 break;
7220 /* All long conditionals. Note a short backward branch with an
7221 unfilled delay slot is treated just like a long backward branch
7222 with an unfilled delay slot. */
7223 case 8:
7224 /* Handle weird backwards branch with a filled delay slot
7225 which is nullified. */
7226 if (dbr_sequence_length () != 0
7227 && ! forward_branch_p (insn)
7228 && nullify)
7230 strcpy (buf, "{bvb,|bb,}");
7231 if (GET_MODE (operands[0]) == DImode)
7232 strcat (buf, "*");
7233 if ((which == 0 && negated)
7234 || (which == 1 && ! negated))
7235 strcat (buf, "<");
7236 else
7237 strcat (buf, ">=");
7238 if (negated)
7239 strcat (buf, "{,n %0,.+12\n\tb %3|,n %0,%%sar,.+12\n\tb %3}");
7240 else
7241 strcat (buf, "{,n %0,.+12\n\tb %2|,n %0,%%sar,.+12\n\tb %2}");
7243 /* Handle short backwards branch with an unfilled delay slot.
7244 Using a bb;nop rather than extrs;bl saves 1 cycle for both
7245 taken and untaken branches. */
7246 else if (dbr_sequence_length () == 0
7247 && ! forward_branch_p (insn)
7248 && INSN_ADDRESSES_SET_P ()
7249 && VAL_14_BITS_P (INSN_ADDRESSES (INSN_UID (JUMP_LABEL (insn)))
7250 - INSN_ADDRESSES (INSN_UID (insn)) - 8))
7252 strcpy (buf, "{bvb,|bb,}");
7253 if (GET_MODE (operands[0]) == DImode)
7254 strcat (buf, "*");
7255 if ((which == 0 && negated)
7256 || (which == 1 && ! negated))
7257 strcat (buf, ">=");
7258 else
7259 strcat (buf, "<");
7260 if (negated)
7261 strcat (buf, "{ %0,%3%#| %0,%%sar,%3%#}");
7262 else
7263 strcat (buf, "{ %0,%2%#| %0,%%sar,%2%#}");
7265 else
7267 strcpy (buf, "{vextrs,|extrw,s,}");
7268 if (GET_MODE (operands[0]) == DImode)
7269 strcpy (buf, "extrd,s,*");
7270 if ((which == 0 && negated)
7271 || (which == 1 && ! negated))
7272 strcat (buf, "<");
7273 else
7274 strcat (buf, ">=");
7275 if (nullify && negated)
7276 strcat (buf, "{ %0,1,%%r0\n\tb,n %3| %0,%%sar,1,%%r0\n\tb,n %3}");
7277 else if (nullify && ! negated)
7278 strcat (buf, "{ %0,1,%%r0\n\tb,n %2| %0,%%sar,1,%%r0\n\tb,n %2}");
7279 else if (negated)
7280 strcat (buf, "{ %0,1,%%r0\n\tb %3| %0,%%sar,1,%%r0\n\tb %3}");
7281 else
7282 strcat (buf, "{ %0,1,%%r0\n\tb %2| %0,%%sar,1,%%r0\n\tb %2}");
7284 break;
7286 default:
7287 /* The reversed conditional branch must branch over one additional
7288 instruction if the delay slot is filled and needs to be extracted
7289 by pa_output_lbranch. If the delay slot is empty or this is a
7290 nullified forward branch, the instruction after the reversed
7291 condition branch must be nullified. */
7292 if (dbr_sequence_length () == 0
7293 || (nullify && forward_branch_p (insn)))
7295 nullify = 1;
7296 xdelay = 0;
7297 operands[4] = GEN_INT (length);
7299 else
7301 xdelay = 1;
7302 operands[4] = GEN_INT (length + 4);
7305 if (GET_MODE (operands[0]) == DImode)
7306 strcpy (buf, "bb,*");
7307 else
7308 strcpy (buf, "{bvb,|bb,}");
7309 if ((which == 0 && negated)
7310 || (which == 1 && !negated))
7311 strcat (buf, "<");
7312 else
7313 strcat (buf, ">=");
7314 if (nullify)
7315 strcat (buf, ",n {%0,.+%4|%0,%%sar,.+%4}");
7316 else
7317 strcat (buf, " {%0,.+%4|%0,%%sar,.+%4}");
7318 output_asm_insn (buf, operands);
7319 return pa_output_lbranch (negated ? operands[3] : operands[2],
7320 insn, xdelay);
7322 return buf;
7325 /* Return the output template for emitting a dbra type insn.
7327 Note it may perform some output operations on its own before
7328 returning the final output string. */
7329 const char *
7330 pa_output_dbra (rtx *operands, rtx_insn *insn, int which_alternative)
7332 int length = get_attr_length (insn);
7334 /* A conditional branch to the following instruction (e.g. the delay slot) is
7335 asking for a disaster. Be prepared! */
7337 if (branch_to_delay_slot_p (insn))
7339 if (which_alternative == 0)
7340 return "ldo %1(%0),%0";
7341 else if (which_alternative == 1)
7343 output_asm_insn ("{fstws|fstw} %0,-16(%%r30)", operands);
7344 output_asm_insn ("ldw -16(%%r30),%4", operands);
7345 output_asm_insn ("ldo %1(%4),%4\n\tstw %4,-16(%%r30)", operands);
7346 return "{fldws|fldw} -16(%%r30),%0";
7348 else
7350 output_asm_insn ("ldw %0,%4", operands);
7351 return "ldo %1(%4),%4\n\tstw %4,%0";
7355 if (which_alternative == 0)
7357 int nullify = INSN_ANNULLED_BRANCH_P (insn);
7358 int xdelay;
7360 /* If this is a long branch with its delay slot unfilled, set `nullify'
7361 as it can nullify the delay slot and save a nop. */
7362 if (length == 8 && dbr_sequence_length () == 0)
7363 nullify = 1;
7365 /* If this is a short forward conditional branch which did not get
7366 its delay slot filled, the delay slot can still be nullified. */
7367 if (! nullify && length == 4 && dbr_sequence_length () == 0)
7368 nullify = forward_branch_p (insn);
7370 switch (length)
7372 case 4:
7373 if (nullify)
7375 if (branch_needs_nop_p (insn))
7376 return "addib,%C2,n %1,%0,%3%#";
7377 else
7378 return "addib,%C2,n %1,%0,%3";
7380 else
7381 return "addib,%C2 %1,%0,%3";
7383 case 8:
7384 /* Handle weird backwards branch with a filled delay slot
7385 which is nullified. */
7386 if (dbr_sequence_length () != 0
7387 && ! forward_branch_p (insn)
7388 && nullify)
7389 return "addib,%N2,n %1,%0,.+12\n\tb %3";
7390 /* Handle short backwards branch with an unfilled delay slot.
7391 Using a addb;nop rather than addi;bl saves 1 cycle for both
7392 taken and untaken branches. */
7393 else if (dbr_sequence_length () == 0
7394 && ! forward_branch_p (insn)
7395 && INSN_ADDRESSES_SET_P ()
7396 && VAL_14_BITS_P (INSN_ADDRESSES (INSN_UID (JUMP_LABEL (insn)))
7397 - INSN_ADDRESSES (INSN_UID (insn)) - 8))
7398 return "addib,%C2 %1,%0,%3%#";
7400 /* Handle normal cases. */
7401 if (nullify)
7402 return "addi,%N2 %1,%0,%0\n\tb,n %3";
7403 else
7404 return "addi,%N2 %1,%0,%0\n\tb %3";
7406 default:
7407 /* The reversed conditional branch must branch over one additional
7408 instruction if the delay slot is filled and needs to be extracted
7409 by pa_output_lbranch. If the delay slot is empty or this is a
7410 nullified forward branch, the instruction after the reversed
7411 condition branch must be nullified. */
7412 if (dbr_sequence_length () == 0
7413 || (nullify && forward_branch_p (insn)))
7415 nullify = 1;
7416 xdelay = 0;
7417 operands[4] = GEN_INT (length);
7419 else
7421 xdelay = 1;
7422 operands[4] = GEN_INT (length + 4);
7425 if (nullify)
7426 output_asm_insn ("addib,%N2,n %1,%0,.+%4", operands);
7427 else
7428 output_asm_insn ("addib,%N2 %1,%0,.+%4", operands);
7430 return pa_output_lbranch (operands[3], insn, xdelay);
7434 /* Deal with gross reload from FP register case. */
7435 else if (which_alternative == 1)
7437 /* Move loop counter from FP register to MEM then into a GR,
7438 increment the GR, store the GR into MEM, and finally reload
7439 the FP register from MEM from within the branch's delay slot. */
7440 output_asm_insn ("{fstws|fstw} %0,-16(%%r30)\n\tldw -16(%%r30),%4",
7441 operands);
7442 output_asm_insn ("ldo %1(%4),%4\n\tstw %4,-16(%%r30)", operands);
7443 if (length == 24)
7444 return "{comb|cmpb},%S2 %%r0,%4,%3\n\t{fldws|fldw} -16(%%r30),%0";
7445 else if (length == 28)
7446 return "{comclr|cmpclr},%B2 %%r0,%4,%%r0\n\tb %3\n\t{fldws|fldw} -16(%%r30),%0";
7447 else
7449 operands[5] = GEN_INT (length - 16);
7450 output_asm_insn ("{comb|cmpb},%B2 %%r0,%4,.+%5", operands);
7451 output_asm_insn ("{fldws|fldw} -16(%%r30),%0", operands);
7452 return pa_output_lbranch (operands[3], insn, 0);
7455 /* Deal with gross reload from memory case. */
7456 else
7458 /* Reload loop counter from memory, the store back to memory
7459 happens in the branch's delay slot. */
7460 output_asm_insn ("ldw %0,%4", operands);
7461 if (length == 12)
7462 return "addib,%C2 %1,%4,%3\n\tstw %4,%0";
7463 else if (length == 16)
7464 return "addi,%N2 %1,%4,%4\n\tb %3\n\tstw %4,%0";
7465 else
7467 operands[5] = GEN_INT (length - 4);
7468 output_asm_insn ("addib,%N2 %1,%4,.+%5\n\tstw %4,%0", operands);
7469 return pa_output_lbranch (operands[3], insn, 0);
7474 /* Return the output template for emitting a movb type insn.
7476 Note it may perform some output operations on its own before
7477 returning the final output string. */
7478 const char *
7479 pa_output_movb (rtx *operands, rtx_insn *insn, int which_alternative,
7480 int reverse_comparison)
7482 int length = get_attr_length (insn);
7484 /* A conditional branch to the following instruction (e.g. the delay slot) is
7485 asking for a disaster. Be prepared! */
7487 if (branch_to_delay_slot_p (insn))
7489 if (which_alternative == 0)
7490 return "copy %1,%0";
7491 else if (which_alternative == 1)
7493 output_asm_insn ("stw %1,-16(%%r30)", operands);
7494 return "{fldws|fldw} -16(%%r30),%0";
7496 else if (which_alternative == 2)
7497 return "stw %1,%0";
7498 else
7499 return "mtsar %r1";
7502 /* Support the second variant. */
7503 if (reverse_comparison)
7504 PUT_CODE (operands[2], reverse_condition (GET_CODE (operands[2])));
7506 if (which_alternative == 0)
7508 int nullify = INSN_ANNULLED_BRANCH_P (insn);
7509 int xdelay;
7511 /* If this is a long branch with its delay slot unfilled, set `nullify'
7512 as it can nullify the delay slot and save a nop. */
7513 if (length == 8 && dbr_sequence_length () == 0)
7514 nullify = 1;
7516 /* If this is a short forward conditional branch which did not get
7517 its delay slot filled, the delay slot can still be nullified. */
7518 if (! nullify && length == 4 && dbr_sequence_length () == 0)
7519 nullify = forward_branch_p (insn);
7521 switch (length)
7523 case 4:
7524 if (nullify)
7526 if (branch_needs_nop_p (insn))
7527 return "movb,%C2,n %1,%0,%3%#";
7528 else
7529 return "movb,%C2,n %1,%0,%3";
7531 else
7532 return "movb,%C2 %1,%0,%3";
7534 case 8:
7535 /* Handle weird backwards branch with a filled delay slot
7536 which is nullified. */
7537 if (dbr_sequence_length () != 0
7538 && ! forward_branch_p (insn)
7539 && nullify)
7540 return "movb,%N2,n %1,%0,.+12\n\tb %3";
7542 /* Handle short backwards branch with an unfilled delay slot.
7543 Using a movb;nop rather than or;bl saves 1 cycle for both
7544 taken and untaken branches. */
7545 else if (dbr_sequence_length () == 0
7546 && ! forward_branch_p (insn)
7547 && INSN_ADDRESSES_SET_P ()
7548 && VAL_14_BITS_P (INSN_ADDRESSES (INSN_UID (JUMP_LABEL (insn)))
7549 - INSN_ADDRESSES (INSN_UID (insn)) - 8))
7550 return "movb,%C2 %1,%0,%3%#";
7551 /* Handle normal cases. */
7552 if (nullify)
7553 return "or,%N2 %1,%%r0,%0\n\tb,n %3";
7554 else
7555 return "or,%N2 %1,%%r0,%0\n\tb %3";
7557 default:
7558 /* The reversed conditional branch must branch over one additional
7559 instruction if the delay slot is filled and needs to be extracted
7560 by pa_output_lbranch. If the delay slot is empty or this is a
7561 nullified forward branch, the instruction after the reversed
7562 condition branch must be nullified. */
7563 if (dbr_sequence_length () == 0
7564 || (nullify && forward_branch_p (insn)))
7566 nullify = 1;
7567 xdelay = 0;
7568 operands[4] = GEN_INT (length);
7570 else
7572 xdelay = 1;
7573 operands[4] = GEN_INT (length + 4);
7576 if (nullify)
7577 output_asm_insn ("movb,%N2,n %1,%0,.+%4", operands);
7578 else
7579 output_asm_insn ("movb,%N2 %1,%0,.+%4", operands);
7581 return pa_output_lbranch (operands[3], insn, xdelay);
7584 /* Deal with gross reload for FP destination register case. */
7585 else if (which_alternative == 1)
7587 /* Move source register to MEM, perform the branch test, then
7588 finally load the FP register from MEM from within the branch's
7589 delay slot. */
7590 output_asm_insn ("stw %1,-16(%%r30)", operands);
7591 if (length == 12)
7592 return "{comb|cmpb},%S2 %%r0,%1,%3\n\t{fldws|fldw} -16(%%r30),%0";
7593 else if (length == 16)
7594 return "{comclr|cmpclr},%B2 %%r0,%1,%%r0\n\tb %3\n\t{fldws|fldw} -16(%%r30),%0";
7595 else
7597 operands[4] = GEN_INT (length - 4);
7598 output_asm_insn ("{comb|cmpb},%B2 %%r0,%1,.+%4", operands);
7599 output_asm_insn ("{fldws|fldw} -16(%%r30),%0", operands);
7600 return pa_output_lbranch (operands[3], insn, 0);
7603 /* Deal with gross reload from memory case. */
7604 else if (which_alternative == 2)
7606 /* Reload loop counter from memory, the store back to memory
7607 happens in the branch's delay slot. */
7608 if (length == 8)
7609 return "{comb|cmpb},%S2 %%r0,%1,%3\n\tstw %1,%0";
7610 else if (length == 12)
7611 return "{comclr|cmpclr},%B2 %%r0,%1,%%r0\n\tb %3\n\tstw %1,%0";
7612 else
7614 operands[4] = GEN_INT (length);
7615 output_asm_insn ("{comb|cmpb},%B2 %%r0,%1,.+%4\n\tstw %1,%0",
7616 operands);
7617 return pa_output_lbranch (operands[3], insn, 0);
7620 /* Handle SAR as a destination. */
7621 else
7623 if (length == 8)
7624 return "{comb|cmpb},%S2 %%r0,%1,%3\n\tmtsar %r1";
7625 else if (length == 12)
7626 return "{comclr|cmpclr},%B2 %%r0,%1,%%r0\n\tb %3\n\tmtsar %r1";
7627 else
7629 operands[4] = GEN_INT (length);
7630 output_asm_insn ("{comb|cmpb},%B2 %%r0,%1,.+%4\n\tmtsar %r1",
7631 operands);
7632 return pa_output_lbranch (operands[3], insn, 0);
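/* Illustrative summary (inferred from the templates above, with SRC
   standing for operand %1 and L for label %3):

     length ==  8:  comb,%S2 %r0,SRC,L        branch on the condition,
                    mtsar SRC                 mtsar in the delay slot
     length == 12:  comclr,%B2 %r0,SRC,%r0    nullify the b when the
                    b L                       condition fails
                    mtsar SRC
     otherwise:     comb,%B2 %r0,SRC,.+N      hop over the long branch
                    mtsar SRC
                    <long branch to L via pa_output_lbranch>

   That %S2 prints the condition directly while %B2 prints a reversed
   form is an inference from how each sequence keeps the fall-through
   path correct, not a statement from the original comments.  */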
7637 /* Copy any FP arguments in INSN into integer registers. */
7638 static void
7639 copy_fp_args (rtx_insn *insn)
7641 rtx link;
7642 rtx xoperands[2];
7644 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
7646 int arg_mode, regno;
7647 rtx use = XEXP (link, 0);
7649 if (! (GET_CODE (use) == USE
7650 && GET_CODE (XEXP (use, 0)) == REG
7651 && FUNCTION_ARG_REGNO_P (REGNO (XEXP (use, 0)))))
7652 continue;
7654 arg_mode = GET_MODE (XEXP (use, 0));
7655 regno = REGNO (XEXP (use, 0));
7657 /* Is it a floating point register? */
7658 if (regno >= 32 && regno <= 39)
7660 /* Copy the FP register into an integer register via memory. */
7661 if (arg_mode == SFmode)
7663 xoperands[0] = XEXP (use, 0);
7664 xoperands[1] = gen_rtx_REG (SImode, 26 - (regno - 32) / 2);
7665 output_asm_insn ("{fstws|fstw} %0,-16(%%sr0,%%r30)", xoperands);
7666 output_asm_insn ("ldw -16(%%sr0,%%r30),%1", xoperands);
7668 else
7670 xoperands[0] = XEXP (use, 0);
7671 xoperands[1] = gen_rtx_REG (DImode, 25 - (regno - 34) / 2);
7672 output_asm_insn ("{fstds|fstd} %0,-16(%%sr0,%%r30)", xoperands);
7673 output_asm_insn ("ldw -12(%%sr0,%%r30),%R1", xoperands);
7674 output_asm_insn ("ldw -16(%%sr0,%%r30),%1", xoperands);
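/* Illustrative sketch (hypothetical helper names): the register
   arithmetic above maps FP argument registers to the corresponding
   general argument registers, assuming regnos 32..39 are the 32-bit
   halves of the FP argument registers as the range check suggests:

     // SFmode copies target r26..r23; DFmode copies target r25..r23.
     static int sf_gpr (int regno) { return 26 - (regno - 32) / 2; }
     static int df_gpr (int regno) { return 25 - (regno - 34) / 2; }

   so an SFmode argument in regno 34 is reloaded into r25, and a DFmode
   argument based at regno 34 into DImode register 25.  */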
7680 /* Compute length of the FP argument copy sequence for INSN. */
7681 static int
7682 length_fp_args (rtx_insn *insn)
7684 int length = 0;
7685 rtx link;
7687 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
7689 int arg_mode, regno;
7690 rtx use = XEXP (link, 0);
7692 if (! (GET_CODE (use) == USE
7693 && GET_CODE (XEXP (use, 0)) == REG
7694 && FUNCTION_ARG_REGNO_P (REGNO (XEXP (use, 0)))))
7695 continue;
7697 arg_mode = GET_MODE (XEXP (use, 0));
7698 regno = REGNO (XEXP (use, 0));
7700 /* Is it a floating point register? */
7701 if (regno >= 32 && regno <= 39)
7703 if (arg_mode == SFmode)
7704 length += 8;
7705 else
7706 length += 12;
7710 return length;
7713 /* Return the attribute length for the millicode call instruction INSN.
7714 The length must match the code generated by pa_output_millicode_call.
7715 We include the delay slot in the returned length as it is better to
7716 overestimate the length than to underestimate it. */
7718 int
7719 pa_attr_length_millicode_call (rtx_insn *insn)
7721 unsigned long distance = -1;
7722 unsigned long total = IN_NAMED_SECTION_P (cfun->decl) ? 0 : total_code_bytes;
7724 if (INSN_ADDRESSES_SET_P ())
7726 distance = (total + insn_current_reference_address (insn));
7727 if (distance < total)
7728 distance = -1;
7731 if (TARGET_64BIT)
7733 if (!TARGET_LONG_CALLS && distance < 7600000)
7734 return 8;
7736 return 20;
7738 else if (TARGET_PORTABLE_RUNTIME)
7739 return 24;
7740 else
7742 if (!TARGET_LONG_CALLS && distance < MAX_PCREL17F_OFFSET)
7743 return 8;
7745 if (!flag_pic)
7746 return 12;
7748 return 24;
7752 /* INSN is a function call.
7754 CALL_DEST is the routine we are calling. */
7756 const char *
7757 pa_output_millicode_call (rtx_insn *insn, rtx call_dest)
7759 int attr_length = get_attr_length (insn);
7760 int seq_length = dbr_sequence_length ();
7761 rtx xoperands[4];
7763 xoperands[0] = call_dest;
7765 /* Handle the common case where we are sure that the branch will
7766 reach the beginning of the $CODE$ subspace. The within-reach
7767 form of the $$sh_func_adrs call has a length of 28. Because it
7768 has an attribute type of sh_func_adrs, it never has a nonzero
7769 sequence length (i.e., the delay slot is never filled). */
7770 if (!TARGET_LONG_CALLS
7771 && (attr_length == 8
7772 || (attr_length == 28
7773 && get_attr_type (insn) == TYPE_SH_FUNC_ADRS)))
7775 xoperands[1] = gen_rtx_REG (Pmode, TARGET_64BIT ? 2 : 31);
7776 output_asm_insn ("{bl|b,l} %0,%1", xoperands);
7778 else
7780 if (TARGET_64BIT)
7782 /* It might seem that one insn could be saved by accessing
7783 the millicode function using the linkage table. However,
7784 this doesn't work in shared libraries and other dynamically
7785 loaded objects. Using a pc-relative sequence also avoids
7786 problems related to the implicit use of the gp register. */
7787 xoperands[1] = gen_rtx_REG (Pmode, 1);
7788 xoperands[2] = xoperands[1];
7789 pa_output_pic_pcrel_sequence (xoperands);
7790 output_asm_insn ("bve,l (%%r1),%%r2", xoperands);
7792 else if (TARGET_PORTABLE_RUNTIME)
7794 /* Pure portable runtime doesn't allow be/ble; we also don't
7795 have PIC support in the assembler/linker, so this sequence
7796 is needed. */
7798 /* Get the address of our target into %r1. */
7799 output_asm_insn ("ldil L'%0,%%r1", xoperands);
7800 output_asm_insn ("ldo R'%0(%%r1),%%r1", xoperands);
7802 /* Get our return address into %r31. */
7803 output_asm_insn ("{bl|b,l} .+8,%%r31", xoperands);
7804 output_asm_insn ("addi 8,%%r31,%%r31", xoperands);
7806 /* Jump to our target address in %r1. */
7807 output_asm_insn ("bv %%r0(%%r1)", xoperands);
7809 else if (!flag_pic)
7811 output_asm_insn ("ldil L'%0,%%r1", xoperands);
7812 if (TARGET_PA_20)
7813 output_asm_insn ("be,l R'%0(%%sr4,%%r1),%%sr0,%%r31", xoperands);
7814 else
7815 output_asm_insn ("ble R'%0(%%sr4,%%r1)", xoperands);
7817 else
7819 xoperands[1] = gen_rtx_REG (Pmode, 31);
7820 xoperands[2] = gen_rtx_REG (Pmode, 1);
7821 pa_output_pic_pcrel_sequence (xoperands);
7823 /* Adjust return address. */
7824 output_asm_insn ("ldo {16|24}(%%r31),%%r31", xoperands);
7826 /* Jump to our target address in %r1. */
7827 output_asm_insn ("bv %%r0(%%r1)", xoperands);
7831 if (seq_length == 0)
7832 output_asm_insn ("nop", xoperands);
7834 return "";
7837 /* Return the attribute length of the call instruction INSN. The SIBCALL
7838 flag indicates whether INSN is a regular call or a sibling call. The
7839 length returned must be at least as long as the code actually generated by
7840 pa_output_call. Since branch shortening is done before delay branch
7841 sequencing, there is no way to determine whether or not the delay
7842 slot will be filled during branch shortening. Even when the delay
7843 slot is filled, we may have to add a nop if the delay slot contains
7844 a branch that can't reach its target. Thus, we always have to include
7845 the delay slot in the length estimate. This used to be done in
7846 pa_adjust_insn_length but we do it here now as some sequences always
7847 fill the delay slot and we can save four bytes in the estimate for
7848 these sequences. */
7850 int
7851 pa_attr_length_call (rtx_insn *insn, int sibcall)
7853 int local_call;
7854 rtx call, call_dest;
7855 tree call_decl;
7856 int length = 0;
7857 rtx pat = PATTERN (insn);
7858 unsigned long distance = -1;
7860 gcc_assert (CALL_P (insn));
7862 if (INSN_ADDRESSES_SET_P ())
7864 unsigned long total;
7866 total = IN_NAMED_SECTION_P (cfun->decl) ? 0 : total_code_bytes;
7867 distance = (total + insn_current_reference_address (insn));
7868 if (distance < total)
7869 distance = -1;
7872 gcc_assert (GET_CODE (pat) == PARALLEL);
7874 /* Get the call rtx. */
7875 call = XVECEXP (pat, 0, 0);
7876 if (GET_CODE (call) == SET)
7877 call = SET_SRC (call);
7879 gcc_assert (GET_CODE (call) == CALL);
7881 /* Determine if this is a local call. */
7882 call_dest = XEXP (XEXP (call, 0), 0);
7883 call_decl = SYMBOL_REF_DECL (call_dest);
7884 local_call = call_decl && targetm.binds_local_p (call_decl);
7886 /* pc-relative branch. */
7887 if (!TARGET_LONG_CALLS
7888 && ((TARGET_PA_20 && !sibcall && distance < 7600000)
7889 || distance < MAX_PCREL17F_OFFSET))
7890 length += 8;
7892 /* 64-bit plabel sequence. */
7893 else if (TARGET_64BIT && !local_call)
7894 length += sibcall ? 28 : 24;
7896 /* non-pic long absolute branch sequence. */
7897 else if ((TARGET_LONG_ABS_CALL || local_call) && !flag_pic)
7898 length += 12;
7900 /* long pc-relative branch sequence. */
7901 else if (TARGET_LONG_PIC_SDIFF_CALL
7902 || (TARGET_GAS && !TARGET_SOM && local_call))
7904 length += 20;
7906 if (!TARGET_PA_20 && !TARGET_NO_SPACE_REGS && (!local_call || flag_pic))
7907 length += 8;
7910 /* 32-bit plabel sequence. */
7911 else
7913 length += 32;
7915 if (TARGET_SOM)
7916 length += length_fp_args (insn);
7918 if (flag_pic)
7919 length += 4;
7921 if (!TARGET_PA_20)
7923 if (!sibcall)
7924 length += 8;
7926 if (!TARGET_NO_SPACE_REGS && (!local_call || flag_pic))
7927 length += 8;
7931 return length;
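/* Worked example (values read off the code above, combination chosen
   for illustration): a non-local 32-bit SOM call with -fpic and
   without TARGET_PA_20 that reaches the plabel case accumulates

     32                       base plabel sequence
   + length_fp_args (insn)    FP args copied through memory (SOM only)
   + 4                        PIC register load
   + 8                        return pointer adjustment (!sibcall)
   + 8                        space register save/restore

   with each SFmode FP argument adding 8 bytes and each DFmode argument
   12 bytes via length_fp_args.  */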
7934 /* INSN is a function call.
7936 CALL_DEST is the routine we are calling. */
7938 const char *
7939 pa_output_call (rtx_insn *insn, rtx call_dest, int sibcall)
7941 int seq_length = dbr_sequence_length ();
7942 tree call_decl = SYMBOL_REF_DECL (call_dest);
7943 int local_call = call_decl && targetm.binds_local_p (call_decl);
7944 rtx xoperands[4];
7946 xoperands[0] = call_dest;
7948 /* Handle the common case where we're sure that the branch will reach
7949 the beginning of the "$CODE$" subspace. This is the beginning of
7950 the current function if we are in a named section. */
7951 if (!TARGET_LONG_CALLS && pa_attr_length_call (insn, sibcall) == 8)
7953 xoperands[1] = gen_rtx_REG (word_mode, sibcall ? 0 : 2);
7954 output_asm_insn ("{bl|b,l} %0,%1", xoperands);
7956 else
7958 if (TARGET_64BIT && !local_call)
7960 /* ??? As far as I can tell, the HP linker doesn't support the
7961 long pc-relative sequence described in the 64-bit runtime
7962 architecture. So, we use a slightly longer indirect call. */
7963 xoperands[0] = pa_get_deferred_plabel (call_dest);
7964 xoperands[1] = gen_label_rtx ();
7966 /* If this isn't a sibcall, we put the load of %r27 into the
7967 delay slot. We can't do this in a sibcall as we don't
7968 have a second call-clobbered scratch register available.
7969 We don't need to do anything when generating fast indirect
7970 calls. */
7971 if (seq_length != 0 && !sibcall)
7973 final_scan_insn (NEXT_INSN (insn), asm_out_file,
7974 optimize, 0, NULL);
7976 /* Now delete the delay insn. */
7977 SET_INSN_DELETED (NEXT_INSN (insn));
7978 seq_length = 0;
7981 output_asm_insn ("addil LT'%0,%%r27", xoperands);
7982 output_asm_insn ("ldd RT'%0(%%r1),%%r1", xoperands);
7983 output_asm_insn ("ldd 0(%%r1),%%r1", xoperands);
7985 if (sibcall)
7987 output_asm_insn ("ldd 24(%%r1),%%r27", xoperands);
7988 output_asm_insn ("ldd 16(%%r1),%%r1", xoperands);
7989 output_asm_insn ("bve (%%r1)", xoperands);
7991 else
7993 output_asm_insn ("ldd 16(%%r1),%%r2", xoperands);
7994 output_asm_insn ("bve,l (%%r2),%%r2", xoperands);
7995 output_asm_insn ("ldd 24(%%r1),%%r27", xoperands);
7996 seq_length = 1;
7999 else
8001 int indirect_call = 0;
8003 /* Emit a long call. There are several different sequences
8004 of increasing length and complexity. In most cases,
8005 they don't allow an instruction in the delay slot. */
8006 if (!((TARGET_LONG_ABS_CALL || local_call) && !flag_pic)
8007 && !TARGET_LONG_PIC_SDIFF_CALL
8008 && !(TARGET_GAS && !TARGET_SOM && local_call)
8009 && !TARGET_64BIT)
8010 indirect_call = 1;
8012 if (seq_length != 0
8013 && !sibcall
8014 && (!TARGET_PA_20
8015 || indirect_call
8016 || ((TARGET_LONG_ABS_CALL || local_call) && !flag_pic)))
8018 /* A non-jump insn in the delay slot. By definition we can
8019 emit this insn before the call (and in fact before argument
8020 relocating). */
8021 final_scan_insn (NEXT_INSN (insn), asm_out_file, optimize, 0,
8022 NULL);
8024 /* Now delete the delay insn. */
8025 SET_INSN_DELETED (NEXT_INSN (insn));
8026 seq_length = 0;
8029 if ((TARGET_LONG_ABS_CALL || local_call) && !flag_pic)
8031 /* This is the best sequence for making long calls in
8032 non-pic code. Unfortunately, GNU ld doesn't provide
8033 the stub needed for external calls, and GAS's support
8034 for this with the SOM linker is buggy. It is safe
8035 to use this for local calls. */
8036 output_asm_insn ("ldil L'%0,%%r1", xoperands);
8037 if (sibcall)
8038 output_asm_insn ("be R'%0(%%sr4,%%r1)", xoperands);
8039 else
8041 if (TARGET_PA_20)
8042 output_asm_insn ("be,l R'%0(%%sr4,%%r1),%%sr0,%%r31",
8043 xoperands);
8044 else
8045 output_asm_insn ("ble R'%0(%%sr4,%%r1)", xoperands);
8047 output_asm_insn ("copy %%r31,%%r2", xoperands);
8048 seq_length = 1;
8051 else
8053 /* The HP assembler and linker can handle relocations for
8054 the difference of two symbols. The HP assembler
8055 recognizes the sequence as a pc-relative call and
8056 the linker provides stubs when needed. */
8058 /* GAS currently can't generate the relocations that
8059 are needed for the SOM linker under HP-UX using this
8060 sequence. The GNU linker doesn't generate the stubs
8061 that are needed for external calls on TARGET_ELF32
8062 with this sequence. For now, we have to use a longer
8063 plabel sequence when using GAS for non local calls. */
8064 if (TARGET_LONG_PIC_SDIFF_CALL
8065 || (TARGET_GAS && !TARGET_SOM && local_call))
8067 xoperands[1] = gen_rtx_REG (Pmode, 1);
8068 xoperands[2] = xoperands[1];
8069 pa_output_pic_pcrel_sequence (xoperands);
8071 else
8073 /* Emit a long plabel-based call sequence. This is
8074 essentially an inline implementation of $$dyncall.
8075 We don't actually try to call $$dyncall as this is
8076 as difficult as calling the function itself. */
8077 xoperands[0] = pa_get_deferred_plabel (call_dest);
8078 xoperands[1] = gen_label_rtx ();
8080 /* Since the call is indirect, FP arguments in registers
8081 need to be copied to the general registers. Then, the
8082 argument relocation stub will copy them back. */
8083 if (TARGET_SOM)
8084 copy_fp_args (insn);
8086 if (flag_pic)
8088 output_asm_insn ("addil LT'%0,%%r19", xoperands);
8089 output_asm_insn ("ldw RT'%0(%%r1),%%r1", xoperands);
8090 output_asm_insn ("ldw 0(%%r1),%%r1", xoperands);
8092 else
8094 output_asm_insn ("addil LR'%0-$global$,%%r27",
8095 xoperands);
8096 output_asm_insn ("ldw RR'%0-$global$(%%r1),%%r1",
8097 xoperands);
8100 output_asm_insn ("bb,>=,n %%r1,30,.+16", xoperands);
8101 output_asm_insn ("depi 0,31,2,%%r1", xoperands);
8102 output_asm_insn ("ldw 4(%%sr0,%%r1),%%r19", xoperands);
8103 output_asm_insn ("ldw 0(%%sr0,%%r1),%%r1", xoperands);
8105 if (!sibcall && !TARGET_PA_20)
8107 output_asm_insn ("{bl|b,l} .+8,%%r2", xoperands);
8108 if (TARGET_NO_SPACE_REGS || (local_call && !flag_pic))
8109 output_asm_insn ("addi 8,%%r2,%%r2", xoperands);
8110 else
8111 output_asm_insn ("addi 16,%%r2,%%r2", xoperands);
8115 if (TARGET_PA_20)
8117 if (sibcall)
8118 output_asm_insn ("bve (%%r1)", xoperands);
8119 else
8121 if (indirect_call)
8123 output_asm_insn ("bve,l (%%r1),%%r2", xoperands);
8124 output_asm_insn ("stw %%r2,-24(%%sp)", xoperands);
8125 seq_length = 1;
8127 else
8128 output_asm_insn ("bve,l (%%r1),%%r2", xoperands);
8131 else
8133 if (!TARGET_NO_SPACE_REGS && (!local_call || flag_pic))
8134 output_asm_insn ("ldsid (%%r1),%%r31\n\tmtsp %%r31,%%sr0",
8135 xoperands);
8137 if (sibcall)
8139 if (TARGET_NO_SPACE_REGS || (local_call && !flag_pic))
8140 output_asm_insn ("be 0(%%sr4,%%r1)", xoperands);
8141 else
8142 output_asm_insn ("be 0(%%sr0,%%r1)", xoperands);
8144 else
8146 if (TARGET_NO_SPACE_REGS || (local_call && !flag_pic))
8147 output_asm_insn ("ble 0(%%sr4,%%r1)", xoperands);
8148 else
8149 output_asm_insn ("ble 0(%%sr0,%%r1)", xoperands);
8151 if (indirect_call)
8152 output_asm_insn ("stw %%r31,-24(%%sp)", xoperands);
8153 else
8154 output_asm_insn ("copy %%r31,%%r2", xoperands);
8155 seq_length = 1;
8162 if (seq_length == 0)
8163 output_asm_insn ("nop", xoperands);
8165 return "";
8168 /* Return the attribute length of the indirect call instruction INSN.
8169 The length must match the code generated by pa_output_indirect_call.
8170 The returned length includes the delay slot. Currently, the delay
8171 slot of an indirect call sequence is not exposed and it is used by
8172 the sequence itself. */
8174 int
8175 pa_attr_length_indirect_call (rtx_insn *insn)
8177 unsigned long distance = -1;
8178 unsigned long total = IN_NAMED_SECTION_P (cfun->decl) ? 0 : total_code_bytes;
8180 if (INSN_ADDRESSES_SET_P ())
8182 distance = (total + insn_current_reference_address (insn));
8183 if (distance < total)
8184 distance = -1;
8187 if (TARGET_64BIT)
8188 return 12;
8190 if (TARGET_FAST_INDIRECT_CALLS)
8191 return 8;
8193 if (TARGET_PORTABLE_RUNTIME)
8194 return 16;
8196 /* Inline version of $$dyncall. */
8197 if ((TARGET_NO_SPACE_REGS || TARGET_PA_20) && !optimize_size)
8198 return 20;
8200 if (!TARGET_LONG_CALLS
8201 && ((TARGET_PA_20 && !TARGET_SOM && distance < 7600000)
8202 || distance < MAX_PCREL17F_OFFSET))
8203 return 8;
8205 /* Out of reach, can use ble. */
8206 if (!flag_pic)
8207 return 12;
8209 /* Inline version of $$dyncall. */
8210 if (TARGET_NO_SPACE_REGS || TARGET_PA_20)
8211 return 20;
8213 if (!optimize_size)
8214 return 36;
8216 /* Long PIC pc-relative call. */
8217 return 20;
8220 const char *
8221 pa_output_indirect_call (rtx_insn *insn, rtx call_dest)
8223 rtx xoperands[4];
8224 int length;
8226 if (TARGET_64BIT)
8228 xoperands[0] = call_dest;
8229 output_asm_insn ("ldd 16(%0),%%r2\n\t"
8230 "bve,l (%%r2),%%r2\n\t"
8231 "ldd 24(%0),%%r27", xoperands);
8232 return "";
8235 /* First the special case for kernels, level 0 systems, etc. */
8236 if (TARGET_FAST_INDIRECT_CALLS)
8238 pa_output_arg_descriptor (insn);
8239 if (TARGET_PA_20)
8240 return "bve,l,n (%%r22),%%r2\n\tnop";
8241 return "ble 0(%%sr4,%%r22)\n\tcopy %%r31,%%r2";
8244 if (TARGET_PORTABLE_RUNTIME)
8246 output_asm_insn ("ldil L'$$dyncall,%%r31\n\t"
8247 "ldo R'$$dyncall(%%r31),%%r31", xoperands);
8248 pa_output_arg_descriptor (insn);
8249 return "blr %%r0,%%r2\n\tbv,n %%r0(%%r31)";
8252 /* Maybe emit a fast inline version of $$dyncall. */
8253 if ((TARGET_NO_SPACE_REGS || TARGET_PA_20) && !optimize_size)
8255 output_asm_insn ("bb,>=,n %%r22,30,.+12\n\t"
8256 "ldw 2(%%r22),%%r19\n\t"
8257 "ldw -2(%%r22),%%r22", xoperands);
8258 pa_output_arg_descriptor (insn);
8259 if (TARGET_NO_SPACE_REGS)
8261 if (TARGET_PA_20)
8262 return "bve,l,n (%%r22),%%r2\n\tnop";
8263 return "ble 0(%%sr4,%%r22)\n\tcopy %%r31,%%r2";
8265 return "bve,l (%%r22),%%r2\n\tstw %%r2,-24(%%sp)";
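/* Hedged sketch (behavior inferred from the instructions above): bb
   tests bit 30 of %r22, i.e. the value bit 2 in PA's MSB-first bit
   numbering, which appears to mark a plabel rather than a plain code
   address.  In C-like terms:

     // p is the call target in %r22
     if (p & 2)                          // plabel pointer?
       {
         gp = *(unsigned *) (p + 2);     // ldw 2(%r22),%r19
         p  = *(unsigned *) (p - 2);     // ldw -2(%r22),%r22
       }

   with the +-2 offsets cancelling the marker bit.  */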
8268 /* Now the normal case -- we can reach $$dyncall directly or
8269 we're sure that we can get there via a long-branch stub.
8271 No need to check target flags as the length uniquely identifies
8272 the remaining cases. */
8273 length = pa_attr_length_indirect_call (insn);
8274 if (length == 8)
8276 pa_output_arg_descriptor (insn);
8278 /* The HP linker sometimes substitutes a BLE for BL/B,L calls to
8279 $$dyncall. Since BLE uses %r31 as the link register, the 22-bit
8280 variant of the B,L instruction can't be used on the SOM target. */
8281 if (TARGET_PA_20 && !TARGET_SOM)
8282 return "b,l,n $$dyncall,%%r2\n\tnop";
8283 else
8284 return "bl $$dyncall,%%r31\n\tcopy %%r31,%%r2";
8287 /* Long millicode call, but we are not generating PIC or portable runtime
8288 code. */
8289 if (length == 12)
8291 output_asm_insn ("ldil L'$$dyncall,%%r2", xoperands);
8292 pa_output_arg_descriptor (insn);
8293 return "ble R'$$dyncall(%%sr4,%%r2)\n\tcopy %%r31,%%r2";
8296 /* Maybe emit a fast inline version of $$dyncall. The long PIC
8297 pc-relative call sequence is five instructions. The inline PA 2.0
8298 version of $$dyncall is also five instructions. The PA 1.X versions
8299 are longer but still an overall win. */
8300 if (TARGET_NO_SPACE_REGS || TARGET_PA_20 || !optimize_size)
8302 output_asm_insn ("bb,>=,n %%r22,30,.+12\n\t"
8303 "ldw 2(%%r22),%%r19\n\t"
8304 "ldw -2(%%r22),%%r22", xoperands);
8305 if (TARGET_NO_SPACE_REGS)
8307 pa_output_arg_descriptor (insn);
8308 if (TARGET_PA_20)
8309 return "bve,l,n (%%r22),%%r2\n\tnop";
8310 return "ble 0(%%sr4,%%r22)\n\tcopy %%r31,%%r2";
8312 if (TARGET_PA_20)
8314 pa_output_arg_descriptor (insn);
8315 return "bve,l (%%r22),%%r2\n\tstw %%r2,-24(%%sp)";
8317 output_asm_insn ("bl .+8,%%r2\n\t"
8318 "ldo 16(%%r2),%%r2\n\t"
8319 "ldsid (%%r22),%%r1\n\t"
8320 "mtsp %%r1,%%sr0", xoperands);
8321 pa_output_arg_descriptor (insn);
8322 return "be 0(%%sr0,%%r22)\n\tstw %%r2,-24(%%sp)";
8325 /* We need a long PIC call to $$dyncall. */
8326 xoperands[0] = gen_rtx_SYMBOL_REF (Pmode, "$$dyncall");
8327 xoperands[1] = gen_rtx_REG (Pmode, 2);
8328 xoperands[2] = gen_rtx_REG (Pmode, 1);
8329 pa_output_pic_pcrel_sequence (xoperands);
8330 pa_output_arg_descriptor (insn);
8331 return "bv %%r0(%%r1)\n\tldo {12|20}(%%r2),%%r2";
8334 /* In HPUX 8.0's shared library scheme, special relocations are needed
8335 for function labels if they might be passed to a function
8336 in a shared library (because shared libraries don't live in code
8337 space), and special magic is needed to construct their address. */
8339 void
8340 pa_encode_label (rtx sym)
8342 const char *str = XSTR (sym, 0);
8343 int len = strlen (str) + 1;
8344 char *newstr, *p;
8346 p = newstr = XALLOCAVEC (char, len + 1);
8347 *p++ = '@';
8348 strcpy (p, str);
8350 XSTR (sym, 0) = ggc_alloc_string (newstr, len);
8353 static void
8354 pa_encode_section_info (tree decl, rtx rtl, int first)
8356 int old_referenced = 0;
8358 if (!first && MEM_P (rtl) && GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF)
8359 old_referenced
8360 = SYMBOL_REF_FLAGS (XEXP (rtl, 0)) & SYMBOL_FLAG_REFERENCED;
8362 default_encode_section_info (decl, rtl, first);
8364 if (first && TEXT_SPACE_P (decl))
8366 SYMBOL_REF_FLAG (XEXP (rtl, 0)) = 1;
8367 if (TREE_CODE (decl) == FUNCTION_DECL)
8368 pa_encode_label (XEXP (rtl, 0));
8370 else if (old_referenced)
8371 SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= old_referenced;
8374 /* This is roughly the inverse of pa_encode_section_info. */
8376 static const char *
8377 pa_strip_name_encoding (const char *str)
8379 str += (*str == '@');
8380 str += (*str == '*');
8381 return str;
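/* Illustrative round trip (worked from pa_encode_label above and the
   two conditional increments here):

     // "foo"  -> pa_encode_label -> "@foo" -> strip -> "foo"
     // "*bar"                             -> strip -> "bar"
     // "@*baz"                            -> strip -> "baz"

   '*' being the generic user-label prefix handled for completeness.  */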
8384 /* Returns 1 if OP is a function label involved in a simple addition
8385 with a constant. Used to keep certain patterns from matching
8386 during instruction combination. */
8387 int
8388 pa_is_function_label_plus_const (rtx op)
8390 /* Strip off any CONST. */
8391 if (GET_CODE (op) == CONST)
8392 op = XEXP (op, 0);
8394 return (GET_CODE (op) == PLUS
8395 && function_label_operand (XEXP (op, 0), VOIDmode)
8396 && GET_CODE (XEXP (op, 1)) == CONST_INT);
8399 /* Output assembly code for a thunk to FUNCTION. */
8401 static void
8402 pa_asm_output_mi_thunk (FILE *file, tree thunk_fndecl, HOST_WIDE_INT delta,
8403 HOST_WIDE_INT vcall_offset ATTRIBUTE_UNUSED,
8404 tree function)
8406 static unsigned int current_thunk_number;
8407 int val_14 = VAL_14_BITS_P (delta);
8408 unsigned int old_last_address = last_address, nbytes = 0;
8409 char label[17];
8410 rtx xoperands[4];
8412 xoperands[0] = XEXP (DECL_RTL (function), 0);
8413 xoperands[1] = XEXP (DECL_RTL (thunk_fndecl), 0);
8414 xoperands[2] = GEN_INT (delta);
8416 final_start_function (emit_barrier (), file, 1);
8418 /* Output the thunk. We know that the function is in the same
8419 translation unit (i.e., the same space) as the thunk, and that
8420 thunks are output after their method. Thus, we don't need an
8421 external branch to reach the function. With SOM and GAS,
8422 functions and thunks are effectively in different sections.
8423 Thus, we can always use an IA-relative branch and the linker
8424 will add a long branch stub if necessary.
8426 However, we have to be careful when generating PIC code on the
8427 SOM port to ensure that the sequence does not transfer to an
8428 import stub for the target function as this could clobber the
8429 return value saved at SP-24. This would also apply to the
8430 32-bit linux port if the multi-space model is implemented. */
8431 if ((!TARGET_LONG_CALLS && TARGET_SOM && !TARGET_PORTABLE_RUNTIME
8432 && !(flag_pic && TREE_PUBLIC (function))
8433 && (TARGET_GAS || last_address < 262132))
8434 || (!TARGET_LONG_CALLS && !TARGET_SOM && !TARGET_PORTABLE_RUNTIME
8435 && ((targetm_common.have_named_sections
8436 && DECL_SECTION_NAME (thunk_fndecl) != NULL
8437 /* The GNU 64-bit linker has rather poor stub management.
8438 So, we use a long branch from thunks that aren't in
8439 the same section as the target function. */
8440 && ((!TARGET_64BIT
8441 && (DECL_SECTION_NAME (thunk_fndecl)
8442 != DECL_SECTION_NAME (function)))
8443 || ((DECL_SECTION_NAME (thunk_fndecl)
8444 == DECL_SECTION_NAME (function))
8445 && last_address < 262132)))
8446 /* In this case, we need to be able to reach the start of
8447 the stub table even though the function is likely closer
8448 and can be jumped to directly. */
8449 || (targetm_common.have_named_sections
8450 && DECL_SECTION_NAME (thunk_fndecl) == NULL
8451 && DECL_SECTION_NAME (function) == NULL
8452 && total_code_bytes < MAX_PCREL17F_OFFSET)
8453 /* Likewise. */
8454 || (!targetm_common.have_named_sections
8455 && total_code_bytes < MAX_PCREL17F_OFFSET))))
8457 if (!val_14)
8458 output_asm_insn ("addil L'%2,%%r26", xoperands);
8460 output_asm_insn ("b %0", xoperands);
8462 if (val_14)
8464 output_asm_insn ("ldo %2(%%r26),%%r26", xoperands);
8465 nbytes += 8;
8467 else
8469 output_asm_insn ("ldo R'%2(%%r1),%%r26", xoperands);
8470 nbytes += 12;
8473 else if (TARGET_64BIT)
8475 rtx xop[4];
8477 /* We only have one call-clobbered scratch register, so we can't
8478 make use of the delay slot if delta doesn't fit in 14 bits. */
8479 if (!val_14)
8481 output_asm_insn ("addil L'%2,%%r26", xoperands);
8482 output_asm_insn ("ldo R'%2(%%r1),%%r26", xoperands);
8485 /* Load function address into %r1. */
8486 xop[0] = xoperands[0];
8487 xop[1] = gen_rtx_REG (Pmode, 1);
8488 xop[2] = xop[1];
8489 pa_output_pic_pcrel_sequence (xop);
8491 if (val_14)
8493 output_asm_insn ("bv %%r0(%%r1)", xoperands);
8494 output_asm_insn ("ldo %2(%%r26),%%r26", xoperands);
8495 nbytes += 20;
8497 else
8499 output_asm_insn ("bv,n %%r0(%%r1)", xoperands);
8500 nbytes += 24;
8503 else if (TARGET_PORTABLE_RUNTIME)
8505 output_asm_insn ("ldil L'%0,%%r1", xoperands);
8506 output_asm_insn ("ldo R'%0(%%r1),%%r22", xoperands);
8508 if (!val_14)
8509 output_asm_insn ("ldil L'%2,%%r26", xoperands);
8511 output_asm_insn ("bv %%r0(%%r22)", xoperands);
8513 if (val_14)
8515 output_asm_insn ("ldo %2(%%r26),%%r26", xoperands);
8516 nbytes += 16;
8518 else
8520 output_asm_insn ("ldo R'%2(%%r26),%%r26", xoperands);
8521 nbytes += 20;
8524 else if (TARGET_SOM && flag_pic && TREE_PUBLIC (function))
8526 /* The function is accessible from outside this module. The only
8527 way to avoid an import stub between the thunk and function is to
8528 call the function directly with an indirect sequence similar to
8529 that used by $$dyncall. This is possible because $$dyncall acts
8530 as the import stub in an indirect call. */
8531 ASM_GENERATE_INTERNAL_LABEL (label, "LTHN", current_thunk_number);
8532 xoperands[3] = gen_rtx_SYMBOL_REF (Pmode, label);
8533 output_asm_insn ("addil LT'%3,%%r19", xoperands);
8534 output_asm_insn ("ldw RT'%3(%%r1),%%r22", xoperands);
8535 output_asm_insn ("ldw 0(%%sr0,%%r22),%%r22", xoperands);
8536 output_asm_insn ("bb,>=,n %%r22,30,.+16", xoperands);
8537 output_asm_insn ("depi 0,31,2,%%r22", xoperands);
8538 output_asm_insn ("ldw 4(%%sr0,%%r22),%%r19", xoperands);
8539 output_asm_insn ("ldw 0(%%sr0,%%r22),%%r22", xoperands);
8541 if (!val_14)
8543 output_asm_insn ("addil L'%2,%%r26", xoperands);
8544 nbytes += 4;
8547 if (TARGET_PA_20)
8549 output_asm_insn ("bve (%%r22)", xoperands);
8550 nbytes += 36;
8552 else if (TARGET_NO_SPACE_REGS)
8554 output_asm_insn ("be 0(%%sr4,%%r22)", xoperands);
8555 nbytes += 36;
8557 else
8559 output_asm_insn ("ldsid (%%sr0,%%r22),%%r21", xoperands);
8560 output_asm_insn ("mtsp %%r21,%%sr0", xoperands);
8561 output_asm_insn ("be 0(%%sr0,%%r22)", xoperands);
8562 nbytes += 44;
8565 if (val_14)
8566 output_asm_insn ("ldo %2(%%r26),%%r26", xoperands);
8567 else
8568 output_asm_insn ("ldo R'%2(%%r1),%%r26", xoperands);
8570 else if (flag_pic)
8572 rtx xop[4];
8574 /* Load function address into %r22. */
8575 xop[0] = xoperands[0];
8576 xop[1] = gen_rtx_REG (Pmode, 1);
8577 xop[2] = gen_rtx_REG (Pmode, 22);
8578 pa_output_pic_pcrel_sequence (xop);
8580 if (!val_14)
8581 output_asm_insn ("addil L'%2,%%r26", xoperands);
8583 output_asm_insn ("bv %%r0(%%r22)", xoperands);
8585 if (val_14)
8587 output_asm_insn ("ldo %2(%%r26),%%r26", xoperands);
8588 nbytes += 20;
8590 else
8592 output_asm_insn ("ldo R'%2(%%r1),%%r26", xoperands);
8593 nbytes += 24;
8596 else
8598 if (!val_14)
8599 output_asm_insn ("addil L'%2,%%r26", xoperands);
8601 output_asm_insn ("ldil L'%0,%%r22", xoperands);
8602 output_asm_insn ("be R'%0(%%sr4,%%r22)", xoperands);
8604 if (val_14)
8606 output_asm_insn ("ldo %2(%%r26),%%r26", xoperands);
8607 nbytes += 12;
8609 else
8611 output_asm_insn ("ldo R'%2(%%r1),%%r26", xoperands);
8612 nbytes += 16;
8616 final_end_function ();
8618 if (TARGET_SOM && flag_pic && TREE_PUBLIC (function))
8620 switch_to_section (data_section);
8621 output_asm_insn (".align 4", xoperands);
8622 ASM_OUTPUT_LABEL (file, label);
8623 output_asm_insn (".word P'%0", xoperands);
8626 current_thunk_number++;
8627 nbytes = ((nbytes + FUNCTION_BOUNDARY / BITS_PER_UNIT - 1)
8628 & ~(FUNCTION_BOUNDARY / BITS_PER_UNIT - 1));
8629 last_address += nbytes;
8630 if (old_last_address > last_address)
8631 last_address = UINT_MAX;
8632 update_total_code_bytes (nbytes);
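/* Worked example (restating the arithmetic above): the nbytes update
   is the usual round-up-to-a-power-of-two idiom,

     rounded = (n + a - 1) & ~(a - 1),  a = FUNCTION_BOUNDARY / BITS_PER_UNIT

   so with a == 4, nbytes == 13 becomes 16 while multiples of 4 are
   unchanged.  The UINT_MAX clamp keeps last_address pinned once the
   running code size wraps around.  */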
8635 /* Only direct calls to static functions are allowed to be sibling (tail)
8636 call optimized.
8638 This restriction is necessary because some linker generated stubs will
8639 store return pointers into rp' in some cases which might clobber a
8640 live value already in rp'.
8642 In a sibcall the current function and the target function share stack
8643 space. Thus if the path to the current function and the path to the
8644 target function save a value in rp', they save the value into the
8645 same stack slot, which has undesirable consequences.
8647 Because of the deferred binding nature of shared libraries any function
8648 with external scope could be in a different load module and thus require
8649 rp' to be saved when calling that function. So sibcall optimizations
8650 can only be safe for static functions.
8652 Note that GCC never needs return value relocations, so we don't have to
8653 worry about static calls with return value relocations (which require
8654 saving rp').
8656 It is safe to perform a sibcall optimization when the target function
8657 will never return. */
8658 static bool
8659 pa_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
8661 /* Sibcalls are not ok because the arg pointer register is not a fixed
8662 register. This prevents the sibcall optimization from occurring. In
8663 addition, there are problems with stub placement using GNU ld. This
8664 is because a normal sibcall branch uses a 17-bit relocation while
8665 a regular call branch uses a 22-bit relocation. As a result, more
8666 care needs to be taken in the placement of long-branch stubs. */
8667 if (TARGET_64BIT)
8668 return false;
8670 if (TARGET_PORTABLE_RUNTIME)
8671 return false;
8673 /* Sibcalls are only ok within a translation unit. */
8674 return decl && targetm.binds_local_p (decl);
8677 /* ??? Addition is not commutative on the PA due to the weird implicit
8678 space register selection rules for memory addresses. Therefore, we
8679 don't consider a + b == b + a, as this might be inside a MEM. */
8680 static bool
8681 pa_commutative_p (const_rtx x, int outer_code)
8683 return (COMMUTATIVE_P (x)
8684 && (TARGET_NO_SPACE_REGS
8685 || (outer_code != UNKNOWN && outer_code != MEM)
8686 || GET_CODE (x) != PLUS));
8689 /* Returns 1 if the 6 operands specified in OPERANDS are suitable for
8690 use in fmpyadd instructions. */
8691 int
8692 pa_fmpyaddoperands (rtx *operands)
8694 machine_mode mode = GET_MODE (operands[0]);
8696 /* Must be a floating point mode. */
8697 if (mode != SFmode && mode != DFmode)
8698 return 0;
8700 /* All modes must be the same. */
8701 if (! (mode == GET_MODE (operands[1])
8702 && mode == GET_MODE (operands[2])
8703 && mode == GET_MODE (operands[3])
8704 && mode == GET_MODE (operands[4])
8705 && mode == GET_MODE (operands[5])))
8706 return 0;
8708 /* All operands must be registers. */
8709 if (! (GET_CODE (operands[1]) == REG
8710 && GET_CODE (operands[2]) == REG
8711 && GET_CODE (operands[3]) == REG
8712 && GET_CODE (operands[4]) == REG
8713 && GET_CODE (operands[5]) == REG))
8714 return 0;
8716 /* Only 2 real operands to the addition. One of the input operands must
8717 be the same as the output operand. */
8718 if (! rtx_equal_p (operands[3], operands[4])
8719 && ! rtx_equal_p (operands[3], operands[5]))
8720 return 0;
8722 /* Inout operand of add cannot conflict with any operands from multiply. */
8723 if (rtx_equal_p (operands[3], operands[0])
8724 || rtx_equal_p (operands[3], operands[1])
8725 || rtx_equal_p (operands[3], operands[2]))
8726 return 0;
8728 /* multiply cannot feed into addition operands. */
8729 if (rtx_equal_p (operands[4], operands[0])
8730 || rtx_equal_p (operands[5], operands[0]))
8731 return 0;
8733 /* SFmode limits the registers to the upper 32 of the 32bit FP regs. */
8734 if (mode == SFmode
8735 && (REGNO_REG_CLASS (REGNO (operands[0])) != FPUPPER_REGS
8736 || REGNO_REG_CLASS (REGNO (operands[1])) != FPUPPER_REGS
8737 || REGNO_REG_CLASS (REGNO (operands[2])) != FPUPPER_REGS
8738 || REGNO_REG_CLASS (REGNO (operands[3])) != FPUPPER_REGS
8739 || REGNO_REG_CLASS (REGNO (operands[4])) != FPUPPER_REGS
8740 || REGNO_REG_CLASS (REGNO (operands[5])) != FPUPPER_REGS))
8741 return 0;
8743 /* Passed. Operands are suitable for fmpyadd. */
8744 return 1;
8747 #if !defined(USE_COLLECT2)
8748 static void
8749 pa_asm_out_constructor (rtx symbol, int priority)
8751 if (!function_label_operand (symbol, VOIDmode))
8752 pa_encode_label (symbol);
8754 #ifdef CTORS_SECTION_ASM_OP
8755 default_ctor_section_asm_out_constructor (symbol, priority);
8756 #else
8757 # ifdef TARGET_ASM_NAMED_SECTION
8758 default_named_section_asm_out_constructor (symbol, priority);
8759 # else
8760 default_stabs_asm_out_constructor (symbol, priority);
8761 # endif
8762 #endif
8765 static void
8766 pa_asm_out_destructor (rtx symbol, int priority)
8768 if (!function_label_operand (symbol, VOIDmode))
8769 pa_encode_label (symbol);
8771 #ifdef DTORS_SECTION_ASM_OP
8772 default_dtor_section_asm_out_destructor (symbol, priority);
8773 #else
8774 # ifdef TARGET_ASM_NAMED_SECTION
8775 default_named_section_asm_out_destructor (symbol, priority);
8776 # else
8777 default_stabs_asm_out_destructor (symbol, priority);
8778 # endif
8779 #endif
8781 #endif
8783 /* This function places uninitialized global data in the bss section.
8784 The ASM_OUTPUT_ALIGNED_BSS macro needs to be defined to call this
8785 function on the SOM port to prevent uninitialized global data from
8786 being placed in the data section. */
8788 void
8789 pa_asm_output_aligned_bss (FILE *stream,
8790 const char *name,
8791 unsigned HOST_WIDE_INT size,
8792 unsigned int align)
8794 switch_to_section (bss_section);
8795 fprintf (stream, "\t.align %u\n", align / BITS_PER_UNIT);
8797 #ifdef ASM_OUTPUT_TYPE_DIRECTIVE
8798 ASM_OUTPUT_TYPE_DIRECTIVE (stream, name, "object");
8799 #endif
8801 #ifdef ASM_OUTPUT_SIZE_DIRECTIVE
8802 ASM_OUTPUT_SIZE_DIRECTIVE (stream, name, size);
8803 #endif
8805 fprintf (stream, "\t.align %u\n", align / BITS_PER_UNIT);
8806 ASM_OUTPUT_LABEL (stream, name);
8807 fprintf (stream, "\t.block " HOST_WIDE_INT_PRINT_UNSIGNED"\n", size);
8810 /* Both the HP and GNU assemblers under HP-UX provide a .comm directive
8811 that doesn't allow the alignment of global common storage to be directly
8812 specified. The SOM linker aligns common storage based on the rounded
8813 value of the NUM_BYTES parameter in the .comm directive. It's not
8814 possible to use the .align directive as it doesn't affect the alignment
8815 of the label associated with a .comm directive. */
8817 void
8818 pa_asm_output_aligned_common (FILE *stream,
8819 const char *name,
8820 unsigned HOST_WIDE_INT size,
8821 unsigned int align)
8823 unsigned int max_common_align;
8825 max_common_align = TARGET_64BIT ? 128 : (size >= 4096 ? 256 : 64);
8826 if (align > max_common_align)
8828 warning (0, "alignment (%u) for %s exceeds maximum alignment "
8829 "for global common data. Using %u",
8830 align / BITS_PER_UNIT, name, max_common_align / BITS_PER_UNIT);
8831 align = max_common_align;
8834 switch_to_section (bss_section);
8836 assemble_name (stream, name);
8837 fprintf (stream, "\t.comm " HOST_WIDE_INT_PRINT_UNSIGNED"\n",
8838 MAX (size, align / BITS_PER_UNIT));
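/* Example output (assumed values for illustration): for name "buf",
   size 10 bytes and align 64 bits, the code above emits

     buf	.comm 10

   while size 1 with the same alignment emits "buf	.comm 8".  The
   NUM_BYTES operand is what the SOM linker rounds to derive the
   alignment, hence the MAX (size, align / BITS_PER_UNIT).  */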
8841 /* We can't use .comm for local common storage as the SOM linker effectively
8842 treats the symbol as universal and uses the same storage for local symbols
8843 with the same name in different object files. The .block directive
8844 reserves an uninitialized block of storage. However, it's not common
8845 storage. Fortunately, GCC never requests common storage with the same
8846 name in any given translation unit. */
8848 void
8849 pa_asm_output_aligned_local (FILE *stream,
8850 const char *name,
8851 unsigned HOST_WIDE_INT size,
8852 unsigned int align)
8854 switch_to_section (bss_section);
8855 fprintf (stream, "\t.align %u\n", align / BITS_PER_UNIT);
8857 #ifdef LOCAL_ASM_OP
8858 fprintf (stream, "%s", LOCAL_ASM_OP);
8859 assemble_name (stream, name);
8860 fprintf (stream, "\n");
8861 #endif
8863 ASM_OUTPUT_LABEL (stream, name);
8864 fprintf (stream, "\t.block " HOST_WIDE_INT_PRINT_UNSIGNED"\n", size);
8867 /* Returns 1 if the 6 operands specified in OPERANDS are suitable for
8868 use in fmpysub instructions. */
8869 int
8870 pa_fmpysuboperands (rtx *operands)
8872 machine_mode mode = GET_MODE (operands[0]);
8874 /* Must be a floating point mode. */
8875 if (mode != SFmode && mode != DFmode)
8876 return 0;
8878 /* All modes must be the same. */
8879 if (! (mode == GET_MODE (operands[1])
8880 && mode == GET_MODE (operands[2])
8881 && mode == GET_MODE (operands[3])
8882 && mode == GET_MODE (operands[4])
8883 && mode == GET_MODE (operands[5])))
8884 return 0;
8886 /* All operands must be registers. */
8887 if (! (GET_CODE (operands[1]) == REG
8888 && GET_CODE (operands[2]) == REG
8889 && GET_CODE (operands[3]) == REG
8890 && GET_CODE (operands[4]) == REG
8891 && GET_CODE (operands[5]) == REG))
8892 return 0;
8894 /* Only 2 real operands to the subtraction. Subtraction is not a commutative
8895 operation, so operands[4] must be the same as operands[3]. */
8896 if (! rtx_equal_p (operands[3], operands[4]))
8897 return 0;
8899 /* multiply cannot feed into subtraction. */
8900 if (rtx_equal_p (operands[5], operands[0]))
8901 return 0;
8903 /* Inout operand of sub cannot conflict with any operands from multiply. */
8904 if (rtx_equal_p (operands[3], operands[0])
8905 || rtx_equal_p (operands[3], operands[1])
8906 || rtx_equal_p (operands[3], operands[2]))
8907 return 0;
8909 /* SFmode limits the registers to the upper 32 of the 32bit FP regs. */
8910 if (mode == SFmode
8911 && (REGNO_REG_CLASS (REGNO (operands[0])) != FPUPPER_REGS
8912 || REGNO_REG_CLASS (REGNO (operands[1])) != FPUPPER_REGS
8913 || REGNO_REG_CLASS (REGNO (operands[2])) != FPUPPER_REGS
8914 || REGNO_REG_CLASS (REGNO (operands[3])) != FPUPPER_REGS
8915 || REGNO_REG_CLASS (REGNO (operands[4])) != FPUPPER_REGS
8916 || REGNO_REG_CLASS (REGNO (operands[5])) != FPUPPER_REGS))
8917 return 0;
8919 /* Passed. Operands are suitable for fmpysub. */
8920 return 1;
8923 /* Return 1 if the given constant is 2, 4, or 8. These are the valid
8924 constants for a MULT embedded inside a memory address. */
8925 int
8926 pa_mem_shadd_constant_p (int val)
8928 if (val == 2 || val == 4 || val == 8)
8929 return 1;
8930 else
8931 return 0;
8934 /* Return 1 if the given constant is 1, 2, or 3. These are the valid
8935 constants for shadd instructions. */
8936 int
8937 pa_shadd_constant_p (int val)
8939 if (val == 1 || val == 2 || val == 3)
8940 return 1;
8941 else
8942 return 0;
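/* Illustrative mapping (hypothetical helper name): the two predicates
   describe the same scaling in different encodings -- a MULT by 2, 4
   or 8 inside an address corresponds to a shadd shift count of 1, 2
   or 3:

     static int mem_shadd_to_shadd (int val)
     {
       // mem constant 2/4/8  <->  shadd constant 1/2/3
       return val == 2 ? 1 : val == 4 ? 2 : val == 8 ? 3 : 0;
     }

   with 0 standing in for "not a valid scale".  */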
8945 /* Return TRUE if INSN branches forward. */
8947 static bool
8948 forward_branch_p (rtx_insn *insn)
8950 rtx lab = JUMP_LABEL (insn);
8952 /* The INSN must have a jump label. */
8953 gcc_assert (lab != NULL_RTX);
8955 if (INSN_ADDRESSES_SET_P ())
8956 return INSN_ADDRESSES (INSN_UID (lab)) > INSN_ADDRESSES (INSN_UID (insn));
8958 while (insn)
8960 if (insn == lab)
8961 return true;
8962 else
8963 insn = NEXT_INSN (insn);
8966 return false;
8969 /* Output an unconditional move and branch insn. */
8971 const char *
8972 pa_output_parallel_movb (rtx *operands, rtx_insn *insn)
8974 int length = get_attr_length (insn);
8976 /* These are the cases in which we win. */
8977 if (length == 4)
8978 return "mov%I1b,tr %1,%0,%2";
8980 /* None of the following cases win, but they don't lose either. */
8981 if (length == 8)
8983 if (dbr_sequence_length () == 0)
8985 /* Nothing in the delay slot, fake it by putting the combined
8986 insn (the copy or add) in the delay slot of a bl. */
8987 if (GET_CODE (operands[1]) == CONST_INT)
8988 return "b %2\n\tldi %1,%0";
8989 else
8990 return "b %2\n\tcopy %1,%0";
8992 else
8994 /* Something in the delay slot, but we've got a long branch. */
8995 if (GET_CODE (operands[1]) == CONST_INT)
8996 return "ldi %1,%0\n\tb %2";
8997 else
8998 return "copy %1,%0\n\tb %2";
9002 if (GET_CODE (operands[1]) == CONST_INT)
9003 output_asm_insn ("ldi %1,%0", operands);
9004 else
9005 output_asm_insn ("copy %1,%0", operands);
9006 return pa_output_lbranch (operands[2], insn, 1);
9009 /* Output an unconditional add and branch insn. */
9011 const char *
9012 pa_output_parallel_addb (rtx *operands, rtx_insn *insn)
9014 int length = get_attr_length (insn);
9016 /* To make life easy we want operand0 to be the shared input/output
9017 operand and operand1 to be the readonly operand. */
9018 if (operands[0] == operands[1])
9019 operands[1] = operands[2];
9021 /* These are the cases in which we win. */
9022 if (length == 4)
9023 return "add%I1b,tr %1,%0,%3";
9025 /* None of the following cases win, but they don't lose either. */
9026 if (length == 8)
9028 if (dbr_sequence_length () == 0)
9029 /* Nothing in the delay slot, fake it by putting the combined
9030 insn (the copy or add) in the delay slot of a bl. */
9031 return "b %3\n\tadd%I1 %1,%0,%0";
9032 else
9033 /* Something in the delay slot, but we've got a long branch. */
9034 return "add%I1 %1,%0,%0\n\tb %3";
9037 output_asm_insn ("add%I1 %1,%0,%0", operands);
9038 return pa_output_lbranch (operands[3], insn, 1);
9041 /* We use this hook to perform a PA specific optimization which is difficult
9042 to do in earlier passes. */
9044 static void
9045 pa_reorg (void)
9047 remove_useless_addtr_insns (1);
9049 if (pa_cpu < PROCESSOR_8000)
9050 pa_combine_instructions ();
9053 /* The PA has a number of odd instructions which can perform multiple
9054 tasks at once. On first generation PA machines (PA1.0 and PA1.1)
9055 it may be profitable to combine two instructions into one instruction
9056 with two outputs. It's not profitable on PA2.0 machines because the
9057 two outputs would take two slots in the reorder buffers.
9059 This routine finds instructions which can be combined and combines
9060 them. We only support some of the potential combinations, and we
9061 only try common ways to find suitable instructions.
9063 * addb can add two registers or a register and a small integer
9064 and jump to a nearby (+-8k) location. Normally the jump to the
9065 nearby location is conditional on the result of the add, but by
9066 using the "true" condition we can make the jump unconditional.
9067 Thus addb can perform two independent operations in one insn.
9069 * movb is similar to addb in that it can perform a reg->reg
9070 or small immediate->reg copy and jump to a nearby (+-8k) location.
9072 * fmpyadd and fmpysub can perform a FP multiply and either an
9073 FP add or FP sub if the operands of the multiply and add/sub are
9074 independent (there are other minor restrictions). Note both
9075 the fmpy and fadd/fsub can in theory move to better spots according
9076 to data dependencies, but for now we require the fmpy stay at a
9077 fixed location.
9079 * Many of the memory operations can perform pre & post updates
9080 of index registers. GCC's pre/post increment/decrement addressing
9081 is far too simple to take advantage of all the possibilities. This
9082 pass may not be suitable since those insns may not be independent.
9084 * comclr can compare two registers or a register and a small integer, nullify
9085 the following instruction and zero some other register. This
9086 is more difficult to use as it's harder to find an insn which
9087 will generate a comclr than finding something like an unconditional
9088 branch. (conditional moves & long branches create comclr insns).
9090 * Most arithmetic operations can conditionally skip the next
9091 instruction. They can be viewed as "perform this operation
9092 and conditionally jump to this nearby location" (where nearby
9093 is an insn away). These are difficult to use due to the
9094 branch length restrictions. */
9096 static void
9097 pa_combine_instructions (void)
9099 rtx_insn *anchor;
9101 /* This can get expensive since the basic algorithm is on the
9102 order of O(n^2) (or worse). Only do it for -O2 or higher
9103 levels of optimization. */
9104 if (optimize < 2)
9105 return;
9107 /* Walk down the list of insns looking for "anchor" insns which
9108 may be combined with "floating" insns. As the name implies,
9109 "anchor" instructions don't move, while "floating" insns may
9110 move around. */
9111 rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, NULL_RTX, NULL_RTX));
9112 rtx_insn *new_rtx = make_insn_raw (par);
9114 for (anchor = get_insns (); anchor; anchor = NEXT_INSN (anchor))
9116 enum attr_pa_combine_type anchor_attr;
9117 enum attr_pa_combine_type floater_attr;
9119 /* We only care about INSNs, JUMP_INSNs, and CALL_INSNs.
9120 Also ignore any special USE insns. */
9121 if ((! NONJUMP_INSN_P (anchor) && ! JUMP_P (anchor) && ! CALL_P (anchor))
9122 || GET_CODE (PATTERN (anchor)) == USE
9123 || GET_CODE (PATTERN (anchor)) == CLOBBER)
9124 continue;
9126 anchor_attr = get_attr_pa_combine_type (anchor);
9127 /* See if anchor is an insn suitable for combination. */
9128 if (anchor_attr == PA_COMBINE_TYPE_FMPY
9129 || anchor_attr == PA_COMBINE_TYPE_FADDSUB
9130 || (anchor_attr == PA_COMBINE_TYPE_UNCOND_BRANCH
9131 && ! forward_branch_p (anchor)))
9133 rtx_insn *floater;
9135 for (floater = PREV_INSN (anchor);
9136 floater;
9137 floater = PREV_INSN (floater))
9139 if (NOTE_P (floater)
9140 || (NONJUMP_INSN_P (floater)
9141 && (GET_CODE (PATTERN (floater)) == USE
9142 || GET_CODE (PATTERN (floater)) == CLOBBER)))
9143 continue;
9145 /* Anything except a regular INSN will stop our search. */
9146 if (! NONJUMP_INSN_P (floater))
9148 floater = NULL;
9149 break;
9152 /* See if FLOATER is suitable for combination with the
9153 anchor. */
9154 floater_attr = get_attr_pa_combine_type (floater);
9155 if ((anchor_attr == PA_COMBINE_TYPE_FMPY
9156 && floater_attr == PA_COMBINE_TYPE_FADDSUB)
9157 || (anchor_attr == PA_COMBINE_TYPE_FADDSUB
9158 && floater_attr == PA_COMBINE_TYPE_FMPY))
9160 /* If ANCHOR and FLOATER can be combined, then we're
9161 done with this pass. */
9162 if (pa_can_combine_p (new_rtx, anchor, floater, 0,
9163 SET_DEST (PATTERN (floater)),
9164 XEXP (SET_SRC (PATTERN (floater)), 0),
9165 XEXP (SET_SRC (PATTERN (floater)), 1)))
9166 break;
9169 else if (anchor_attr == PA_COMBINE_TYPE_UNCOND_BRANCH
9170 && floater_attr == PA_COMBINE_TYPE_ADDMOVE)
9172 if (GET_CODE (SET_SRC (PATTERN (floater))) == PLUS)
9174 if (pa_can_combine_p (new_rtx, anchor, floater, 0,
9175 SET_DEST (PATTERN (floater)),
9176 XEXP (SET_SRC (PATTERN (floater)), 0),
9177 XEXP (SET_SRC (PATTERN (floater)), 1)))
9178 break;
9180 else
9182 if (pa_can_combine_p (new_rtx, anchor, floater, 0,
9183 SET_DEST (PATTERN (floater)),
9184 SET_SRC (PATTERN (floater)),
9185 SET_SRC (PATTERN (floater))))
9186 break;
9191 /* If we didn't find anything on the backwards scan try forwards. */
9192 if (!floater
9193 && (anchor_attr == PA_COMBINE_TYPE_FMPY
9194 || anchor_attr == PA_COMBINE_TYPE_FADDSUB))
9196 for (floater = anchor; floater; floater = NEXT_INSN (floater))
9198 if (NOTE_P (floater)
9199 || (NONJUMP_INSN_P (floater)
9200 && (GET_CODE (PATTERN (floater)) == USE
9201 || GET_CODE (PATTERN (floater)) == CLOBBER)))
9203 continue;
9205 /* Anything except a regular INSN will stop our search. */
9206 if (! NONJUMP_INSN_P (floater))
9208 floater = NULL;
9209 break;
9212 /* See if FLOATER is suitable for combination with the
9213 anchor. */
9214 floater_attr = get_attr_pa_combine_type (floater);
9215 if ((anchor_attr == PA_COMBINE_TYPE_FMPY
9216 && floater_attr == PA_COMBINE_TYPE_FADDSUB)
9217 || (anchor_attr == PA_COMBINE_TYPE_FADDSUB
9218 && floater_attr == PA_COMBINE_TYPE_FMPY))
9220 /* If ANCHOR and FLOATER can be combined, then we're
9221 done with this pass. */
9222 if (pa_can_combine_p (new_rtx, anchor, floater, 1,
9223 SET_DEST (PATTERN (floater)),
9224 XEXP (SET_SRC (PATTERN (floater)),
9225 0),
9226 XEXP (SET_SRC (PATTERN (floater)),
9227 1)))
9228 break;
9233 /* FLOATER will be nonzero if we found a suitable floating
9234 insn for combination with ANCHOR. */
9235 if (floater
9236 && (anchor_attr == PA_COMBINE_TYPE_FADDSUB
9237 || anchor_attr == PA_COMBINE_TYPE_FMPY))
9239 /* Emit the new instruction and delete the old anchor. */
9240 rtvec vtemp = gen_rtvec (2, copy_rtx (PATTERN (anchor)),
9241 copy_rtx (PATTERN (floater)));
9242 rtx temp = gen_rtx_PARALLEL (VOIDmode, vtemp);
9243 emit_insn_before (temp, anchor);
9245 SET_INSN_DELETED (anchor);
9247 /* Emit a special USE insn for FLOATER, then delete
9248 the floating insn. */
9249 temp = copy_rtx (PATTERN (floater));
9250 emit_insn_before (gen_rtx_USE (VOIDmode, temp), floater);
9251 delete_insn (floater);
9253 continue;
9255 else if (floater
9256 && anchor_attr == PA_COMBINE_TYPE_UNCOND_BRANCH)
9258 /* Emit the new jump instruction and delete the old anchor. */
9259 rtvec vtemp = gen_rtvec (2, copy_rtx (PATTERN (anchor)),
9260 copy_rtx (PATTERN (floater)));
9261 rtx temp = gen_rtx_PARALLEL (VOIDmode, vtemp);
9262 temp = emit_jump_insn_before (temp, anchor);
9264 JUMP_LABEL (temp) = JUMP_LABEL (anchor);
9265 SET_INSN_DELETED (anchor);
9267 /* Emit a special USE insn for FLOATER, then delete
9268 the floating insn. */
9269 temp = copy_rtx (PATTERN (floater));
9270 emit_insn_before (gen_rtx_USE (VOIDmode, temp), floater);
9271 delete_insn (floater);
9272 continue;
9278 static int
9279 pa_can_combine_p (rtx_insn *new_rtx, rtx_insn *anchor, rtx_insn *floater,
9280 int reversed, rtx dest,
9281 rtx src1, rtx src2)
9283 int insn_code_number;
9284 rtx_insn *start, *end;
9286 /* Create a PARALLEL with the patterns of ANCHOR and
9287 FLOATER, try to recognize it, then test constraints
9288 for the resulting pattern.
9290 If the pattern doesn't match or the constraints
9291 aren't met keep searching for a suitable floater
9292 insn. */
9293 XVECEXP (PATTERN (new_rtx), 0, 0) = PATTERN (anchor);
9294 XVECEXP (PATTERN (new_rtx), 0, 1) = PATTERN (floater);
9295 INSN_CODE (new_rtx) = -1;
9296 insn_code_number = recog_memoized (new_rtx);
9297 basic_block bb = BLOCK_FOR_INSN (anchor);
9298 if (insn_code_number < 0
9299 || (extract_insn (new_rtx),
9300 !constrain_operands (1, get_preferred_alternatives (new_rtx, bb))))
9301 return 0;
9303 if (reversed)
9305 start = anchor;
9306 end = floater;
9308 else
9310 start = floater;
9311 end = anchor;
9314 /* There are up to three operands to consider. One
9315 output and two inputs.
9317 The output must not be used between FLOATER & ANCHOR
9318 exclusive. The inputs must not be set between
9319 FLOATER and ANCHOR exclusive. */
9321 if (reg_used_between_p (dest, start, end))
9322 return 0;
9324 if (reg_set_between_p (src1, start, end))
9325 return 0;
9327 if (reg_set_between_p (src2, start, end))
9328 return 0;
9330 /* If we get here, then everything is good. */
9331 return 1;
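/* Hedged sketch (shape inferred from the PARALLEL built above): a
   successful fmpy/fadd combination is recognized as something like

     (parallel [(set (reg fA) (mult (reg f1) (reg f2)))
                (set (reg fB) (plus (reg f3) (reg f4)))])

   which the machine description can then match as a single
   fmpyadd-style insn, provided constrain_operands also accepts the
   operand alternatives.  */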
9334 /* Return nonzero if references for INSN are delayed.
9336 Millicode insns are actually function calls with some special
9337 constraints on arguments and register usage.
9339 Millicode calls always expect their arguments in the integer argument
9340 registers, and always return their result in %r29 (ret1). They
9341 are expected to clobber their arguments, %r1, %r29, and the return
9342 pointer which is %r31 on 32-bit and %r2 on 64-bit, and nothing else.
9344 This function tells reorg that the references to arguments and
9345 millicode calls do not appear to happen until after the millicode call.
9346 This allows reorg to put insns which set the argument registers into the
9347 delay slot of the millicode call -- thus they act more like traditional
9348 CALL_INSNs.
9350 Note we cannot consider side effects of the insn to be delayed because
9351 the branch and link insn will clobber the return pointer. If we happened
9352 to use the return pointer in the delay slot of the call, then we lose.
9354 get_attr_type will try to recognize the given insn, so make sure to
9355 filter out things it will not accept -- SEQUENCE, USE and CLOBBER insns
9356 in particular. */
9357 int
9358 pa_insn_refs_are_delayed (rtx_insn *insn)
9360 return ((NONJUMP_INSN_P (insn)
9361 && GET_CODE (PATTERN (insn)) != SEQUENCE
9362 && GET_CODE (PATTERN (insn)) != USE
9363 && GET_CODE (PATTERN (insn)) != CLOBBER
9364 && get_attr_type (insn) == TYPE_MILLI));
9367 /* Promote the return value, but not the arguments. */
9369 static machine_mode
9370 pa_promote_function_mode (const_tree type ATTRIBUTE_UNUSED,
9371 machine_mode mode,
9372 int *punsignedp ATTRIBUTE_UNUSED,
9373 const_tree fntype ATTRIBUTE_UNUSED,
9374 int for_return)
9376 if (for_return == 0)
9377 return mode;
9378 return promote_mode (type, mode, punsignedp);
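/* Illustrative consequence (assuming the usual PROMOTE_MODE widening
   of sub-word integer modes): a function returning 'short' has its
   HImode result widened to word_mode because for_return is nonzero,
   while a 'short' argument takes the early return and keeps HImode.  */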
9381 /* On the HP-PA the value is found in register(s) 28(-29), unless
9382 the mode is SF or DF. Then the value is returned in fr4 (32).
9384 This must perform the same promotions as PROMOTE_MODE, else promoting
9385 return values in TARGET_PROMOTE_FUNCTION_MODE will not work correctly.
9387 Small structures must be returned in a PARALLEL on PA64 in order
9388 to match the HP Compiler ABI. */
9390 static rtx
9391 pa_function_value (const_tree valtype,
9392 const_tree func ATTRIBUTE_UNUSED,
9393 bool outgoing ATTRIBUTE_UNUSED)
9395 machine_mode valmode;
9397 if (AGGREGATE_TYPE_P (valtype)
9398 || TREE_CODE (valtype) == COMPLEX_TYPE
9399 || TREE_CODE (valtype) == VECTOR_TYPE)
9401 HOST_WIDE_INT valsize = int_size_in_bytes (valtype);
9403 /* Handle aggregates that fit exactly in a word or double word. */
9404 if ((valsize & (UNITS_PER_WORD - 1)) == 0)
9405 return gen_rtx_REG (TYPE_MODE (valtype), 28);
9407 if (TARGET_64BIT)
9409 /* Aggregates with a size less than or equal to 128 bits are
9410 returned in GR 28(-29). They are left justified. The pad
9411 bits are undefined. Larger aggregates are returned in
9412 memory. */
9413 rtx loc[2];
9414 int i, offset = 0;
9415 int ub = valsize <= UNITS_PER_WORD ? 1 : 2;
9417 for (i = 0; i < ub; i++)
9419 loc[i] = gen_rtx_EXPR_LIST (VOIDmode,
9420 gen_rtx_REG (DImode, 28 + i),
9421 GEN_INT (offset));
9422 offset += 8;
9425 return gen_rtx_PARALLEL (BLKmode, gen_rtvec_v (ub, loc));
9427 else if (valsize > UNITS_PER_WORD)
9429 /* Aggregates 5 to 8 bytes in size are returned in general
9430 registers r28-r29 in the same manner as other non
9431 floating-point objects. The data is right-justified and
9432 zero-extended to 64 bits. This is opposite to the normal
9433 justification used on big endian targets and requires
9434 special treatment. */
9435 rtx loc = gen_rtx_EXPR_LIST (VOIDmode,
9436 gen_rtx_REG (DImode, 28), const0_rtx);
9437 return gen_rtx_PARALLEL (BLKmode, gen_rtvec (1, loc));
9441 if ((INTEGRAL_TYPE_P (valtype)
9442 && GET_MODE_BITSIZE (TYPE_MODE (valtype)) < BITS_PER_WORD)
9443 || POINTER_TYPE_P (valtype))
9444 valmode = word_mode;
9445 else
9446 valmode = TYPE_MODE (valtype);
9448 if (TREE_CODE (valtype) == REAL_TYPE
9449 && !AGGREGATE_TYPE_P (valtype)
9450 && TYPE_MODE (valtype) != TFmode
9451 && !TARGET_SOFT_FLOAT)
9452 return gen_rtx_REG (valmode, 32);
9454 return gen_rtx_REG (valmode, 28);
9457 /* Implement the TARGET_LIBCALL_VALUE hook. */
9459 static rtx
9460 pa_libcall_value (machine_mode mode,
9461 const_rtx fun ATTRIBUTE_UNUSED)
9463 if (! TARGET_SOFT_FLOAT
9464 && (mode == SFmode || mode == DFmode))
9465 return gen_rtx_REG (mode, 32);
9466 else
9467 return gen_rtx_REG (mode, 28);
9470 /* Implement the TARGET_FUNCTION_VALUE_REGNO_P hook. */
9472 static bool
9473 pa_function_value_regno_p (const unsigned int regno)
9475 if (regno == 28
9476 || (! TARGET_SOFT_FLOAT && regno == 32))
9477 return true;
9479 return false;
9482 /* Update the data in CUM to advance over an argument
9483 of mode MODE and data type TYPE.
9484 (TYPE is null for libcalls where that information may not be available.) */
9486 static void
9487 pa_function_arg_advance (cumulative_args_t cum_v, machine_mode mode,
9488 const_tree type, bool named ATTRIBUTE_UNUSED)
9490 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
9491 int arg_size = pa_function_arg_size (mode, type);
9493 cum->nargs_prototype--;
9494 cum->words += (arg_size
9495 + ((cum->words & 01)
9496 && type != NULL_TREE
9497 && arg_size > 1));
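/* Editor's illustration, not part of the port: the advance arithmetic
   above inserts one pad word when a multiword argument would otherwise
   start on an odd word.  Standalone sketch with hypothetical names,
   guarded by #if 0.  */
#if 0
#include <stdio.h>

static int
sketch_advance (int words, int arg_size_words, int has_type)
{
  return words + arg_size_words
	 + ((words & 1) && has_type && arg_size_words > 1);
}

int
main (void)
{
  /* One word already used, then a two-word argument: a pad word is
     counted, so the cursor moves from 1 to 4 rather than 3.  */
  printf ("%d\n", sketch_advance (1, 2, 1));	/* 4 */
  printf ("%d\n", sketch_advance (0, 2, 1));	/* 2: start already even */
  return 0;
}
#endif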
9500 /* Return the location of a parameter that is passed in a register or NULL
9501 if the parameter has any component that is passed in memory.
9503 This is new code and will be pushed into the net sources after
9504 further testing.
9506 ??? We might want to restructure this so that it looks more like other
9507 ports. */
9508 static rtx
9509 pa_function_arg (cumulative_args_t cum_v, machine_mode mode,
9510 const_tree type, bool named ATTRIBUTE_UNUSED)
9512 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
9513 int max_arg_words = (TARGET_64BIT ? 8 : 4);
9514 int alignment = 0;
9515 int arg_size;
9516 int fpr_reg_base;
9517 int gpr_reg_base;
9518 rtx retval;
9520 if (mode == VOIDmode)
9521 return NULL_RTX;
9523 arg_size = pa_function_arg_size (mode, type);
9525 /* If this arg would be passed partially or totally on the stack, then
9526 this routine should return zero. pa_arg_partial_bytes will
9527 handle arguments which are split between regs and stack slots if
9528 the ABI mandates split arguments. */
9529 if (!TARGET_64BIT)
9531 /* The 32-bit ABI does not split arguments. */
9532 if (cum->words + arg_size > max_arg_words)
9533 return NULL_RTX;
9535 else
9537 if (arg_size > 1)
9538 alignment = cum->words & 1;
9539 if (cum->words + alignment >= max_arg_words)
9540 return NULL_RTX;
9543 /* The 32-bit and 64-bit ABIs are rather different,
9544 particularly in their handling of FP registers. We might
9545 be able to cleverly share code between them, but I'm not
9546 going to bother in the hope that splitting them up results
9547 in code that is more easily understood. */
9549 if (TARGET_64BIT)
9551 /* Advance the base registers to their current locations.
9553 Remember, gprs grow towards smaller register numbers while
9554 fprs grow towards higher register numbers. Also remember that
9555 although FP regs are 32-bit addressable, we pretend that
9556 the registers are 64 bits wide. */
9557 gpr_reg_base = 26 - cum->words;
9558 fpr_reg_base = 32 + cum->words;
9560 /* Arguments wider than one word and small aggregates need special
9561 treatment. */
9562 if (arg_size > 1
9563 || mode == BLKmode
9564 || (type && (AGGREGATE_TYPE_P (type)
9565 || TREE_CODE (type) == COMPLEX_TYPE
9566 || TREE_CODE (type) == VECTOR_TYPE)))
9568 /* Double-extended precision (80-bit), quad-precision (128-bit)
9569 and aggregates including complex numbers are aligned on
9570 128-bit boundaries. The first eight 64-bit argument slots
9571 are associated one-to-one, with general registers r26
9572 through r19, and also with floating-point registers fr4
9573 through fr11. Arguments larger than one word are always
9574 passed in general registers.
9576 Using a PARALLEL with a word mode register results in left
9577 justified data on a big-endian target. */
9579 rtx loc[8];
9580 int i, offset = 0, ub = arg_size;
9582 /* Align the base register. */
9583 gpr_reg_base -= alignment;
9585 ub = MIN (ub, max_arg_words - cum->words - alignment);
9586 for (i = 0; i < ub; i++)
9588 loc[i] = gen_rtx_EXPR_LIST (VOIDmode,
9589 gen_rtx_REG (DImode, gpr_reg_base),
9590 GEN_INT (offset));
9591 gpr_reg_base -= 1;
9592 offset += 8;
9595 return gen_rtx_PARALLEL (mode, gen_rtvec_v (ub, loc));
9598 else
9600 /* If the argument is larger than a word, then we know precisely
9601 which registers we must use. */
9602 if (arg_size > 1)
9604 if (cum->words)
9606 gpr_reg_base = 23;
9607 fpr_reg_base = 38;
9609 else
9611 gpr_reg_base = 25;
9612 fpr_reg_base = 34;
9615 /* Structures 5 to 8 bytes in size are passed in the general
9616 registers in the same manner as other non floating-point
9617 objects. The data is right-justified and zero-extended
9618 to 64 bits. This is opposite to the normal justification
9619 used on big endian targets and requires special treatment.
9620 We now define BLOCK_REG_PADDING to pad these objects.
9621 Aggregates, complex and vector types are passed in the same
9622 manner as structures. */
9623 if (mode == BLKmode
9624 || (type && (AGGREGATE_TYPE_P (type)
9625 || TREE_CODE (type) == COMPLEX_TYPE
9626 || TREE_CODE (type) == VECTOR_TYPE)))
9628 rtx loc = gen_rtx_EXPR_LIST (VOIDmode,
9629 gen_rtx_REG (DImode, gpr_reg_base),
9630 const0_rtx);
9631 return gen_rtx_PARALLEL (BLKmode, gen_rtvec (1, loc));
9634 else
9636 /* We have a single word (32 bits). A simple computation
9637 will get us the register #s we need. */
9638 gpr_reg_base = 26 - cum->words;
9639 fpr_reg_base = 32 + 2 * cum->words;
9643 /* Determine if the argument needs to be passed in both general and
9644 floating point registers. */
9645 if (((TARGET_PORTABLE_RUNTIME || TARGET_64BIT || TARGET_ELF32)
9646 /* If we are doing soft-float with portable runtime, then there
9647 is no need to worry about FP regs. */
9648 && !TARGET_SOFT_FLOAT
9649 /* The parameter must be some kind of scalar float, else we just
9650 pass it in integer registers. */
9651 && GET_MODE_CLASS (mode) == MODE_FLOAT
9652 /* The target function must not have a prototype. */
9653 && cum->nargs_prototype <= 0
9654 /* libcalls do not need to pass items in both FP and general
9655 registers. */
9656 && type != NULL_TREE
9657 /* All this hair applies to "outgoing" args only. This includes
9658 sibcall arguments set up with FUNCTION_INCOMING_ARG. */
9659 && !cum->incoming)
9660 /* Also pass outgoing floating arguments in both registers in indirect
9661 calls with the 32-bit ABI and the HP assembler since there is no
9662 way to specify argument locations in static functions. */
9663 || (!TARGET_64BIT
9664 && !TARGET_GAS
9665 && !cum->incoming
9666 && cum->indirect
9667 && GET_MODE_CLASS (mode) == MODE_FLOAT))
9669 retval
9670 = gen_rtx_PARALLEL
9671 (mode,
9672 gen_rtvec (2,
9673 gen_rtx_EXPR_LIST (VOIDmode,
9674 gen_rtx_REG (mode, fpr_reg_base),
9675 const0_rtx),
9676 gen_rtx_EXPR_LIST (VOIDmode,
9677 gen_rtx_REG (mode, gpr_reg_base),
9678 const0_rtx)));
9680 else
9682 /* See if we should pass this parameter in a general register. */
9683 if (TARGET_SOFT_FLOAT
9684 /* Indirect calls in the normal 32-bit ABI require all arguments
9685 to be passed in general registers. */
9686 || (!TARGET_PORTABLE_RUNTIME
9687 && !TARGET_64BIT
9688 && !TARGET_ELF32
9689 && cum->indirect)
9690 /* If the parameter is not a scalar floating-point parameter,
9691 then it belongs in GPRs. */
9692 || GET_MODE_CLASS (mode) != MODE_FLOAT
9693 /* Structure with single SFmode field belongs in GPR. */
9694 || (type && AGGREGATE_TYPE_P (type)))
9695 retval = gen_rtx_REG (mode, gpr_reg_base);
9696 else
9697 retval = gen_rtx_REG (mode, fpr_reg_base);
9699 return retval;
9702 /* Arguments larger than one word are double word aligned. */
9704 static unsigned int
9705 pa_function_arg_boundary (machine_mode mode, const_tree type)
9707 bool singleword = (type
9708 ? (integer_zerop (TYPE_SIZE (type))
9709 || !TREE_CONSTANT (TYPE_SIZE (type))
9710 || int_size_in_bytes (type) <= UNITS_PER_WORD)
9711 : GET_MODE_SIZE (mode) <= UNITS_PER_WORD);
9713 return singleword ? PARM_BOUNDARY : MAX_PARM_BOUNDARY;
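/* Editor's illustration, not part of the port: a 4-byte int keeps
   PARM_BOUNDARY alignment while an 8-byte double gets the double-word
   MAX_PARM_BOUNDARY.  Standalone sketch assuming the 32-bit port's
   4-byte word and 32/64-bit boundaries; guarded by #if 0.  */
#if 0
#include <stdio.h>

static unsigned int
sketch_arg_boundary (int size_bytes, int units_per_word)
{
  return size_bytes <= units_per_word ? 32 : 64;
}

int
main (void)
{
  printf ("int: %u bits\n", sketch_arg_boundary (4, 4));	/* 32 */
  printf ("double: %u bits\n", sketch_arg_boundary (8, 4));	/* 64 */
  return 0;
}
#endif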
9716 /* If this arg would be passed totally in registers or totally on the stack,
9717 then this routine should return zero. */
9719 static int
9720 pa_arg_partial_bytes (cumulative_args_t cum_v, machine_mode mode,
9721 tree type, bool named ATTRIBUTE_UNUSED)
9723 CUMULATIVE_ARGS *cum = get_cumulative_args (cum_v);
9724 unsigned int max_arg_words = 8;
9725 unsigned int offset = 0;
9727 if (!TARGET_64BIT)
9728 return 0;
9730 if (pa_function_arg_size (mode, type) > 1 && (cum->words & 1))
9731 offset = 1;
9733 if (cum->words + offset + pa_function_arg_size (mode, type) <= max_arg_words)
9734 /* Arg fits fully into registers. */
9735 return 0;
9736 else if (cum->words + offset >= max_arg_words)
9737 /* Arg fully on the stack. */
9738 return 0;
9739 else
9740 /* Arg is split. */
9741 return (max_arg_words - cum->words - offset) * UNITS_PER_WORD;
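/* Editor's illustration, not part of the port: the PA64 split rule
   above on concrete numbers, assuming 8-byte words and the 8-word
   register budget.  Standalone sketch, guarded by #if 0.  */
#if 0
#include <stdio.h>

static int
sketch_partial_bytes (int words, int arg_size_words)
{
  const int max_arg_words = 8, units_per_word = 8;
  int offset = (arg_size_words > 1 && (words & 1)) ? 1 : 0;

  if (words + offset + arg_size_words <= max_arg_words)
    return 0;				/* fits fully in registers */
  if (words + offset >= max_arg_words)
    return 0;				/* goes fully to the stack */
  return (max_arg_words - words - offset) * units_per_word;
}

int
main (void)
{
  /* Seven words used, two-word argument: the odd start forces a pad
     word, so the argument lands fully on the stack.  */
  printf ("%d\n", sketch_partial_bytes (7, 2));		/* 0 */
  /* Six words used, four-word argument: two words split off.  */
  printf ("%d\n", sketch_partial_bytes (6, 4));		/* 16 */
  return 0;
}
#endif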
9745 /* A get_unnamed_section callback for switching to the text section.
9747 This function is only used with SOM. Because we don't support
9748 named subspaces, we can only create a new subspace or switch back
9749 to the default text subspace. */
9751 static void
9752 som_output_text_section_asm_op (const void *data ATTRIBUTE_UNUSED)
9754 gcc_assert (TARGET_SOM);
9755 if (TARGET_GAS)
9757 if (cfun && cfun->machine && !cfun->machine->in_nsubspa)
9759 /* We only want to emit a .nsubspa directive once at the
9760 start of the function. */
9761 cfun->machine->in_nsubspa = 1;
9763 /* Create a new subspace for the text. This provides
9764 better stub placement and one-only functions. */
9765 if (cfun->decl
9766 && DECL_ONE_ONLY (cfun->decl)
9767 && !DECL_WEAK (cfun->decl))
9769 output_section_asm_op ("\t.SPACE $TEXT$\n"
9770 "\t.NSUBSPA $CODE$,QUAD=0,ALIGN=8,"
9771 "ACCESS=44,SORT=24,COMDAT");
9772 return;
9775 else
9777 /* There isn't a current function or the body of the current
9778 function has been completed. So, we are changing to the
9779 text section to output debugging information. Thus, we
9780 need to forget that we are in the text section so that
9781 varasm.c will call us when text_section is selected again. */
9782 gcc_assert (!cfun || !cfun->machine
9783 || cfun->machine->in_nsubspa == 2);
9784 in_section = NULL;
9786 output_section_asm_op ("\t.SPACE $TEXT$\n\t.NSUBSPA $CODE$");
9787 return;
9789 output_section_asm_op ("\t.SPACE $TEXT$\n\t.SUBSPA $CODE$");
9792 /* A get_unnamed_section callback for switching to comdat data
9793 sections. This function is only used with SOM. */
9795 static void
9796 som_output_comdat_data_section_asm_op (const void *data)
9798 in_section = NULL;
9799 output_section_asm_op (data);
9802 /* Implement TARGET_ASM_INIT_SECTIONS. */
9804 static void
9805 pa_som_asm_init_sections (void)
9807 text_section
9808 = get_unnamed_section (0, som_output_text_section_asm_op, NULL);
9810 /* SOM puts readonly data in the default $LIT$ subspace when PIC code
9811 is not being generated. */
9812 som_readonly_data_section
9813 = get_unnamed_section (0, output_section_asm_op,
9814 "\t.SPACE $TEXT$\n\t.SUBSPA $LIT$");
9816 /* When secondary definitions are not supported, SOM makes readonly
9817 data one-only by creating a new $LIT$ subspace in $TEXT$ with
9818 the comdat flag. */
9819 som_one_only_readonly_data_section
9820 = get_unnamed_section (0, som_output_comdat_data_section_asm_op,
9821 "\t.SPACE $TEXT$\n"
9822 "\t.NSUBSPA $LIT$,QUAD=0,ALIGN=8,"
9823 "ACCESS=0x2c,SORT=16,COMDAT");
9826 /* When secondary definitions are not supported, SOM makes data one-only
9827 by creating a new $DATA$ subspace in $PRIVATE$ with the comdat flag. */
9828 som_one_only_data_section
9829 = get_unnamed_section (SECTION_WRITE,
9830 som_output_comdat_data_section_asm_op,
9831 "\t.SPACE $PRIVATE$\n"
9832 "\t.NSUBSPA $DATA$,QUAD=1,ALIGN=8,"
9833 "ACCESS=31,SORT=24,COMDAT");
9835 if (flag_tm)
9836 som_tm_clone_table_section
9837 = get_unnamed_section (0, output_section_asm_op,
9838 "\t.SPACE $PRIVATE$\n\t.SUBSPA $TM_CLONE_TABLE$");
9840 /* FIXME: HPUX ld generates incorrect GOT entries for "T" fixups
9841 which reference data within the $TEXT$ space (for example constant
9842 strings in the $LIT$ subspace).
9844 The assemblers (GAS and HP as) both have problems with handling
9845 the difference of two symbols which is the other correct way to
9846 reference constant data during PIC code generation.
9848 So, there's no way to reference constant data which is in the
9849 $TEXT$ space during PIC generation. Instead place all constant
9850 data into the $PRIVATE$ subspace (this reduces sharing, but it
9851 works correctly). */
9852 readonly_data_section = flag_pic ? data_section : som_readonly_data_section;
9854 /* We must not have a reference to an external symbol defined in a
9855 shared library in a readonly section, else the SOM linker will
9856 complain.
9858 So, we force exception information into the data section. */
9859 exception_section = data_section;
9862 /* Implement TARGET_ASM_TM_CLONE_TABLE_SECTION. */
9864 static section *
9865 pa_som_tm_clone_table_section (void)
9867 return som_tm_clone_table_section;
9870 /* On hpux10, the linker will give an error if we have a reference
9871 in the read-only data section to a symbol defined in a shared
9872 library. Therefore, expressions that might require a reloc can
9873 not be placed in the read-only data section. */
9875 static section *
9876 pa_select_section (tree exp, int reloc,
9877 unsigned HOST_WIDE_INT align ATTRIBUTE_UNUSED)
9879 if (TREE_CODE (exp) == VAR_DECL
9880 && TREE_READONLY (exp)
9881 && !TREE_THIS_VOLATILE (exp)
9882 && DECL_INITIAL (exp)
9883 && (DECL_INITIAL (exp) == error_mark_node
9884 || TREE_CONSTANT (DECL_INITIAL (exp)))
9885 && !reloc)
9887 if (TARGET_SOM
9888 && DECL_ONE_ONLY (exp)
9889 && !DECL_WEAK (exp))
9890 return som_one_only_readonly_data_section;
9891 else
9892 return readonly_data_section;
9894 else if (CONSTANT_CLASS_P (exp) && !reloc)
9895 return readonly_data_section;
9896 else if (TARGET_SOM
9897 && TREE_CODE (exp) == VAR_DECL
9898 && DECL_ONE_ONLY (exp)
9899 && !DECL_WEAK (exp))
9900 return som_one_only_data_section;
9901 else
9902 return data_section;
9905 /* Implement pa_reloc_rw_mask. */
9907 static int
9908 pa_reloc_rw_mask (void)
9910 /* We force (const (plus (symbol) (const_int))) to memory when the
9911 const_int doesn't fit in a 14-bit integer. The SOM linker can't
9912 handle this construct in read-only memory and we want to avoid
9913 this for ELF. So, we always force an RTX needing relocation to
9914 the data section. */
9915 return 3;
9918 static void
9919 pa_globalize_label (FILE *stream, const char *name)
9921 /* We only handle DATA objects here, functions are globalized in
9922 ASM_DECLARE_FUNCTION_NAME. */
9923 if (! FUNCTION_NAME_P (name))
9925 fputs ("\t.EXPORT ", stream);
9926 assemble_name (stream, name);
9927 fputs (",DATA\n", stream);
9931 /* Worker function for TARGET_STRUCT_VALUE_RTX. */
9933 static rtx
9934 pa_struct_value_rtx (tree fntype ATTRIBUTE_UNUSED,
9935 int incoming ATTRIBUTE_UNUSED)
9937 return gen_rtx_REG (Pmode, PA_STRUCT_VALUE_REGNUM);
9940 /* Worker function for TARGET_RETURN_IN_MEMORY. */
9942 bool
9943 pa_return_in_memory (const_tree type, const_tree fntype ATTRIBUTE_UNUSED)
9945 /* SOM ABI says that objects larger than 64 bits are returned in memory.
9946 PA64 ABI says that objects larger than 128 bits are returned in memory.
9947 Note, int_size_in_bytes can return -1 if the size of the object is
9948 variable or larger than the maximum value that can be expressed as
9949 a HOST_WIDE_INT. It can also return zero for an empty type. The
9950 simplest way to handle variable and empty types is to pass them in
9951 memory. This avoids problems in defining the boundaries of argument
9952 slots, allocating registers, etc. */
9953 return (int_size_in_bytes (type) > (TARGET_64BIT ? 16 : 8)
9954 || int_size_in_bytes (type) <= 0);
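/* Editor's illustration, not part of the port: the thresholds above on
   concrete sizes; -1 models the int_size_in_bytes result for a
   variable-sized type.  Standalone sketch, guarded by #if 0.  */
#if 0
#include <stdio.h>

static int
sketch_return_in_memory (long size_bytes, int target_64bit)
{
  return size_bytes > (target_64bit ? 16 : 8) || size_bytes <= 0;
}

int
main (void)
{
  printf ("%d\n", sketch_return_in_memory (8, 0));	/* 0: r28-r29 */
  printf ("%d\n", sketch_return_in_memory (12, 0));	/* 1: memory, SOM */
  printf ("%d\n", sketch_return_in_memory (12, 1));	/* 0: GRs, PA64 */
  printf ("%d\n", sketch_return_in_memory (-1, 1));	/* 1: variable size */
  return 0;
}
#endif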
9957 /* Structure to hold declaration and name of external symbols that are
9958 emitted by GCC. We generate a vector of these symbols and output them
9959 at the end of the file if and only if SYMBOL_REF_REFERENCED_P is true.
9960 This avoids putting out names that are never really used. */
9962 typedef struct GTY(()) extern_symbol
9964 tree decl;
9965 const char *name;
9966 } extern_symbol;
9968 /* Define gc'd vector type for extern_symbol. */
9970 /* Vector of extern_symbol structures. */
9971 static GTY(()) vec<extern_symbol, va_gc> *extern_symbols;
9973 #ifdef ASM_OUTPUT_EXTERNAL_REAL
9974 /* Mark DECL (name NAME) as an external reference (assembler output
9975 file FILE). This saves the names to output at the end of the file
9976 if actually referenced. */
9978 void
9979 pa_hpux_asm_output_external (FILE *file, tree decl, const char *name)
9981 gcc_assert (file == asm_out_file);
9982 extern_symbol p = {decl, name};
9983 vec_safe_push (extern_symbols, p);
9985 #endif
9987 /* Output text required at the end of an assembler file.
9988 This includes deferred plabels and .import directives for
9989 all external symbols that were actually referenced. */
9991 static void
9992 pa_file_end (void)
9994 #ifdef ASM_OUTPUT_EXTERNAL_REAL
9995 unsigned int i;
9996 extern_symbol *p;
9998 if (!NO_DEFERRED_PROFILE_COUNTERS)
9999 output_deferred_profile_counters ();
10000 #endif
10002 output_deferred_plabels ();
10004 #ifdef ASM_OUTPUT_EXTERNAL_REAL
10005 for (i = 0; vec_safe_iterate (extern_symbols, i, &p); i++)
10007 tree decl = p->decl;
10009 if (!TREE_ASM_WRITTEN (decl)
10010 && SYMBOL_REF_REFERENCED_P (XEXP (DECL_RTL (decl), 0)))
10011 ASM_OUTPUT_EXTERNAL_REAL (asm_out_file, decl, p->name);
10014 vec_free (extern_symbols);
10015 #endif
10017 if (NEED_INDICATE_EXEC_STACK)
10018 file_end_indicate_exec_stack ();
10021 /* Implement TARGET_CAN_CHANGE_MODE_CLASS. */
10023 static bool
10024 pa_can_change_mode_class (machine_mode from, machine_mode to,
10025 reg_class_t rclass)
10027 if (from == to)
10028 return true;
10030 if (GET_MODE_SIZE (from) == GET_MODE_SIZE (to))
10031 return true;
10033 /* Reject changes to/from modes with zero size. */
10034 if (!GET_MODE_SIZE (from) || !GET_MODE_SIZE (to))
10035 return false;
10037 /* Reject changes to/from complex and vector modes. */
10038 if (COMPLEX_MODE_P (from) || VECTOR_MODE_P (from)
10039 || COMPLEX_MODE_P (to) || VECTOR_MODE_P (to))
10040 return false;
10042 /* There is no way to load QImode or HImode values directly from memory
10043 to a FP register. SImode loads to the FP registers are not zero
10044 extended. On the 64-bit target, this conflicts with the definition
10045 of LOAD_EXTEND_OP. Thus, we can't allow changing between modes with
10046 different sizes in the floating-point registers. */
10047 if (MAYBE_FP_REG_CLASS_P (rclass))
10048 return false;
10050 /* TARGET_HARD_REGNO_MODE_OK places modes with sizes larger than a word
10051 in specific sets of registers. Thus, we cannot allow changing
10052 to a larger mode when it's larger than a word. */
10053 if (GET_MODE_SIZE (to) > UNITS_PER_WORD
10054 && GET_MODE_SIZE (to) > GET_MODE_SIZE (from))
10055 return false;
10057 return true;
10060 /* Implement TARGET_MODES_TIEABLE_P.
10062 We should return FALSE for QImode and HImode because these modes
10063 are not ok in the floating-point registers. However, this prevents
10064 tying these modes to SImode and DImode in the general registers.
10065 So, this isn't a good idea. We rely on TARGET_HARD_REGNO_MODE_OK and
10066 TARGET_CAN_CHANGE_MODE_CLASS to prevent these modes from being used
10067 in the floating-point registers. */
10069 static bool
10070 pa_modes_tieable_p (machine_mode mode1, machine_mode mode2)
10072 /* Don't tie modes in different classes. */
10073 if (GET_MODE_CLASS (mode1) != GET_MODE_CLASS (mode2))
10074 return false;
10076 return true;
10080 /* Length in units of the trampoline instruction code. */
10082 #define TRAMPOLINE_CODE_SIZE (TARGET_64BIT ? 24 : (TARGET_PA_20 ? 32 : 40))
10085 /* Output assembler code for a block containing the constant parts
10086 of a trampoline, leaving space for the variable parts.
10088 The trampoline sets the static chain pointer to STATIC_CHAIN_REGNUM
10089 and then branches to the specified routine.
10091 This code template is copied from the text segment to a stack
10092 location, patched by pa_trampoline_init to contain valid values,
10093 and then entered as a subroutine.
10095 It is best to keep this as small as possible to avoid having to
10096 flush multiple lines in the cache. */
10098 static void
10099 pa_asm_trampoline_template (FILE *f)
10101 if (!TARGET_64BIT)
10103 fputs ("\tldw 36(%r22),%r21\n", f);
10104 fputs ("\tbb,>=,n %r21,30,.+16\n", f);
10105 if (ASSEMBLER_DIALECT == 0)
10106 fputs ("\tdepi 0,31,2,%r21\n", f);
10107 else
10108 fputs ("\tdepwi 0,31,2,%r21\n", f);
10109 fputs ("\tldw 4(%r21),%r19\n", f);
10110 fputs ("\tldw 0(%r21),%r21\n", f);
10111 if (TARGET_PA_20)
10113 fputs ("\tbve (%r21)\n", f);
10114 fputs ("\tldw 40(%r22),%r29\n", f);
10115 fputs ("\t.word 0\n", f);
10116 fputs ("\t.word 0\n", f);
10118 else
10120 fputs ("\tldsid (%r21),%r1\n", f);
10121 fputs ("\tmtsp %r1,%sr0\n", f);
10122 fputs ("\tbe 0(%sr0,%r21)\n", f);
10123 fputs ("\tldw 40(%r22),%r29\n", f);
10125 fputs ("\t.word 0\n", f);
10126 fputs ("\t.word 0\n", f);
10127 fputs ("\t.word 0\n", f);
10128 fputs ("\t.word 0\n", f);
10130 else
10132 fputs ("\t.dword 0\n", f);
10133 fputs ("\t.dword 0\n", f);
10134 fputs ("\t.dword 0\n", f);
10135 fputs ("\t.dword 0\n", f);
10136 fputs ("\tmfia %r31\n", f);
10137 fputs ("\tldd 24(%r31),%r1\n", f);
10138 fputs ("\tldd 24(%r1),%r27\n", f);
10139 fputs ("\tldd 16(%r1),%r1\n", f);
10140 fputs ("\tbve (%r1)\n", f);
10141 fputs ("\tldd 32(%r31),%r31\n", f);
10142 fputs ("\t.dword 0 ; fptr\n", f);
10143 fputs ("\t.dword 0 ; static link\n", f);
10147 /* Emit RTL insns to initialize the variable parts of a trampoline.
10148 FNADDR is an RTX for the address of the function's pure code.
10149 CXT is an RTX for the static chain value for the function.
10151 Move the function address to the trampoline template at offset 36.
10152 Move the static chain value to the trampoline template at offset 40.
10153 Move the trampoline address to the trampoline template at offset 44.
10154 Move r19 to the trampoline template at offset 48. The latter two
10155 words create a plabel for the indirect call to the trampoline.
10157 A similar sequence is used for the 64-bit port but the plabel is
10158 at the beginning of the trampoline.
10160 Finally, the cache entries for the trampoline code are flushed.
10161 This is necessary to ensure that the trampoline instruction sequence
10162 is written to memory prior to any attempts at prefetching the code
10163 sequence. */
10165 static void
10166 pa_trampoline_init (rtx m_tramp, tree fndecl, rtx chain_value)
10168 rtx fnaddr = XEXP (DECL_RTL (fndecl), 0);
10169 rtx start_addr = gen_reg_rtx (Pmode);
10170 rtx end_addr = gen_reg_rtx (Pmode);
10171 rtx line_length = gen_reg_rtx (Pmode);
10172 rtx r_tramp, tmp;
10174 emit_block_move (m_tramp, assemble_trampoline_template (),
10175 GEN_INT (TRAMPOLINE_SIZE), BLOCK_OP_NORMAL);
10176 r_tramp = force_reg (Pmode, XEXP (m_tramp, 0));
10178 if (!TARGET_64BIT)
10180 tmp = adjust_address (m_tramp, Pmode, 36);
10181 emit_move_insn (tmp, fnaddr);
10182 tmp = adjust_address (m_tramp, Pmode, 40);
10183 emit_move_insn (tmp, chain_value);
10185 /* Create a fat pointer for the trampoline. */
10186 tmp = adjust_address (m_tramp, Pmode, 44);
10187 emit_move_insn (tmp, r_tramp);
10188 tmp = adjust_address (m_tramp, Pmode, 48);
10189 emit_move_insn (tmp, gen_rtx_REG (Pmode, 19));
10191 /* fdc and fic only use registers for the address to flush,
10192 they do not accept integer displacements. We align the
10193 start and end addresses to the beginning of their respective
10194 cache lines to minimize the number of lines flushed. */
10195 emit_insn (gen_andsi3 (start_addr, r_tramp,
10196 GEN_INT (-MIN_CACHELINE_SIZE)));
10197 tmp = force_reg (Pmode, plus_constant (Pmode, r_tramp,
10198 TRAMPOLINE_CODE_SIZE-1));
10199 emit_insn (gen_andsi3 (end_addr, tmp,
10200 GEN_INT (-MIN_CACHELINE_SIZE)));
10201 emit_move_insn (line_length, GEN_INT (MIN_CACHELINE_SIZE));
10202 emit_insn (gen_dcacheflushsi (start_addr, end_addr, line_length));
10203 emit_insn (gen_icacheflushsi (start_addr, end_addr, line_length,
10204 gen_reg_rtx (Pmode),
10205 gen_reg_rtx (Pmode)));
10207 else
10209 tmp = adjust_address (m_tramp, Pmode, 56);
10210 emit_move_insn (tmp, fnaddr);
10211 tmp = adjust_address (m_tramp, Pmode, 64);
10212 emit_move_insn (tmp, chain_value);
10214 /* Create a fat pointer for the trampoline. */
10215 tmp = adjust_address (m_tramp, Pmode, 16);
10216 emit_move_insn (tmp, force_reg (Pmode, plus_constant (Pmode,
10217 r_tramp, 32)));
10218 tmp = adjust_address (m_tramp, Pmode, 24);
10219 emit_move_insn (tmp, gen_rtx_REG (Pmode, 27));
10221 /* fdc and fic only use registers for the address to flush,
10222 they do not accept integer displacements. We align the
10223 start and end addresses to the beginning of their respective
10224 cache lines to minimize the number of lines flushed. */
10225 tmp = force_reg (Pmode, plus_constant (Pmode, r_tramp, 32));
10226 emit_insn (gen_anddi3 (start_addr, tmp,
10227 GEN_INT (-MIN_CACHELINE_SIZE)));
10228 tmp = force_reg (Pmode, plus_constant (Pmode, tmp,
10229 TRAMPOLINE_CODE_SIZE - 1));
10230 emit_insn (gen_anddi3 (end_addr, tmp,
10231 GEN_INT (-MIN_CACHELINE_SIZE)));
10232 emit_move_insn (line_length, GEN_INT (MIN_CACHELINE_SIZE));
10233 emit_insn (gen_dcacheflushdi (start_addr, end_addr, line_length));
10234 emit_insn (gen_icacheflushdi (start_addr, end_addr, line_length,
10235 gen_reg_rtx (Pmode),
10236 gen_reg_rtx (Pmode)));
10239 #ifdef HAVE_ENABLE_EXECUTE_STACK
10240 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__enable_execute_stack"),
10241 LCT_NORMAL, VOIDmode, XEXP (m_tramp, 0), Pmode);
10242 #endif
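/* Editor's illustration, not part of the port: the cache-line rounding
   used for the flush range above, assuming the 32-byte
   MIN_CACHELINE_SIZE.  Standalone sketch, guarded by #if 0.  */
#if 0
#include <stdio.h>

int
main (void)
{
  const unsigned long line = 32;	/* assumed MIN_CACHELINE_SIZE */
  unsigned long tramp = 0x7f001234UL, code_size = 40;
  unsigned long start = tramp & -line;
  unsigned long end = (tramp + code_size - 1) & -line;

  /* Every line from start to end inclusive is flushed with fdc/fic.  */
  printf ("flush 0x%lx .. 0x%lx (%lu lines)\n",
	  start, end, (end - start) / line + 1);
  return 0;
}
#endif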
10245 /* Perform any machine-specific adjustment in the address of the trampoline.
10246 ADDR contains the address that was passed to pa_trampoline_init.
10247 Adjust the trampoline address to point to the plabel at offset 44.
10248 The offset of 46 used below adds 2 to mark the pointer as a plabel. */
10249 static rtx
10250 pa_trampoline_adjust_address (rtx addr)
10252 if (!TARGET_64BIT)
10253 addr = memory_address (Pmode, plus_constant (Pmode, addr, 46));
10254 return addr;
10257 static rtx
10258 pa_delegitimize_address (rtx orig_x)
10260 rtx x = delegitimize_mem_from_attrs (orig_x);
10262 if (GET_CODE (x) == LO_SUM
10263 && GET_CODE (XEXP (x, 1)) == UNSPEC
10264 && XINT (XEXP (x, 1), 1) == UNSPEC_DLTIND14R)
10265 return gen_const_mem (Pmode, XVECEXP (XEXP (x, 1), 0, 0));
10266 return x;
10269 static rtx
10270 pa_internal_arg_pointer (void)
10272 /* The argument pointer and the hard frame pointer are the same in
10273 the 32-bit runtime, so we don't need a copy. */
10274 if (TARGET_64BIT)
10275 return copy_to_reg (virtual_incoming_args_rtx);
10276 else
10277 return virtual_incoming_args_rtx;
10280 /* Given FROM and TO register numbers, say whether this elimination is allowed.
10281 Frame pointer elimination is automatically handled. */
10283 static bool
10284 pa_can_eliminate (const int from, const int to)
10286 /* The argument cannot be eliminated in the 64-bit runtime. */
10287 if (TARGET_64BIT && from == ARG_POINTER_REGNUM)
10288 return false;
10290 return (from == HARD_FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM
10291 ? ! frame_pointer_needed
10292 : true);
10295 /* Define the offset between two registers, FROM to be eliminated and its
10296 replacement TO, at the start of a routine. */
10297 HOST_WIDE_INT
10298 pa_initial_elimination_offset (int from, int to)
10300 HOST_WIDE_INT offset;
10302 if ((from == HARD_FRAME_POINTER_REGNUM || from == FRAME_POINTER_REGNUM)
10303 && to == STACK_POINTER_REGNUM)
10304 offset = -pa_compute_frame_size (get_frame_size (), 0);
10305 else if (from == FRAME_POINTER_REGNUM && to == HARD_FRAME_POINTER_REGNUM)
10306 offset = 0;
10307 else
10308 gcc_unreachable ();
10310 return offset;
10313 static void
10314 pa_conditional_register_usage (void)
10316 int i;
10318 if (!TARGET_64BIT && !TARGET_PA_11)
10320 for (i = 56; i <= FP_REG_LAST; i++)
10321 fixed_regs[i] = call_used_regs[i] = 1;
10322 for (i = 33; i < 56; i += 2)
10323 fixed_regs[i] = call_used_regs[i] = 1;
10325 if (TARGET_DISABLE_FPREGS || TARGET_SOFT_FLOAT)
10327 for (i = FP_REG_FIRST; i <= FP_REG_LAST; i++)
10328 fixed_regs[i] = call_used_regs[i] = 1;
10330 if (flag_pic)
10331 fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1;
10334 /* Target hook for c_mode_for_suffix. */
10336 static machine_mode
10337 pa_c_mode_for_suffix (char suffix)
10339 if (HPUX_LONG_DOUBLE_LIBRARY)
10341 if (suffix == 'q')
10342 return TFmode;
10345 return VOIDmode;
10348 /* Target hook for function_section. */
10350 static section *
10351 pa_function_section (tree decl, enum node_frequency freq,
10352 bool startup, bool exit)
10354 /* Put functions in text section if target doesn't have named sections. */
10355 if (!targetm_common.have_named_sections)
10356 return text_section;
10358 /* Force nested functions into the same section as the containing
10359 function. */
10360 if (decl
10361 && DECL_SECTION_NAME (decl) == NULL
10362 && DECL_CONTEXT (decl) != NULL_TREE
10363 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
10364 && DECL_SECTION_NAME (DECL_CONTEXT (decl)) == NULL)
10365 return function_section (DECL_CONTEXT (decl));
10367 /* Otherwise, use the default function section. */
10368 return default_function_section (decl, freq, startup, exit);
10371 /* Implement TARGET_LEGITIMATE_CONSTANT_P.
10373 In 64-bit mode, we reject CONST_DOUBLES. We also reject CONST_INTS
10374 that need more than three instructions to load prior to reload. This
10375 limit is somewhat arbitrary. It takes three instructions to load a
10376 CONST_INT from memory but two are memory accesses. It may be better
10377 to increase the allowed range for CONST_INTS. We may also be able
10378 to handle CONST_DOUBLES. */
10380 static bool
10381 pa_legitimate_constant_p (machine_mode mode, rtx x)
10383 if (GET_MODE_CLASS (mode) == MODE_FLOAT && x != CONST0_RTX (mode))
10384 return false;
10386 if (!NEW_HP_ASSEMBLER && !TARGET_GAS && GET_CODE (x) == LABEL_REF)
10387 return false;
10389 /* TLS_MODEL_GLOBAL_DYNAMIC and TLS_MODEL_LOCAL_DYNAMIC are not
10390 legitimate constants. The other variants can't be handled by
10391 the move patterns after reload starts. */
10392 if (tls_referenced_p (x))
10393 return false;
10395 if (TARGET_64BIT && GET_CODE (x) == CONST_DOUBLE)
10396 return false;
10398 if (TARGET_64BIT
10399 && HOST_BITS_PER_WIDE_INT > 32
10400 && GET_CODE (x) == CONST_INT
10401 && !reload_in_progress
10402 && !reload_completed
10403 && !LEGITIMATE_64BIT_CONST_INT_P (INTVAL (x))
10404 && !pa_cint_ok_for_move (UINTVAL (x)))
10405 return false;
10407 if (function_label_operand (x, mode))
10408 return false;
10410 return true;
10413 /* Implement TARGET_SECTION_TYPE_FLAGS. */
10415 static unsigned int
10416 pa_section_type_flags (tree decl, const char *name, int reloc)
10418 unsigned int flags;
10420 flags = default_section_type_flags (decl, name, reloc);
10422 /* Function labels are placed in the constant pool. This can
10423 cause a section conflict if decls are put in ".data.rel.ro"
10424 or ".data.rel.ro.local" using the __attribute__ construct. */
10425 if (strcmp (name, ".data.rel.ro") == 0
10426 || strcmp (name, ".data.rel.ro.local") == 0)
10427 flags |= SECTION_WRITE | SECTION_RELRO;
10429 return flags;
10432 /* pa_legitimate_address_p recognizes an RTL expression that is a
10433 valid memory address for an instruction. The MODE argument is the
10434 machine mode for the MEM expression that wants to use this address.
10436 On HP PA-RISC, the legitimate address forms are REG+SMALLINT,
10437 REG+REG, and REG+(REG*SCALE). The indexed address forms are only
10438 available with floating point loads and stores, and integer loads.
10439 We get better code by allowing indexed addresses in the initial
10440 RTL generation.
10442 The acceptance of indexed addresses as legitimate implies that we
10443 must provide patterns for doing indexed integer stores, or the move
10444 expanders must force the address of an indexed store to a register.
10445 We have adopted the latter approach.
10447 Another function of pa_legitimate_address_p is to ensure that
10448 the base register is a valid pointer for indexed instructions.
10449 On targets that have non-equivalent space registers, we have to
10450 know at the time of assembler output which register in a REG+REG
10451 pair is the base register. The REG_POINTER flag is sometimes lost
10452 in reload and the following passes, so it can't be relied on during
10453 code generation. Thus, we either have to canonicalize the order
10454 of the registers in REG+REG indexed addresses, or treat REG+REG
10455 addresses separately and provide patterns for both permutations.
10457 The latter approach requires several hundred additional lines of
10458 code in pa.md. The downside to canonicalizing is that a PLUS
10459 in the wrong order can't combine to form a scaled indexed
10460 memory operand. As we won't need to canonicalize the operands if
10461 the REG_POINTER lossage can be fixed, it seems better to canonicalize.
10463 We initially break out scaled indexed addresses in canonical order
10464 in pa_emit_move_sequence. LEGITIMIZE_ADDRESS also canonicalizes
10465 scaled indexed addresses during RTL generation. However, fold_rtx
10466 has its own opinion on how the operands of a PLUS should be ordered.
10467 If one of the operands is equivalent to a constant, it will make
10468 that operand the second operand. As the base register is likely to
10469 be equivalent to a SYMBOL_REF, we have made it the second operand.
10471 pa_legitimate_address_p accepts REG+REG as legitimate when the
10472 operands are in the order INDEX+BASE on targets with non-equivalent
10473 space registers, and in any order on targets with equivalent space
10474 registers. It accepts both MULT+BASE and BASE+MULT for scaled indexing.
10476 We treat a SYMBOL_REF as legitimate if it is part of the current
10477 function's constant-pool, because such addresses can actually be
10478 output as REG+SMALLINT. */
10480 static bool
10481 pa_legitimate_address_p (machine_mode mode, rtx x, bool strict)
10483 if ((REG_P (x)
10484 && (strict ? STRICT_REG_OK_FOR_BASE_P (x)
10485 : REG_OK_FOR_BASE_P (x)))
10486 || ((GET_CODE (x) == PRE_DEC || GET_CODE (x) == POST_DEC
10487 || GET_CODE (x) == PRE_INC || GET_CODE (x) == POST_INC)
10488 && REG_P (XEXP (x, 0))
10489 && (strict ? STRICT_REG_OK_FOR_BASE_P (XEXP (x, 0))
10490 : REG_OK_FOR_BASE_P (XEXP (x, 0)))))
10491 return true;
10493 if (GET_CODE (x) == PLUS)
10495 rtx base, index;
10497 /* For REG+REG, the base register should be in XEXP (x, 1),
10498 so check it first. */
10499 if (REG_P (XEXP (x, 1))
10500 && (strict ? STRICT_REG_OK_FOR_BASE_P (XEXP (x, 1))
10501 : REG_OK_FOR_BASE_P (XEXP (x, 1))))
10502 base = XEXP (x, 1), index = XEXP (x, 0);
10503 else if (REG_P (XEXP (x, 0))
10504 && (strict ? STRICT_REG_OK_FOR_BASE_P (XEXP (x, 0))
10505 : REG_OK_FOR_BASE_P (XEXP (x, 0))))
10506 base = XEXP (x, 0), index = XEXP (x, 1);
10507 else
10508 return false;
10510 if (GET_CODE (index) == CONST_INT)
10512 if (INT_5_BITS (index))
10513 return true;
10515 /* When INT14_OK_STRICT is false, a secondary reload is needed
10516 to adjust the displacement of SImode and DImode floating point
10517 instructions but this may fail when the register also needs
10518 reloading. So, we return false when STRICT is true. We
10519 also reject long displacements for float mode addresses since
10520 the majority of accesses will use floating point instructions
10521 that don't support 14-bit offsets. */
10522 if (!INT14_OK_STRICT
10523 && (strict || !(reload_in_progress || reload_completed))
10524 && mode != QImode
10525 && mode != HImode)
10526 return false;
10528 return base14_operand (index, mode);
10531 if (!TARGET_DISABLE_INDEXING
10532 /* Only accept the "canonical" INDEX+BASE operand order
10533 on targets with non-equivalent space registers. */
10534 && (TARGET_NO_SPACE_REGS
10535 ? REG_P (index)
10536 : (base == XEXP (x, 1) && REG_P (index)
10537 && (reload_completed
10538 || (reload_in_progress && HARD_REGISTER_P (base))
10539 || REG_POINTER (base))
10540 && (reload_completed
10541 || (reload_in_progress && HARD_REGISTER_P (index))
10542 || !REG_POINTER (index))))
10543 && MODE_OK_FOR_UNSCALED_INDEXING_P (mode)
10544 && (strict ? STRICT_REG_OK_FOR_INDEX_P (index)
10545 : REG_OK_FOR_INDEX_P (index))
10546 && borx_reg_operand (base, Pmode)
10547 && borx_reg_operand (index, Pmode))
10548 return true;
10550 if (!TARGET_DISABLE_INDEXING
10551 && GET_CODE (index) == MULT
10552 /* Only accept base operands with the REG_POINTER flag prior to
10553 reload on targets with non-equivalent space registers. */
10554 && (TARGET_NO_SPACE_REGS
10555 || (base == XEXP (x, 1)
10556 && (reload_completed
10557 || (reload_in_progress && HARD_REGISTER_P (base))
10558 || REG_POINTER (base))))
10559 && REG_P (XEXP (index, 0))
10560 && GET_MODE (XEXP (index, 0)) == Pmode
10561 && MODE_OK_FOR_SCALED_INDEXING_P (mode)
10562 && (strict ? STRICT_REG_OK_FOR_INDEX_P (XEXP (index, 0))
10563 : REG_OK_FOR_INDEX_P (XEXP (index, 0)))
10564 && GET_CODE (XEXP (index, 1)) == CONST_INT
10565 && INTVAL (XEXP (index, 1))
10566 == (HOST_WIDE_INT) GET_MODE_SIZE (mode)
10567 && borx_reg_operand (base, Pmode))
10568 return true;
10570 return false;
10573 if (GET_CODE (x) == LO_SUM)
10575 rtx y = XEXP (x, 0);
10577 if (GET_CODE (y) == SUBREG)
10578 y = SUBREG_REG (y);
10580 if (REG_P (y)
10581 && (strict ? STRICT_REG_OK_FOR_BASE_P (y)
10582 : REG_OK_FOR_BASE_P (y)))
10584 /* Needed for -fPIC */
10585 if (mode == Pmode
10586 && GET_CODE (XEXP (x, 1)) == UNSPEC)
10587 return true;
10589 if (!INT14_OK_STRICT
10590 && (strict || !(reload_in_progress || reload_completed))
10591 && mode != QImode
10592 && mode != HImode)
10593 return false;
10595 if (CONSTANT_P (XEXP (x, 1)))
10596 return true;
10598 return false;
10601 if (GET_CODE (x) == CONST_INT && INT_5_BITS (x))
10602 return true;
10604 return false;
10607 /* Look for machine dependent ways to make the invalid address AD a
10608 valid address.
10610 For the PA, transform:
10612 memory(X + <large int>)
10614 into:
10616 if (<large int> & mask) >= (mask + 1) / 2
10617 Y = (<large int> & ~mask) + mask + 1 Round up.
10618 else
10619 Y = (<large int> & ~mask) Round down.
10620 Z = X + Y
10621 memory (Z + (<large int> - Y));
10623 This makes reload inheritance and reload_cse work better since Z
10624 can be reused.
10626 There may be more opportunities to improve code with this hook. */
10628 rtx
10629 pa_legitimize_reload_address (rtx ad, machine_mode mode,
10630 int opnum, int type,
10631 int ind_levels ATTRIBUTE_UNUSED)
10633 long offset, newoffset, mask;
10634 rtx new_rtx, temp = NULL_RTX;
10636 mask = (GET_MODE_CLASS (mode) == MODE_FLOAT
10637 && !INT14_OK_STRICT ? 0x1f : 0x3fff);
10639 if (optimize && GET_CODE (ad) == PLUS)
10640 temp = simplify_binary_operation (PLUS, Pmode,
10641 XEXP (ad, 0), XEXP (ad, 1));
10643 new_rtx = temp ? temp : ad;
10645 if (optimize
10646 && GET_CODE (new_rtx) == PLUS
10647 && GET_CODE (XEXP (new_rtx, 0)) == REG
10648 && GET_CODE (XEXP (new_rtx, 1)) == CONST_INT)
10650 offset = INTVAL (XEXP ((new_rtx), 1));
10652 /* Choose rounding direction. Round up if we are >= halfway. */
10653 if ((offset & mask) >= ((mask + 1) / 2))
10654 newoffset = (offset & ~mask) + mask + 1;
10655 else
10656 newoffset = offset & ~mask;
10658 /* Ensure that long displacements are aligned. */
10659 if (mask == 0x3fff
10660 && (GET_MODE_CLASS (mode) == MODE_FLOAT
10661 || (TARGET_64BIT && (mode) == DImode)))
10662 newoffset &= ~(GET_MODE_SIZE (mode) - 1);
10664 if (newoffset != 0 && VAL_14_BITS_P (newoffset))
10666 temp = gen_rtx_PLUS (Pmode, XEXP (new_rtx, 0),
10667 GEN_INT (newoffset));
10668 ad = gen_rtx_PLUS (Pmode, temp, GEN_INT (offset - newoffset));
10669 push_reload (XEXP (ad, 0), 0, &XEXP (ad, 0), 0,
10670 BASE_REG_CLASS, Pmode, VOIDmode, 0, 0,
10671 opnum, (enum reload_type) type);
10672 return ad;
10676 return NULL_RTX;
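/* Editor's illustration, not part of the port: the round-up/round-down
   split above on concrete displacements, using the 14-bit mask.
   Standalone sketch, guarded by #if 0.  */
#if 0
#include <stdio.h>

static void
sketch_split (long offset, long mask)
{
  long newoffset;

  if ((offset & mask) >= (mask + 1) / 2)
    newoffset = (offset & ~mask) + mask + 1;	/* round up */
  else
    newoffset = offset & ~mask;			/* round down */
  printf ("0x%lx -> base 0x%lx + disp %ld\n",
	  offset, newoffset, offset - newoffset);
}

int
main (void)
{
  sketch_split (0x5004, 0x3fff);   /* base 0x4000, disp 0x1004 */
  sketch_split (0x7ffc, 0x3fff);   /* rounds up: base 0x8000, disp -4 */
  return 0;
}
#endif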
10679 /* Output address vector. */
10681 void
10682 pa_output_addr_vec (rtx lab, rtx body)
10684 int idx, vlen = XVECLEN (body, 0);
10686 if (!TARGET_SOM)
10687 fputs ("\t.align 4\n", asm_out_file);
10688 targetm.asm_out.internal_label (asm_out_file, "L", CODE_LABEL_NUMBER (lab));
10689 if (TARGET_GAS)
10690 fputs ("\t.begin_brtab\n", asm_out_file);
10691 for (idx = 0; idx < vlen; idx++)
10693 ASM_OUTPUT_ADDR_VEC_ELT
10694 (asm_out_file, CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 0, idx), 0)));
10696 if (TARGET_GAS)
10697 fputs ("\t.end_brtab\n", asm_out_file);
10700 /* Output address difference vector. */
10702 void
10703 pa_output_addr_diff_vec (rtx lab, rtx body)
10705 rtx base = XEXP (XEXP (body, 0), 0);
10706 int idx, vlen = XVECLEN (body, 1);
10708 targetm.asm_out.internal_label (asm_out_file, "L", CODE_LABEL_NUMBER (lab));
10709 if (TARGET_GAS)
10710 fputs ("\t.begin_brtab\n", asm_out_file);
10711 for (idx = 0; idx < vlen; idx++)
10713 ASM_OUTPUT_ADDR_DIFF_ELT
10714 (asm_out_file,
10715 body,
10716 CODE_LABEL_NUMBER (XEXP (XVECEXP (body, 1, idx), 0)),
10717 CODE_LABEL_NUMBER (base));
10719 if (TARGET_GAS)
10720 fputs ("\t.end_brtab\n", asm_out_file);
10723 /* This is a helper function for the other atomic operations. This function
10724 emits a loop that contains SEQ that iterates until a compare-and-swap
10725 operation at the end succeeds. MEM is the memory to be modified. SEQ is
10726 a set of instructions that takes a value from OLD_REG as an input and
10727 produces a value in NEW_REG as an output. Before SEQ, OLD_REG will be
10728 set to the current contents of MEM. After SEQ, a compare-and-swap will
10729 attempt to update MEM with NEW_REG. The function returns true when the
10730 loop was generated successfully. */
10732 static bool
10733 pa_expand_compare_and_swap_loop (rtx mem, rtx old_reg, rtx new_reg, rtx seq)
10735 machine_mode mode = GET_MODE (mem);
10736 rtx_code_label *label;
10737 rtx cmp_reg, success, oldval;
10739 /* The loop we want to generate looks like
10741 cmp_reg = mem;
10742 label:
10743 old_reg = cmp_reg;
10744 seq;
10745 (success, cmp_reg) = compare-and-swap(mem, old_reg, new_reg)
10746 if (success)
10747 goto label;
10749 Note that we only do the plain load from memory once. Subsequent
10750 iterations use the value loaded by the compare-and-swap pattern. */
10752 label = gen_label_rtx ();
10753 cmp_reg = gen_reg_rtx (mode);
10755 emit_move_insn (cmp_reg, mem);
10756 emit_label (label);
10757 emit_move_insn (old_reg, cmp_reg);
10758 if (seq)
10759 emit_insn (seq);
10761 success = NULL_RTX;
10762 oldval = cmp_reg;
10763 if (!expand_atomic_compare_and_swap (&success, &oldval, mem, old_reg,
10764 new_reg, false, MEMMODEL_SYNC_SEQ_CST,
10765 MEMMODEL_RELAXED))
10766 return false;
10768 if (oldval != cmp_reg)
10769 emit_move_insn (cmp_reg, oldval);
10771 /* Mark this jump predicted not taken. */
10772 emit_cmp_and_jump_insns (success, const0_rtx, EQ, const0_rtx,
10773 GET_MODE (success), 1, label,
10774 profile_probability::guessed_never ());
10775 return true;
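/* Editor's illustration, not part of the port: the loop shape the
   helper above emits, written as plain C over GCC's
   __atomic_compare_exchange_n builtin; an OR stands in for an
   arbitrary SEQ.  Standalone sketch, guarded by #if 0.  */
#if 0
#include <stdio.h>

static long
sketch_atomic_or (long *mem, long val)
{
  long cmp = *mem;			/* plain load happens once */
  long old;

  do
    {
      old = cmp;			/* old_reg = cmp_reg */
      /* SEQ would compute new_reg here; it is old | val.  */
    }
  while (!__atomic_compare_exchange_n (mem, &cmp, old | val, 0,
				       __ATOMIC_SEQ_CST, __ATOMIC_RELAXED));
  return old;				/* previous contents of *mem */
}

int
main (void)
{
  long x = 1;
  long old = sketch_atomic_or (&x, 2);
  printf ("old=%ld new=%ld\n", old, x);	/* old=1 new=3 */
  return 0;
}
#endif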
10778 /* This function tries to implement an atomic exchange operation using a
10779 compare_and_swap loop. VAL is written to *MEM. The previous contents of
10780 *MEM are returned, using TARGET if possible. No memory model is required
10781 since a compare_and_swap loop is seq-cst. */
10783 rtx
10784 pa_maybe_emit_compare_and_swap_exchange_loop (rtx target, rtx mem, rtx val)
10786 machine_mode mode = GET_MODE (mem);
10788 if (can_compare_and_swap_p (mode, true))
10790 if (!target || !register_operand (target, mode))
10791 target = gen_reg_rtx (mode);
10792 if (pa_expand_compare_and_swap_loop (mem, target, val, NULL_RTX))
10793 return target;
10796 return NULL_RTX;
10799 /* Implement TARGET_CALLEE_COPIES. The callee is responsible for copying
10800 arguments passed by hidden reference in the 32-bit HP runtime. Users
10801 can override this behavior for better compatibility with OpenMP at the
10802 risk of library incompatibilities. Arguments are always passed by value
10803 in the 64-bit HP runtime. */
10805 static bool
10806 pa_callee_copies (cumulative_args_t cum ATTRIBUTE_UNUSED,
10807 machine_mode mode ATTRIBUTE_UNUSED,
10808 const_tree type ATTRIBUTE_UNUSED,
10809 bool named ATTRIBUTE_UNUSED)
10811 return !TARGET_CALLER_COPIES;
10814 /* Implement TARGET_HARD_REGNO_NREGS. */
10816 static unsigned int
10817 pa_hard_regno_nregs (unsigned int regno ATTRIBUTE_UNUSED, machine_mode mode)
10819 return PA_HARD_REGNO_NREGS (regno, mode);
10822 /* Implement TARGET_HARD_REGNO_MODE_OK. */
10824 static bool
10825 pa_hard_regno_mode_ok (unsigned int regno, machine_mode mode)
10827 return PA_HARD_REGNO_MODE_OK (regno, mode);
10830 /* Implement TARGET_STARTING_FRAME_OFFSET.
10832 On the 32-bit ports, we reserve one slot for the previous frame
10833 pointer and one fill slot. The fill slot is for compatibility
10834 with HP compiled programs. On the 64-bit ports, we reserve one
10835 slot for the previous frame pointer. */
10837 static HOST_WIDE_INT
10838 pa_starting_frame_offset (void)
10840 return 8;
10843 /* Figure out the size in words of the function argument. The size
10844 returned by this function should always be greater than zero because
10845 we pass variable and zero-sized objects by reference. */
10847 HOST_WIDE_INT
10848 pa_function_arg_size (machine_mode mode, const_tree type)
10850 HOST_WIDE_INT size;
10852 size = mode != BLKmode ? GET_MODE_SIZE (mode) : int_size_in_bytes (type);
10853 return CEIL (size, UNITS_PER_WORD);
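/* Editor's illustration, not part of the port: the CEIL rounding above
   on concrete byte sizes, assuming the 32-bit port's 4-byte word.
   Standalone sketch, guarded by #if 0.  */
#if 0
#include <stdio.h>

#define SKETCH_CEIL(x, y) (((x) + (y) - 1) / (y))

int
main (void)
{
  printf ("%d\n", SKETCH_CEIL (1, 4));	/* 1: a char still takes a word */
  printf ("%d\n", SKETCH_CEIL (8, 4));	/* 2 */
  printf ("%d\n", SKETCH_CEIL (9, 4));	/* 3 */
  return 0;
}
#endif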
10856 #include "gt-pa.h"