gcc/integrate.c

/* Procedure integration for GCC.
   Copyright (C) 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
   Contributed by Michael Tiemann (tiemann@cygnus.com)

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "flags.h"
#include "debug.h"
#include "insn-config.h"
#include "expr.h"
#include "output.h"
#include "recog.h"
#include "integrate.h"
#include "real.h"
#include "except.h"
#include "function.h"
#include "toplev.h"
#include "intl.h"
#include "params.h"
#include "ggc.h"
#include "target.h"
#include "langhooks.h"

/* Round to the next highest integer that meets the alignment.  */
#define CEIL_ROUND(VALUE,ALIGN) (((VALUE) + (ALIGN) - 1) & ~((ALIGN)- 1))

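/* A worked example (added for illustration, not part of the original
   source): with VALUE = 13 and ALIGN = 8,
       CEIL_ROUND (13, 8) == (13 + 7) & ~7 == 20 & ~7 == 16,
   i.e. 13 rounded up to the next multiple of 8.  The bit-mask trick
   assumes ALIGN is a power of two.  */
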
/* Private type used by {get/has}_func_hard_reg_initial_val.  */
typedef struct initial_value_pair GTY(()) {
  rtx hard_reg;
  rtx pseudo;
} initial_value_pair;
typedef struct initial_value_struct GTY(()) {
  int num_entries;
  int max_entries;
  initial_value_pair * GTY ((length ("%h.num_entries"))) entries;
} initial_value_struct;

static void set_block_origin_self (tree);
static void set_block_abstract_flags (tree, int);

/* Returns the Ith entry in the label_map contained in MAP.  If the
   Ith entry has not yet been set, return a fresh label.  This function
   performs a lazy initialization of label_map, thereby avoiding huge memory
   explosions when the label_map gets very large.  */

rtx
get_label_from_map (struct inline_remap *map, int i)
{
  rtx x = map->label_map[i];

  if (x == NULL_RTX)
    x = map->label_map[i] = gen_label_rtx ();

  return x;
}

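/* Illustrative use (hypothetical caller, not from the original source):
   when copying a jump whose target is label number N in the original
   function,
       rtx lab = get_label_from_map (map, N);
   yields the copy's replacement label, allocating it on first use so a
   sparse label_map never forces labels that are never referenced.  */
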
/* Return false if the function FNDECL cannot be inlined on account of its
   attributes, true otherwise.  */
bool
function_attribute_inlinable_p (tree fndecl)
{
  if (targetm.attribute_table)
    {
      tree a;

      for (a = DECL_ATTRIBUTES (fndecl); a; a = TREE_CHAIN (a))
	{
	  tree name = TREE_PURPOSE (a);
	  int i;

	  for (i = 0; targetm.attribute_table[i].name != NULL; i++)
	    if (is_attribute_p (targetm.attribute_table[i].name, name))
	      return targetm.function_attribute_inlinable_p (fndecl);
	}
    }

  return true;
}

/* Copy NODE (which must be a DECL).  The DECL originally was in the FROM_FN,
   but now it will be in the TO_FN.  */

tree
copy_decl_for_inlining (tree decl, tree from_fn, tree to_fn)
{
  tree copy;

  /* Copy the declaration.  */
  if (TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL)
    {
      tree type = TREE_TYPE (decl);

      /* For a parameter or result, we must make an equivalent VAR_DECL, not a
	 new PARM_DECL.  */
      copy = build_decl (VAR_DECL, DECL_NAME (decl), type);
      TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (copy) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
    }
  else
    {
      copy = copy_node (decl);
      /* The COPY is not abstract; it will be generated in TO_FN.  */
      DECL_ABSTRACT (copy) = 0;
      lang_hooks.dup_lang_specific_decl (copy);

      /* TREE_ADDRESSABLE isn't used to indicate that a label's
	 address has been taken; it's for internal bookkeeping in
	 expand_goto_internal.  */
      if (TREE_CODE (copy) == LABEL_DECL)
	{
	  TREE_ADDRESSABLE (copy) = 0;
	}
    }

  /* Don't generate debug information for the copy if we wouldn't have
     generated it for the original either.  */
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);

  /* Set the DECL_ABSTRACT_ORIGIN so the debugging routines know what
     declaration inspired this copy.  */
  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);

  /* The new variable/label has no RTL, yet.  */
  if (!TREE_STATIC (copy) && !DECL_EXTERNAL (copy))
    SET_DECL_RTL (copy, NULL_RTX);

  /* These args would always appear unused, if not for this.  */
  TREE_USED (copy) = 1;

  /* Set the context for the new declaration.  */
  if (!DECL_CONTEXT (decl))
    /* Globals stay global.  */
    ;
  else if (DECL_CONTEXT (decl) != from_fn)
    /* Things that weren't in the scope of the function we're inlining
       from aren't in the scope we're inlining to, either.  */
    ;
  else if (TREE_STATIC (decl))
    /* Function-scoped static variables should stay in the original
       function.  */
    ;
  else
    /* Ordinary automatic local variables are now in the scope of the
       new function.  */
    DECL_CONTEXT (copy) = to_fn;

  return copy;
}

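/* Sketch of the intended behavior (illustrative only): inlining
       int callee (int x) { int y = x + 1; return y; }
   copies X (a PARM_DECL) and Y into the caller; X comes back as a fresh
   VAR_DECL rather than a PARM_DECL, both get DECL_ABSTRACT_ORIGIN pointing
   at the originals for the debugger, and both get DECL_CONTEXT set to the
   caller, while a function-scoped static would keep its old context.  */
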
/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

varray_type global_const_equiv_varray;

/* Create a new copy of an rtx.  Recursively copies the operands of the rtx,
   except for those few rtx codes that are sharable.

   We always return an rtx that is similar to that incoming rtx, with the
   exception of possibly changing a REG to a SUBREG or vice versa.  No
   rtl is ever emitted.

   If FOR_LHS is nonzero, it means we are processing something that will
   be the LHS of a SET.

   Handle constants that need to be placed in the constant pool by
   calling `force_const_mem'.  */

rtx
copy_rtx_and_substitute (rtx orig, struct inline_remap *map, int for_lhs)
{
  rtx copy, temp;
  int i, j;
  RTX_CODE code;
  enum machine_mode mode;
  const char *format_ptr;
  int regno;

  if (orig == 0)
    return 0;

  code = GET_CODE (orig);
  mode = GET_MODE (orig);

  switch (code)
    {
    case REG:
      /* If the stack pointer register shows up, it must be part of
	 stack-adjustments (*not* because we eliminated the frame pointer!).
	 Small hard registers are returned as-is.  Pseudo-registers
	 go through their `reg_map'.  */
      regno = REGNO (orig);
      if (regno <= LAST_VIRTUAL_REGISTER)
	{
	  /* Some hard registers are also mapped,
	     but others are not translated.  */
	  if (map->reg_map[regno] != 0)
	    return map->reg_map[regno];

	  /* If this is the virtual frame pointer, make space in current
	     function's stack frame for the stack frame of the inline function.

	     Copy the address of this area into a pseudo.  Map
	     virtual_stack_vars_rtx to this pseudo and set up a constant
	     equivalence for it to be the address.  This will substitute the
	     address into insns where it can be substituted and use the new
	     pseudo where it can't.  */
	  else if (regno == VIRTUAL_STACK_VARS_REGNUM)
	    {
	      rtx loc, seq;
	      int size
		= get_func_frame_size (DECL_STRUCT_FUNCTION (map->fndecl));
#ifdef FRAME_GROWS_DOWNWARD
	      int alignment
		= (DECL_STRUCT_FUNCTION (map->fndecl)->stack_alignment_needed
		   / BITS_PER_UNIT);

	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So make sure we
		 allocate a big enough chunk to keep the frame pointer
		 aligned like a real one.  */
	      if (alignment)
		size = CEIL_ROUND (size, alignment);
#endif
	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
#ifdef FRAME_GROWS_DOWNWARD
	      /* In this case, virtual_stack_vars_rtx points to one byte
		 higher than the top of the frame area.  So compute the offset
		 to one byte higher than our substitute frame.  */
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else if (regno == VIRTUAL_INCOMING_ARGS_REGNUM)
	    {
	      /* Do the same for a block to contain any arguments referenced
		 in memory.  */
	      rtx loc, seq;
	      int size = DECL_STRUCT_FUNCTION (map->fndecl)->args_size;

	      start_sequence ();
	      loc = assign_stack_temp (BLKmode, size, 1);
	      loc = XEXP (loc, 0);
	      /* When arguments grow downward, the virtual incoming
		 args pointer points to the top of the argument block,
		 so the remapped location better do the same.  */
#ifdef ARGS_GROW_DOWNWARD
	      loc = plus_constant (loc, size);
#endif
	      map->reg_map[regno] = temp
		= force_reg (Pmode, force_operand (loc, NULL_RTX));

#ifdef STACK_BOUNDARY
	      mark_reg_pointer (map->reg_map[regno], STACK_BOUNDARY);
#endif

	      SET_CONST_EQUIV_DATA (map, temp, loc, CONST_AGE_PARM);

	      seq = get_insns ();
	      end_sequence ();
	      emit_insn_after (seq, map->insns_at_start);
	      return temp;
	    }
	  else
	    return orig;

	  abort ();
	}
      if (map->reg_map[regno] == NULL)
	{
	  map->reg_map[regno] = gen_reg_rtx (mode);
	  REG_USERVAR_P (map->reg_map[regno]) = REG_USERVAR_P (orig);
	  REG_LOOP_TEST_P (map->reg_map[regno]) = REG_LOOP_TEST_P (orig);
	  /* A reg with REG_FUNCTION_VALUE_P true will never reach here.  */

	  if (REG_POINTER (map->x_regno_reg_rtx[regno]))
	    mark_reg_pointer (map->reg_map[regno],
			      map->regno_pointer_align[regno]);
	}
      return map->reg_map[regno];

    case SUBREG:
      copy = copy_rtx_and_substitute (SUBREG_REG (orig), map, for_lhs);
      return simplify_gen_subreg (GET_MODE (orig), copy,
				  GET_MODE (SUBREG_REG (orig)),
				  SUBREG_BYTE (orig));

    case USE:
    case CLOBBER:
      /* USE and CLOBBER are ordinary, but we convert (use (subreg foo))
	 to (use foo) if the original insn didn't have a subreg.
	 Removing the subreg distorts the VAX movmemhi pattern
	 by changing the mode of an operand.  */
      copy = copy_rtx_and_substitute (XEXP (orig, 0), map, code == CLOBBER);
      if (GET_CODE (copy) == SUBREG && GET_CODE (XEXP (orig, 0)) != SUBREG)
	copy = SUBREG_REG (copy);
      return gen_rtx_fmt_e (code, VOIDmode, copy);

      /* We need to handle "deleted" labels that appear in the DECL_RTL
	 of a LABEL_DECL.  */
    case NOTE:
      if (NOTE_LINE_NUMBER (orig) != NOTE_INSN_DELETED_LABEL)
	break;

      /* Fall through.  */
    case CODE_LABEL:
      LABEL_PRESERVE_P (get_label_from_map (map, CODE_LABEL_NUMBER (orig)))
	= LABEL_PRESERVE_P (orig);
      return get_label_from_map (map, CODE_LABEL_NUMBER (orig));

    case LABEL_REF:
      copy
	= gen_rtx_LABEL_REF
	  (mode,
	   LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
	   : get_label_from_map (map, CODE_LABEL_NUMBER (XEXP (orig, 0))));

      LABEL_OUTSIDE_LOOP_P (copy) = LABEL_OUTSIDE_LOOP_P (orig);

      /* The fact that this label was previously nonlocal does not mean
	 it still is, so we must check if it is within the range of
	 this function's labels.  */
      LABEL_REF_NONLOCAL_P (copy)
	= (LABEL_REF_NONLOCAL_P (orig)
	   && ! (CODE_LABEL_NUMBER (XEXP (copy, 0)) >= get_first_label_num ()
		 && CODE_LABEL_NUMBER (XEXP (copy, 0)) < max_label_num ()));

      return copy;

    case PC:
    case CC0:
    case CONST_INT:
    case CONST_VECTOR:
      return orig;

    case SYMBOL_REF:
      /* Symbols which represent the address of a label stored in the constant
	 pool must be modified to point to a constant pool entry for the
	 remapped label.  Otherwise, symbols are returned unchanged.  */
      if (CONSTANT_POOL_ADDRESS_P (orig))
	{
	  struct function *f = cfun;
	  rtx constant = get_pool_constant_for_function (f, orig);
	  if (GET_CODE (constant) == LABEL_REF)
	    return XEXP (force_const_mem
			 (GET_MODE (orig),
			  copy_rtx_and_substitute (constant, map, for_lhs)),
			 0);
	}
      return orig;

    case CONST_DOUBLE:
      /* We have to make a new copy of this CONST_DOUBLE because we don't
	 want to use the old value of CONST_DOUBLE_MEM.  Also, this may be a
	 duplicate of a CONST_DOUBLE we have already seen.  */
      if (GET_MODE_CLASS (GET_MODE (orig)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, orig);
	  return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (orig));
	}
      else
	return immed_double_const (CONST_DOUBLE_LOW (orig),
				   CONST_DOUBLE_HIGH (orig), VOIDmode);

    case CONST:
      break;

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands then
	 it contains multiple ASM_OPERANDS rtx's that share the input
	 and constraint vecs.  We must make sure that the copied insn
	 continues to share it.  */
      if (map->orig_asm_operands_vector == ASM_OPERANDS_INPUT_VEC (orig))
	{
	  copy = rtx_alloc (ASM_OPERANDS);
	  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
	  PUT_MODE (copy, GET_MODE (orig));
	  ASM_OPERANDS_TEMPLATE (copy) = ASM_OPERANDS_TEMPLATE (orig);
	  ASM_OPERANDS_OUTPUT_CONSTRAINT (copy)
	    = ASM_OPERANDS_OUTPUT_CONSTRAINT (orig);
	  ASM_OPERANDS_OUTPUT_IDX (copy) = ASM_OPERANDS_OUTPUT_IDX (orig);
	  ASM_OPERANDS_INPUT_VEC (copy) = map->copy_asm_operands_vector;
	  ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy)
	    = map->copy_asm_constraints_vector;
#ifdef USE_MAPPED_LOCATION
	  ASM_OPERANDS_SOURCE_LOCATION (copy)
	    = ASM_OPERANDS_SOURCE_LOCATION (orig);
#else
	  ASM_OPERANDS_SOURCE_FILE (copy) = ASM_OPERANDS_SOURCE_FILE (orig);
	  ASM_OPERANDS_SOURCE_LINE (copy) = ASM_OPERANDS_SOURCE_LINE (orig);
#endif
	  return copy;
	}
      break;

    case CALL:
      /* This is given special treatment because the first
	 operand of a CALL is a (MEM ...) which may get
	 forced into a register for cse.  This is undesirable
	 if function-address cse isn't wanted or if we won't do cse.  */
#ifndef NO_FUNCTION_CSE
      if (! (optimize && ! flag_no_function_cse))
#endif
	{
	  rtx copy
	    = gen_rtx_MEM (GET_MODE (XEXP (orig, 0)),
			   copy_rtx_and_substitute (XEXP (XEXP (orig, 0), 0),
						    map, 0));

	  MEM_COPY_ATTRIBUTES (copy, XEXP (orig, 0));

	  return
	    gen_rtx_CALL (GET_MODE (orig), copy,
			  copy_rtx_and_substitute (XEXP (orig, 1), map, 0));
	}
      break;

#if 0
      /* Must be ifdefed out for loop unrolling to work.  */
      /* ??? Is this for the old or the new unroller?  */
    case RETURN:
      abort ();
#endif

    case SET:
      /* If this is setting fp or ap, it means that we have a nonlocal goto.
	 Adjust the setting by the offset of the area we made.
	 If the nonlocal goto is into the current function,
	 this will result in unnecessarily bad code, but should work.  */
      if (SET_DEST (orig) == virtual_stack_vars_rtx
	  || SET_DEST (orig) == virtual_incoming_args_rtx)
	{
	  /* In case a translation hasn't occurred already, make one now.  */
	  rtx equiv_reg;
	  rtx equiv_loc;
	  HOST_WIDE_INT loc_offset;

	  copy_rtx_and_substitute (SET_DEST (orig), map, for_lhs);
	  equiv_reg = map->reg_map[REGNO (SET_DEST (orig))];
	  equiv_loc = VARRAY_CONST_EQUIV (map->const_equiv_varray,
					  REGNO (equiv_reg)).rtx;
	  loc_offset
	    = REG_P (equiv_loc) ? 0 : INTVAL (XEXP (equiv_loc, 1));

	  return gen_rtx_SET (VOIDmode, SET_DEST (orig),
			      force_operand
			      (plus_constant
			       (copy_rtx_and_substitute (SET_SRC (orig),
							 map, 0),
				- loc_offset),
			       NULL_RTX));
	}
      else
	return gen_rtx_SET (VOIDmode,
			    copy_rtx_and_substitute (SET_DEST (orig), map, 1),
			    copy_rtx_and_substitute (SET_SRC (orig), map, 0));
      break;

    case MEM:
      copy = gen_rtx_MEM (mode, copy_rtx_and_substitute (XEXP (orig, 0),
							 map, 0));
      MEM_COPY_ATTRIBUTES (copy, orig);
      return copy;

    default:
      break;
    }

  copy = rtx_alloc (code);
  PUT_MODE (copy, mode);
  RTX_FLAG (copy, in_struct) = RTX_FLAG (orig, in_struct);
  RTX_FLAG (copy, volatil) = RTX_FLAG (orig, volatil);
  RTX_FLAG (copy, unchanging) = RTX_FLAG (orig, unchanging);

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    {
      switch (*format_ptr++)
	{
	case '0':
	  X0ANY (copy, i) = X0ANY (orig, i);
	  break;

	case 'e':
	  XEXP (copy, i)
	    = copy_rtx_and_substitute (XEXP (orig, i), map, for_lhs);
	  break;

	case 'u':
	  /* Change any references to old-insns to point to the
	     corresponding copied insns.  */
	  XEXP (copy, i) = map->insn_map[INSN_UID (XEXP (orig, i))];
	  break;

	case 'E':
	  XVEC (copy, i) = XVEC (orig, i);
	  if (XVEC (orig, i) != NULL && XVECLEN (orig, i) != 0)
	    {
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	      for (j = 0; j < XVECLEN (copy, i); j++)
		XVECEXP (copy, i, j)
		  = copy_rtx_and_substitute (XVECEXP (orig, i, j),
					     map, for_lhs);
	    }
	  break;

	case 'w':
	  XWINT (copy, i) = XWINT (orig, i);
	  break;

	case 'i':
	  XINT (copy, i) = XINT (orig, i);
	  break;

	case 's':
	  XSTR (copy, i) = XSTR (orig, i);
	  break;

	case 't':
	  XTREE (copy, i) = XTREE (orig, i);
	  break;

	default:
	  abort ();
	}
    }

  if (code == ASM_OPERANDS && map->orig_asm_operands_vector == 0)
    {
      map->orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      map->copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      map->copy_asm_constraints_vector
	= ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

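/* Illustrative effect (hypothetical input, not from the original source):
   given ORIG = (set (reg 100) (plus (reg 101) (const_int 4))), the pseudos
   100 and 101 are mapped through map->reg_map (allocating fresh pseudos on
   first sight), the PLUS and SET are rebuilt around them via the generic
   copying loop above, and only the CONST_INT is shared with the input.  */
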
/* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
   given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
   that it points to the node itself, thus indicating that the node is its
   own (abstract) origin.  Additionally, if the BLOCK_ABSTRACT_ORIGIN for
   the given node is NULL, recursively descend the decl/block tree which
   it is the root of, and for each other ..._DECL or BLOCK node contained
   therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
   still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
   values to point to themselves.  */

static void
set_block_origin_self (tree stmt)
{
  if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
    {
      BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;

      {
	tree local_decl;

	for (local_decl = BLOCK_VARS (stmt);
	     local_decl != NULL_TREE;
	     local_decl = TREE_CHAIN (local_decl))
	  set_decl_origin_self (local_decl);	/* Potential recursion.  */
      }

      {
	tree subblock;

	for (subblock = BLOCK_SUBBLOCKS (stmt);
	     subblock != NULL_TREE;
	     subblock = BLOCK_CHAIN (subblock))
	  set_block_origin_self (subblock);	/* Recurse.  */
      }
    }
}

/* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
   the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
   node so that it points to the node itself, thus indicating that the
   node represents its own (abstract) origin.  Additionally, if the
   DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
   the decl/block tree of which the given node is the root, and for
   each other ..._DECL or BLOCK node contained therein whose
   DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
   set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
   point to themselves.  */

void
set_decl_origin_self (tree decl)
{
  if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
    {
      DECL_ABSTRACT_ORIGIN (decl) = decl;
      if (TREE_CODE (decl) == FUNCTION_DECL)
	{
	  tree arg;

	  for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	    DECL_ABSTRACT_ORIGIN (arg) = arg;
	  if (DECL_INITIAL (decl) != NULL_TREE
	      && DECL_INITIAL (decl) != error_mark_node)
	    set_block_origin_self (DECL_INITIAL (decl));
	}
    }
}

/* Given a pointer to some BLOCK node, and a boolean value to set the
   "abstract" flags to, set that value into the BLOCK_ABSTRACT flag for
   the given block, and for all local decls and all local sub-blocks
   (recursively) which are contained therein.  */

static void
set_block_abstract_flags (tree stmt, int setting)
{
  tree local_decl;
  tree subblock;

  BLOCK_ABSTRACT (stmt) = setting;

  for (local_decl = BLOCK_VARS (stmt);
       local_decl != NULL_TREE;
       local_decl = TREE_CHAIN (local_decl))
    set_decl_abstract_flags (local_decl, setting);

  for (subblock = BLOCK_SUBBLOCKS (stmt);
       subblock != NULL_TREE;
       subblock = BLOCK_CHAIN (subblock))
    set_block_abstract_flags (subblock, setting);
}

/* Given a pointer to some ..._DECL node, and a boolean value to set the
   "abstract" flags to, set that value into the DECL_ABSTRACT flag for the
   given decl, and (in the case where the decl is a FUNCTION_DECL) also
   set the abstract flags for all of the parameters, local vars, local
   blocks and sub-blocks (recursively) to the same setting.  */

void
set_decl_abstract_flags (tree decl, int setting)
{
  DECL_ABSTRACT (decl) = setting;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      tree arg;

      for (arg = DECL_ARGUMENTS (decl); arg; arg = TREE_CHAIN (arg))
	DECL_ABSTRACT (arg) = setting;
      if (DECL_INITIAL (decl) != NULL_TREE
	  && DECL_INITIAL (decl) != error_mark_node)
	set_block_abstract_flags (DECL_INITIAL (decl), setting);
    }
}

/* Functions to keep track of the values hard regs had at the start of
   the function.  */

rtx
get_hard_reg_initial_reg (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].pseudo, reg))
      return ivs->entries[i].hard_reg;

  return NULL_RTX;
}

rtx
has_func_hard_reg_initial_val (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return NULL_RTX;

  for (i = 0; i < ivs->num_entries; i++)
    if (rtx_equal_p (ivs->entries[i].hard_reg, reg))
      return ivs->entries[i].pseudo;

  return NULL_RTX;
}

rtx
get_func_hard_reg_initial_val (struct function *fun, rtx reg)
{
  struct initial_value_struct *ivs = fun->hard_reg_initial_vals;
  rtx rv = has_func_hard_reg_initial_val (fun, reg);

  if (rv)
    return rv;

  if (ivs == 0)
    {
      fun->hard_reg_initial_vals = ggc_alloc (sizeof (initial_value_struct));
      ivs = fun->hard_reg_initial_vals;
      ivs->num_entries = 0;
      ivs->max_entries = 5;
      ivs->entries = ggc_alloc (5 * sizeof (initial_value_pair));
    }

  if (ivs->num_entries >= ivs->max_entries)
    {
      ivs->max_entries += 5;
      ivs->entries = ggc_realloc (ivs->entries,
				  ivs->max_entries
				  * sizeof (initial_value_pair));
    }

  ivs->entries[ivs->num_entries].hard_reg = reg;
  ivs->entries[ivs->num_entries].pseudo = gen_reg_rtx (GET_MODE (reg));

  return ivs->entries[ivs->num_entries++].pseudo;
}

rtx
get_hard_reg_initial_val (enum machine_mode mode, int regno)
{
  return get_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
}

rtx
has_hard_reg_initial_val (enum machine_mode mode, int regno)
{
  return has_func_hard_reg_initial_val (cfun, gen_rtx_REG (mode, regno));
}

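/* Typical use (illustrative; the register name is target-specific and not
   from this file): a back end that needs the entry-time value of a hard
   register, e.g. the link register when implementing
   __builtin_return_address, can ask for a pseudo guaranteed to hold it:
       rtx entry_lr = get_hard_reg_initial_val (Pmode, LR_REGNUM);
   emit_initial_value_sets below then emits the copies from the hard
   registers into these pseudos at the start of the function.  */
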
void
emit_initial_value_sets (void)
{
  struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
  int i;
  rtx seq;

  if (ivs == 0)
    return;

  start_sequence ();
  for (i = 0; i < ivs->num_entries; i++)
    emit_move_insn (ivs->entries[i].pseudo, ivs->entries[i].hard_reg);
  seq = get_insns ();
  end_sequence ();

  emit_insn_after (seq, entry_of_function ());
}

/* If the backend knows where to allocate pseudos for hard
   register initial values, register these allocations now.  */
void
allocate_initial_values (rtx *reg_equiv_memory_loc ATTRIBUTE_UNUSED)
{
#ifdef ALLOCATE_INITIAL_VALUE
  struct initial_value_struct *ivs = cfun->hard_reg_initial_vals;
  int i;

  if (ivs == 0)
    return;

  for (i = 0; i < ivs->num_entries; i++)
    {
      int regno = REGNO (ivs->entries[i].pseudo);
      rtx x = ALLOCATE_INITIAL_VALUE (ivs->entries[i].hard_reg);

      if (x == NULL_RTX || REG_N_SETS (REGNO (ivs->entries[i].pseudo)) > 1)
	; /* Do nothing.  */
      else if (MEM_P (x))
	reg_equiv_memory_loc[regno] = x;
      else if (REG_P (x))
	{
	  reg_renumber[regno] = REGNO (x);
	  /* Poke the regno right into regno_reg_rtx
	     so that even fixed regs are accepted.  */
	  REGNO (ivs->entries[i].pseudo) = REGNO (x);
	}
      else
	abort ();
    }
#endif
}

#include "gt-integrate.h"