/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004, 2006, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Utility functions for manipulating TARGET_MEM_REFs -- tree expressions
   that directly map to addressing modes of the target.  */
#include "coretypes.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-affine.h"

/* FIXME: We compute address costs using RTL.  */
#include "insn-config.h"
/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, eg local statics
     (3) !binds_local_p, eg global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3).  And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause incorrect results out the
   back end, because the expander in expr.c validizes the address.  However
   it would be nice to improve the handling here in order to produce more
   precise results.  */
/* A "template" for memory address, used to determine whether the address is
   valid for mode.  */

typedef struct GTY (()) mem_addr_template {
  rtx ref;                      /* The template.  */
  rtx * GTY ((skip)) step_p;    /* The point in template where the step should be
                                   filled in.  */
  rtx * GTY ((skip)) off_p;     /* The point in template where the offset should
                                   be filled in.  */
} mem_addr_template;
/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  */

static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;
#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
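
/* Illustrative sketch, not part of the original file: how TEMPL_IDX packs its
   bits for a hypothetical reference in address space 1 that has a base, an
   index and a step, but no symbol and no offset.  The tree names are made up.  */
#if 0
  unsigned int idx = TEMPL_IDX (1, NULL_TREE, base, index, step, NULL_TREE);
  /* (1 << 5) | (0 << 4) | (1 << 3) | (1 << 2) | (1 << 1) | 0 == 46.  */
#endif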
/* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
   STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores pointers
   to where step is placed to *STEP_P and offset to *OFFSET_P.  */
static void
gen_addr_rtx (enum machine_mode address_mode,
              rtx symbol, rtx base, rtx index, rtx step, rtx offset,
              rtx *addr, rtx **step_p, rtx **offset_p)
      act_elem = gen_rtx_MULT (address_mode, act_elem, step);

        *step_p = &XEXP (act_elem, 1);

  if (base && base != const0_rtx)

        *addr = simplify_gen_binary (PLUS, address_mode, base, *addr);

          act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

            *offset_p = &XEXP (act_elem, 1);

      if (GET_CODE (symbol) == SYMBOL_REF
          || GET_CODE (symbol) == LABEL_REF
          || GET_CODE (symbol) == CONST)
        act_elem = gen_rtx_CONST (address_mode, act_elem);

      *addr = gen_rtx_PLUS (address_mode, *addr, act_elem);

      *addr = gen_rtx_PLUS (address_mode, *addr, offset);

      *offset_p = &XEXP (*addr, 1);
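
/* Illustrative sketch, not part of the original file: with every part present
   (hypothetical SYMBOL, BASE and INDEX rtxes, a step of 4 and an offset of
   16), the address assembled above has roughly the shape

     (plus (plus BASE (mult INDEX (const_int 4)))
           (const (plus SYMBOL (const_int 16))))

   while without a symbol the offset is simply added as a trailing term.  */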
/* Returns address for TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
                  bool really_expand)
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  if (addr->step && !integer_onep (addr->step))
    st = immed_double_int_const (tree_to_double_int (addr->step), pointer_mode);

  if (addr->offset && !integer_zerop (addr->offset))
    off = immed_double_int_const
            (tree_to_double_int (addr->offset)
             .sext (TYPE_PRECISION (TREE_TYPE (addr->offset))),
             pointer_mode);

      unsigned int templ_index
        = TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
        vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];

          sym = (addr->symbol
                 ? gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
                 : NULL_RTX);
          bse = (addr->base
                 ? gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
                 : NULL_RTX);
          idx = (addr->index
                 ? gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
                 : NULL_RTX);

          gen_addr_rtx (pointer_mode, sym, bse, idx,
                        st ? const0_rtx : NULL_RTX,
                        off ? const0_rtx : NULL_RTX,
                        &templ->ref, &templ->step_p, &templ->off_p);

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
         ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
         : NULL_RTX);
  bse = (addr->base
         ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
         : NULL_RTX);
  idx = (addr->index
         ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
         : NULL_RTX);

  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);

  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
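
/* Illustrative sketch, not part of the original file: probing whether a
   symbol + index * 4 + 16 address is representable at all, using the
   fake-register template path (really_expand == false).  The sym_tree and
   idx_tree names below are hypothetical.  */
#if 0
  struct mem_address parts;
  parts.symbol = sym_tree;
  parts.base = NULL_TREE;
  parts.index = idx_tree;
  parts.step = size_int (4);
  parts.offset = size_int (16);
  rtx probe = addr_for_mem_ref (&parts, ADDR_SPACE_GENERIC, false);
#endif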
/* Returns address of MEM_REF in TYPE.  */

tree
tree_mem_ref_addr (tree type, tree mem_ref)

  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  act_elem = TMR_INDEX (mem_ref);

      act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
                              act_elem, step);

  act_elem = TMR_INDEX2 (mem_ref);

        addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
                                addr_off, act_elem);

  if (offset && !integer_zerop (offset))

        addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
                                fold_convert (TREE_TYPE (addr_off), offset));

    addr = fold_build_pointer_plus (addr_base, addr_off);
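
/* Illustrative sketch, not part of the original file: for a hypothetical
   TARGET_MEM_REF with base p, index i, step 4 and offset 16, the address
   rebuilt above is equivalent to

     (type) p + ((sizetype) i * 4 + 16)

   with TMR_INDEX2, when present, folded into the same offset sum.  */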
/* Returns true if a memory reference in MODE and with parameters given by
   ADDR is valid on the current target.  */

bool
valid_mem_ref_p (enum machine_mode mode, addr_space_t as,
                 struct mem_address *addr)

  address = addr_for_mem_ref (addr, as, false);

  return memory_address_addr_space_p (mode, address, as);
/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false omit the verification step.  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
                    bool verify)

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

    addr->offset = fold_convert (alias_ptr_type, addr->offset);

    addr->offset = build_int_cst (alias_ptr_type, 0);

      && POINTER_TYPE_P (TREE_TYPE (addr->base)))

      base = build_int_cst (ptr_type_node, 0);

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ??? As IVOPTs does not follow restrictions to where the base
     pointer may point to create a MEM_REF only if we know that
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
                 base, addr->offset, addr->index, addr->step, index2);
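
/* Illustrative sketch, not part of the original file: a decomposed address
   whose base is the ADDR_EXPR &a and whose index parts are all absent or
   zero, e.g. { base = &a, offset = 16 }, is emitted as the plain two-operand
   MEM_REF above, while any live index or step keeps the five-operand
   TARGET_MEM_REF form.  */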
/* Returns true if OBJ is an object whose address is a link time constant.  */

static bool
fixed_address_object_p (tree obj)
{
  return (TREE_CODE (obj) == VAR_DECL
          && (TREE_STATIC (obj)
              || DECL_EXTERNAL (obj))
          && ! DECL_DLLIMPORT_P (obj));
}
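
/* Illustrative sketch, not part of the original file: objects whose address
   is a link-time constant versus one that is not.  */
#if 0
static int counter;             /* TREE_STATIC     -> fixed address.  */
extern int limit;               /* DECL_EXTERNAL   -> fixed address.  */
void f (void) { int local; }    /* automatic local -> not fixed.  */
#endif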
/* If ADDR contains the address of an object that is a link-time constant,
   move it to PARTS->symbol.  */

static void
move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)

  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)

      if (!addr->elts[i].coef.is_one ())

      val = addr->elts[i].val;
      if (TREE_CODE (val) == ADDR_EXPR
          && fixed_address_object_p (TREE_OPERAND (val, 0)))

  aff_combination_remove_elt (addr, i);
/* If ADDR contains an instance of BASE_HINT, move it to PARTS->base.  */

static void
move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
                   aff_tree *addr)

  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)

      if (!addr->elts[i].coef.is_one ())

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))

  /* Cast value to appropriate pointer type.  We cannot use a pointer
     to TYPE directly, as the back-end will assume registers of pointer
     type are aligned, and just the base itself may not actually be.
     We use void pointer to the type's address space instead.  */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
/* If ADDR contains an address of a dereferenced pointer, move it to
   PARTS->base.  */

static void
move_pointer_to_base (struct mem_address *parts, aff_tree *addr)

  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)

      if (!addr->elts[i].coef.is_one ())

      val = addr->elts[i].val;
      if (POINTER_TYPE_P (TREE_TYPE (val)))

  aff_combination_remove_elt (addr, i);
/* Moves the loop variant part V in linear address ADDR to be the index
   of PARTS.  */

static void
move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)

  tree val = NULL_TREE;

  gcc_assert (!parts->index);
  for (i = 0; i < addr->n; i++)

      val = addr->elts[i].val;
      if (operand_equal_p (val, v, 0))

  parts->index = fold_convert (sizetype, val);
  parts->step = double_int_to_tree (sizetype, addr->elts[i].coef);
  aff_combination_remove_elt (addr, i);
/* Adds ELT to PARTS.  */

static void
add_to_parts (struct mem_address *parts, tree elt)

      parts->index = fold_convert (sizetype, elt);

  /* Add ELT to base.  */
  type = TREE_TYPE (parts->base);
  if (POINTER_TYPE_P (type))
    parts->base = fold_build_pointer_plus (parts->base, elt);

    parts->base = fold_build2 (PLUS_EXPR, type,
                               parts->base, elt);
/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and moves the corresponding
   element(s) to PARTS.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
                              aff_tree *addr, bool speed)

  addr_space_t as = TYPE_ADDR_SPACE (type);
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);

  double_int best_mult, amult, amult_neg;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;

  enum tree_code op_code;

  best_mult = double_int_zero;
  for (i = 0; i < addr->n; i++)

      if (!addr->elts[i].coef.fits_shwi ())

      coef = addr->elts[i].coef.to_shwi ();

          || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)

          best_mult_cost = acost;
          best_mult = addr->elts[i].coef;

  /* Collect elements multiplied by best_mult.  */
  for (i = j = 0; i < addr->n; i++)

      amult = addr->elts[i].coef;
      amult_neg = double_int_ext_for_comb (-amult, addr);

      if (amult == best_mult)

      else if (amult_neg == best_mult)
        op_code = MINUS_EXPR;

          addr->elts[j] = addr->elts[i];

      elt = fold_convert (sizetype, addr->elts[i].val);

        mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);

      else if (op_code == PLUS_EXPR)

        mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);

  parts->index = mult_elt;
  parts->step = double_int_to_tree (sizetype, best_mult);
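
/* Illustrative sketch, not part of the original file: for a hypothetical
   affine address  i * 4 + j * 4 + k + 32  on a target whose addressing modes
   allow a multiplier of 4, the two elements with coefficient 4 are collected
   here, giving  parts->index = (sizetype) i + (sizetype) j  and
   parts->step = 4, while  k  is left in ADDR for the later distribution
   steps and the constant 32 stays in the combination's offset.  */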
/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but single
   register in address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
               tree base_hint, struct mem_address *parts,
               bool speed)

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  if (!addr->offset.is_zero ())
    parts->offset = double_int_to_tree (sizetype, addr->offset);

    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* No need to do address parts reassociation if the number of parts
     is <= 2 -- in that case, no loop invariant code motion can be
     exposed.  */

  if (!base_hint && (addr->n > 2))
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication
     to index.  */
  most_expensive_mult_to_index (type, parts, addr, speed);

  /* Try to find a base of the reference.  Since at the moment
     there is no reliable way to distinguish between a pointer and its
     offset, this is just a guess.  */
  if (!parts->symbol && base_hint)
    move_hint_to_base (type, parts, base_hint, addr);
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)

      part = fold_convert (sizetype, addr->elts[i].val);
      if (!addr->elts[i].coef.is_one ())
        part = fold_build2 (MULT_EXPR, sizetype, part,
                            double_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);

    add_to_parts (parts, fold_convert (sizetype, addr->rest));
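
/* Illustrative sketch, not part of the original file: splitting the
   hypothetical affine address  &arr + 4 * i + 16  (arr a static array, i an
   SSA name) would typically yield

     symbol = &arr, index = (sizetype) i, step = 4, offset = 16,

   with base left NULL; create_mem_ref below then simplifies this shape
   further if the target cannot encode it.  */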
/* Force the PARTS into registers.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)

    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
                                              is_gimple_mem_ref_addr, NULL_TREE,
                                              true, GSI_SAME_STMT);

    parts->index = force_gimple_operand_gsi (gsi, parts->index,
                                             true, NULL_TREE,
                                             true, GSI_SAME_STMT);
/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary
   computations are emitted in front of GSI.  TYPE is the type of the created
   memory reference.  IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non-NULL if IV_CAND comes from a base address
   object.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
                tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)

  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);

  /* The expression is too complicated.  Try making it simpler.  */

  if (parts.step && !integer_onep (parts.step))

      /* Move the multiplication to index.  */
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
                                              fold_build2 (MULT_EXPR, sizetype,
                                                           parts.index, parts.step),
                                              true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);

      gcc_assert (is_gimple_val (tmp));

      /* Add the symbol to base, eventually forcing it to register.  */

          gcc_assert (useless_type_conversion_p
                      (sizetype, TREE_TYPE (parts.base)));

          parts.base = force_gimple_operand_gsi_1 (gsi,
                        fold_build_pointer_plus (tmp, parts.base),
                        is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);

          parts.index = parts.base;

      parts.symbol = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);

      /* Add index to base.  */

          parts.base = force_gimple_operand_gsi_1 (gsi,
                        fold_build_pointer_plus (parts.base, parts.index),
                        is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);

      parts.base = parts.index;
      parts.index = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);

  if (parts.offset && !integer_zerop (parts.offset))

      /* Try adding offset to base.  */

          parts.base = force_gimple_operand_gsi_1 (gsi,
                        fold_build_pointer_plus (parts.base, parts.offset),
                        is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);

          parts.base = parts.offset;

      parts.offset = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
/* Copies components of the address from OP to ADDR.  */

static void
get_address_description (tree op, struct mem_address *addr)

  if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)

      addr->symbol = TMR_BASE (op);
      addr->base = TMR_INDEX2 (op);

      addr->symbol = NULL_TREE;

          gcc_assert (integer_zerop (TMR_BASE (op)));
          addr->base = TMR_INDEX2 (op);

          addr->base = TMR_BASE (op);

  addr->index = TMR_INDEX (op);
  addr->step = TMR_STEP (op);
  addr->offset = TMR_OFFSET (op);
/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  */

void
copy_ref_info (tree new_ref, tree old_ref)

  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
              || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  */

      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))

      tree base = get_base_address (old_ref);

      else if ((TREE_CODE (base) == MEM_REF
                || TREE_CODE (base) == TARGET_MEM_REF)
               && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
               && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))

          struct ptr_info_def *new_pi;
          unsigned int align, misalign;

          duplicate_ssa_name_ptr_info
            (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
          new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
          /* We have to be careful about transferring alignment information.  */
          if (get_ptr_info_alignment (new_pi, &align, &misalign)
              && TREE_CODE (old_ref) == MEM_REF
              && !(TREE_CODE (new_ref) == TARGET_MEM_REF
                   && (TMR_INDEX2 (new_ref)
                       || (TMR_STEP (new_ref)
                           && (TREE_INT_CST_LOW (TMR_STEP (new_ref))

              unsigned int inc = (mem_ref_offset (old_ref)
                                  - mem_ref_offset (new_ref)).low;
              adjust_ptr_info_misalignment (new_pi, inc);

            mark_ptr_info_alignment_unknown (new_pi);

      else if (TREE_CODE (base) == VAR_DECL
               || TREE_CODE (base) == PARM_DECL
               || TREE_CODE (base) == RESULT_DECL)

          struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
          pt_solution_set_var (&pi->pt, base);
/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)

  struct mem_address addr;
  bool changed = false;

  get_address_description (ref, &addr);

      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
                                             TREE_TYPE (addr.offset),
                                             addr.offset, addr.base);
      addr.base = NULL_TREE;

           && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)

      addr.offset = fold_binary_to_constant
                        (PLUS_EXPR, TREE_TYPE (addr.offset),
                         addr.offset,
                         TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);

           && handled_component_p (TREE_OPERAND (addr.symbol, 0)))

      HOST_WIDE_INT offset;
      addr.symbol = build_fold_addr_expr
                      (get_addr_base_and_unit_offset
                         (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
                                     addr.offset, size_int (offset));

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)

      off = fold_binary_to_constant (MULT_EXPR, sizetype,
                                     addr.index, addr.step);
      addr.step = NULL_TREE;

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
                                             TREE_TYPE (addr.offset),
                                             addr.offset, off);
      addr.index = NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target - the propagation
     result would not be valid anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
                                TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
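
/* Illustrative sketch, not part of the original file: a TARGET_MEM_REF whose
   index has been propagated to the constant 3, with step 4 and offset 8, is
   refolded above into an equivalent reference with index and step cleared and
   offset 8 + 3 * 4 == 20.  */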
/* Dump PARTS to FILE.  */

extern void dump_mem_address (FILE *, struct mem_address *);

void
dump_mem_address (FILE *file, struct mem_address *parts)

      fprintf (file, "symbol: ");
      print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
      fprintf (file, "\n");

      fprintf (file, "base: ");
      print_generic_expr (file, parts->base, TDF_SLIM);
      fprintf (file, "\n");

      fprintf (file, "index: ");
      print_generic_expr (file, parts->index, TDF_SLIM);
      fprintf (file, "\n");

      fprintf (file, "step: ");
      print_generic_expr (file, parts->step, TDF_SLIM);
      fprintf (file, "\n");

      fprintf (file, "offset: ");
      print_generic_expr (file, parts->offset, TDF_SLIM);
      fprintf (file, "\n");
#include "gt-tree-ssa-address.h"