/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Utility functions for manipulating TARGET_MEM_REFs -- tree expressions
   that directly map to addressing modes of the target.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-affine.h"

/* FIXME: We compute address costs using RTL.  */
#include "insn-config.h"
#include "recog.h"
#include "expr.h"
#include "ggc.h"
#include "target.h"
#include "expmed.h"

/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, e.g. local statics
     (3) !binds_local_p, e.g. global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3).  And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause incorrect results out the
   back end, because the expander in expr.c validates the address.  However
   it would be nice to improve the handling here in order to produce more
   precise symbol annotations.  */

/* A "template" for memory address, used to determine whether the address is
   valid for mode.  */

typedef struct GTY (()) mem_addr_template
{
  rtx ref;			/* The template.  */
  rtx * GTY ((skip)) step_p;	/* The point in template where the step should be
				   filled in.  */
  rtx * GTY ((skip)) off_p;	/* The point in template where the offset should
				   be filled in.  */
} mem_addr_template;

DEF_VEC_O (mem_addr_template);
DEF_VEC_ALLOC_O (mem_addr_template, gc);

/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  */

static GTY(()) VEC (mem_addr_template, gc) *mem_addr_template_list;

#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))

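/* For instance (an illustrative sketch): an address in the generic address
   space (AS == 0) that has a base, an index, a step and an offset but no
   symbol gets TEMPL_IDX (0, NULL, base, index, step, offset) == 0b01111 == 15,
   so every address of that shape shares the template stored in slot 15 of
   mem_addr_template_list.  */
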
/* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
   STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores pointers
   to where step is placed to *STEP_P and offset to *OFFSET_P.  */

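/* As a rough illustration: with all parts present the resulting RTX has the
   shape
     (plus (plus BASE (mult INDEX STEP)) (const (plus SYMBOL OFFSET)))
   with *STEP_P and *OFFSET_P pointing at the STEP and OFFSET operands, so
   that a cached template can later be patched in place.  */
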
static void
gen_addr_rtx (enum machine_mode address_mode,
	      rtx symbol, rtx base, rtx index, rtx step, rtx offset,
	      rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  if (index && index != const0_rtx)
    {
      act_elem = index;
      if (step)
	{
	  act_elem = gen_rtx_MULT (address_mode, act_elem, step);

	  if (step_p)
	    *step_p = &XEXP (act_elem, 1);
	}

      *addr = act_elem;
    }

  if (base && base != const0_rtx)
    {
      if (*addr)
	*addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
	*addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
	{
	  act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

	  if (offset_p)
	    *offset_p = &XEXP (act_elem, 1);

	  if (GET_CODE (symbol) == SYMBOL_REF
	      || GET_CODE (symbol) == LABEL_REF
	      || GET_CODE (symbol) == CONST)
	    act_elem = gen_rtx_CONST (address_mode, act_elem);
	}

      if (*addr)
	*addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
	*addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
	{
	  *addr = gen_rtx_PLUS (address_mode, *addr, offset);
	  if (offset_p)
	    *offset_p = &XEXP (*addr, 1);
	}
      else
	{
	  *addr = offset;
	  if (offset_p)
	    *offset_p = addr;
	}
    }

  if (!*addr)
    *addr = const0_rtx;
}

/* Returns address for TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  */

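/* Illustration: when REALLY_EXPAND is false, the base and index operands are
   stood in for by fake registers (LAST_VIRTUAL_REGISTER + 1 and + 2) and the
   step and offset by const0_rtx placeholders, so only the shape of the
   address is built and later checked for validity.  */
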
static rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)
{
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  if (addr->step && !integer_onep (addr->step))
    st = immed_double_int_const (tree_to_double_int (addr->step), pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    off = immed_double_int_const
	    (tree_to_double_int (addr->offset)
	     .sext (TYPE_PRECISION (TREE_TYPE (addr->offset))),
	     pointer_mode);
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index
	  >= VEC_length (mem_addr_template, mem_addr_template_list))
	VEC_safe_grow_cleared (mem_addr_template, gc, mem_addr_template_list,
			       templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &VEC_index (mem_addr_template, mem_addr_template_list,
			  templ_index);
      if (!templ->ref)
	{
	  sym = (addr->symbol ?
		 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
		 : NULL_RTX);
	  bse = (addr->base ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
		 : NULL_RTX);
	  idx = (addr->index ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
		 : NULL_RTX);

	  gen_addr_rtx (pointer_mode, sym, bse, idx,
			st ? const0_rtx : NULL_RTX,
			off ? const0_rtx : NULL_RTX,
			&templ->ref,
			st ? &templ->step_p : NULL,
			off ? &templ->off_p : NULL);
	}

      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}

/* Returns address of MEM_REF in TYPE.  */

tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
	act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
				act_elem, step);
      addr_off = act_elem;
    }

  act_elem = TMR_INDEX2 (mem_ref);
  if (act_elem)
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
				addr_off, act_elem);
      else
	addr_off = act_elem;
    }

  if (offset && !integer_zerop (offset))
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
				fold_convert (TREE_TYPE (addr_off), offset));
      else
	addr_off = offset;
    }

  if (addr_off)
    addr = fold_build_pointer_plus (addr_base, addr_off);
  else
    addr = addr_base;

  return addr;
}

/* Returns true if a memory reference in MODE and with parameters given by
   ADDR is valid on the current target.  */

static bool
valid_mem_ref_p (enum machine_mode mode, addr_space_t as,
		 struct mem_address *addr)
{
  rtx address;

  address = addr_for_mem_ref (addr, as, false);
  if (!address)
    return false;

  return memory_address_addr_space_p (mode, address, as);
}

/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false, omit the verification step.  */

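/* For reference, the five operands of the TARGET_MEM_REF built below are
   BASE, OFFSET, INDEX, STEP and INDEX2, describing the address
   BASE + INDEX * STEP + INDEX2 + OFFSET.  */
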
static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (ptr_type_node, 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ??? As IVOPTs does not follow restrictions to where the base
     pointer may point to, create a MEM_REF only if we know that the
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}

/* Returns true if OBJ is an object whose address is a link time constant.  */

static bool
fixed_address_object_p (tree obj)
{
  return (TREE_CODE (obj) == VAR_DECL
	  && (TREE_STATIC (obj)
	      || DECL_EXTERNAL (obj))
	  && ! DECL_DLLIMPORT_P (obj));
}

/* If ADDR contains an address of an object that is a link time constant,
   move it to PARTS->symbol.  */

static void
move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (!addr->elts[i].coef.is_one ())
	continue;

      val = addr->elts[i].val;
      if (TREE_CODE (val) == ADDR_EXPR
	  && fixed_address_object_p (TREE_OPERAND (val, 0)))
	break;
    }

  if (i == addr->n)
    return;

  parts->symbol = val;
  aff_combination_remove_elt (addr, i);
}

/* If ADDR contains an instance of BASE_HINT, move it to PARTS->base.  */

static void
move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
		   aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;
  int qual;

  for (i = 0; i < addr->n; i++)
    {
      if (!addr->elts[i].coef.is_one ())
	continue;

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))
	break;
    }

  if (i == addr->n)
    return;

  /* Cast value to appropriate pointer type.  We cannot use a pointer
     to TYPE directly, as the back-end will assume registers of pointer
     type are aligned, and just the base itself may not actually be.
     We use void pointer to the type's address space instead.  */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
}

/* If ADDR contains an address of a dereferenced pointer, move it to
   PARTS->base.  */

static void
move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (!addr->elts[i].coef.is_one ())
	continue;

      val = addr->elts[i].val;
      if (POINTER_TYPE_P (TREE_TYPE (val)))
	break;
    }

  if (i == addr->n)
    return;

  parts->base = val;
  aff_combination_remove_elt (addr, i);
}

/* Moves the loop variant part V in linear address ADDR to be the index
   of PARTS.  */

static void
move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
{
  unsigned i;
  tree val = NULL_TREE;

  gcc_assert (!parts->index);
  for (i = 0; i < addr->n; i++)
    {
      val = addr->elts[i].val;
      if (operand_equal_p (val, v, 0))
	break;
    }

  if (i == addr->n)
    return;

  parts->index = fold_convert (sizetype, val);
  parts->step = double_int_to_tree (sizetype, addr->elts[i].coef);
  aff_combination_remove_elt (addr, i);
}

/* Adds ELT to PARTS.  */

static void
add_to_parts (struct mem_address *parts, tree elt)
{
  tree type;

  if (!parts->index)
    {
      parts->index = fold_convert (sizetype, elt);
      return;
    }

  if (!parts->base)
    {
      parts->base = elt;
      return;
    }

  /* Add ELT to base.  */
  type = TREE_TYPE (parts->base);
  if (POINTER_TYPE_P (type))
    parts->base = fold_build_pointer_plus (parts->base, elt);
  else
    parts->base = fold_build2 (PLUS_EXPR, type,
			       parts->base, elt);
}

/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and moves the corresponding
   element(s) to PARTS.  */

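/* For example (illustrative): given ADDR = p + 4 * i + 4 * j + k on a target
   that allows a multiplier of 4 in addresses, the two elements with
   coefficient 4 are combined into PARTS->index = i + j with PARTS->step = 4,
   while p and k remain in ADDR.  */
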
static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
			      aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  double_int best_mult, amult, amult_neg;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  best_mult = double_int_zero;
  for (i = 0; i < addr->n; i++)
    {
      if (!addr->elts[i].coef.fits_shwi ())
	continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
	  || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
	continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
	{
	  best_mult_cost = acost;
	  best_mult = addr->elts[i].coef;
	}
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  */
  for (i = j = 0; i < addr->n; i++)
    {
      amult = addr->elts[i].coef;
      amult_neg = double_int_ext_for_comb (-amult, addr);

      if (amult == best_mult)
	op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
	op_code = MINUS_EXPR;
      else
	{
	  addr->elts[j] = addr->elts[i];
	  j++;
	  continue;
	}

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
	mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
	mult_elt = elt;
      else
	mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  parts->index = mult_elt;
  parts->step = double_int_to_tree (sizetype, best_mult);
}

/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but a single
   register in an address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

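/* A worked example (illustrative, assuming the target allows a multiplier
   of 4 in addresses): for an access a[i] to a global array of 4-byte
   elements, ADDR is roughly &a + 4 * i, and the split yields
   PARTS->symbol = &a, PARTS->index = i and PARTS->step = 4, with base and
   offset left empty.  */
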
static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
	       tree base_hint, struct mem_address *parts,
	       bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  if (!addr->offset.is_zero ())
    parts->offset = double_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* No need to do address parts reassociation if the number of parts
     is <= 2 -- in that case, no loop invariant code motion can be
     exposed.  */

  if (!base_hint && (addr->n > 2))
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication
     to index.  */
  most_expensive_mult_to_index (type, parts, addr, speed);

  /* Try to find a base of the reference.  Since at the moment
     there is no reliable way to distinguish between a pointer and its
     offset, this is just a guess.  */
  if (!parts->symbol && base_hint)
    move_hint_to_base (type, parts, base_hint, addr);
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (!addr->elts[i].coef.is_one ())
	part = fold_build2 (MULT_EXPR, sizetype, part,
			    double_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}

/* Force the PARTS into registers.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
					      is_gimple_mem_ref_addr, NULL_TREE,
					      true, GSI_SAME_STMT);
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
					     true, NULL_TREE,
					     true, GSI_SAME_STMT);
}

/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary
   computations are emitted in front of GSI.  TYPE is the type of the created
   memory reference.  IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non-NULL if IV_CAND comes from a base address
   object.  */

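/* When the initial PARTS do not form a valid address, the code below retries
   with progressively simpler shapes: fold the step into the index, add the
   symbol to the base, add the index to the base, and finally add the offset
   to the base, until create_mem_ref_raw succeeds.  */
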
tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  if (parts.step && !integer_onep (parts.step))
    {
      /* Move the multiplication to index.  */
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
				fold_build2 (MULT_EXPR, sizetype,
					     parts.index, parts.step),
				true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.symbol)
    {
      tmp = parts.symbol;
      gcc_assert (is_gimple_val (tmp));

      /* Add the symbol to base, possibly forcing it into a register.  */
      if (parts.base)
	{
	  gcc_assert (useless_type_conversion_p
				(sizetype, TREE_TYPE (parts.base)));

	  if (parts.index)
	    {
	      parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (tmp, parts.base),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	    }
	  else
	    {
	      parts.index = parts.base;
	      parts.base = tmp;
	    }
	}
      else
	parts.base = tmp;
      parts.symbol = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.index)
    {
      /* Add index to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.index),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.index;
      parts.index = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Try adding offset to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.offset),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.offset;
      parts.offset = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));

  return create_mem_ref_raw (type, alias_ptr_type, &parts, true);
}

/* Copies components of the address from OP to ADDR.  */

void
get_address_description (tree op, struct mem_address *addr)
{
  if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
    {
      addr->symbol = TMR_BASE (op);
      addr->base = TMR_INDEX2 (op);
    }
  else
    {
      addr->symbol = NULL_TREE;
      if (TMR_INDEX2 (op))
	{
	  gcc_assert (integer_zerop (TMR_BASE (op)));
	  addr->base = TMR_INDEX2 (op);
	}
      else
	addr->base = TMR_BASE (op);
    }
  addr->index = TMR_INDEX (op);
  addr->step = TMR_STEP (op);
  addr->offset = TMR_OFFSET (op);
}

/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
	      || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
	;
      else if ((TREE_CODE (base) == MEM_REF
		|| TREE_CODE (base) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
	       && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
	{
	  struct ptr_info_def *new_pi;
	  unsigned int align, misalign;

	  duplicate_ssa_name_ptr_info
	    (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
	  new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
	  /* We have to be careful about transferring alignment information.  */
	  if (get_ptr_info_alignment (new_pi, &align, &misalign)
	      && TREE_CODE (old_ref) == MEM_REF
	      && !(TREE_CODE (new_ref) == TARGET_MEM_REF
		   && (TMR_INDEX2 (new_ref)
		       || (TMR_STEP (new_ref)
			   && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
			       < align)))))
	    {
	      unsigned int inc = (mem_ref_offset (old_ref)
				  - mem_ref_offset (new_ref)).low;
	      adjust_ptr_info_misalignment (new_pi, inc);
	    }
	  else
	    mark_ptr_info_alignment_unknown (new_pi);
	}
      else if (TREE_CODE (base) == VAR_DECL
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	{
	  struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
	  pt_solution_set_var (&pi->pt, base);
	}
    }
}

/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */

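/* Illustrative example: a TARGET_MEM_REF whose index is the constant 4 with
   step 8 is rewritten by folding 4 * 8 == 32 into the offset and clearing the
   index and step, producing an equivalent reference with fewer operands.  */
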
tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      HOST_WIDE_INT offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
			 (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target - the propagation
     result would not be valid anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
				TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}

/* Dump PARTS to FILE.  */

extern void dump_mem_address (FILE *, struct mem_address *);
void
dump_mem_address (FILE *file, struct mem_address *parts)
{
  if (parts->symbol)
    {
      fprintf (file, "symbol: ");
      print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->base)
    {
      fprintf (file, "base: ");
      print_generic_expr (file, parts->base, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->index)
    {
      fprintf (file, "index: ");
      print_generic_expr (file, parts->index, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->step)
    {
      fprintf (file, "step: ");
      print_generic_expr (file, parts->step, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->offset)
    {
      fprintf (file, "offset: ");
      print_generic_expr (file, parts->offset, TDF_SLIM);
      fprintf (file, "\n");
    }
}

#include "gt-tree-ssa-address.h"