/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
   that directly map to addressing modes of the target.  */
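
/* Editor's illustration (not part of the original sources): a TARGET_MEM_REF
   denotes the address  BASE + INDEX * STEP + INDEX2 + OFFSET.  For example,
   the C access  p[i]  for  int *p  may be lowered to a reference with
   base = p, index = (sizetype) i, step = 4 and offset = 0 on a target that
   supports a base + index * scale addressing mode.  */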
#include "coretypes.h"
#include "double-int.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-ivopts.h"
#include "statistics.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "tree-inline.h"
#include "tree-affine.h"

/* FIXME: We compute address costs using RTL.  */
#include "tree-ssa-address.h"
/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, eg local statics
     (3) !binds_local_p, eg global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3).  And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause incorrect results out the
   back end, because the expander in expr.c validizes the address.  However
   it would be nice to improve the handling here in order to produce more
   precise results.  */
/* A "template" for memory address, used to determine whether the address is
   valid for mode.  */
typedef struct GTY (()) mem_addr_template {
  rtx ref;			/* The template.  */
  rtx * GTY ((skip)) step_p;	/* The point in template where the step should be
				   filled in.  */
  rtx * GTY ((skip)) off_p;	/* The point in template where the offset should
				   be filled in.  */
} mem_addr_template;
/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  */

static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;
#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
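
/* Editor's illustration (not part of the original sources): in the generic
   address space (AS == 0), an address with a base, an index and an offset
   but no symbol and no step uses the template slot
   TEMPL_IDX (0, NULL, base, index, NULL, off) == 0b01101 == 13.  */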
/* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
   STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores pointers
   to where step is placed to *STEP_P and offset to *OFFSET_P.  */
static void
gen_addr_rtx (machine_mode address_mode,
	      rtx symbol, rtx base, rtx index, rtx step, rtx offset,
	      rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  if (index)
    {
      act_elem = index;
      if (step)
	{
	  act_elem = gen_rtx_MULT (address_mode, act_elem, step);

	  if (step_p)
	    *step_p = &XEXP (act_elem, 1);
	}

      *addr = act_elem;
    }

  if (base && base != const0_rtx)
    {
      if (*addr)
	*addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
	*addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
	{
	  act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

	  if (offset_p)
	    *offset_p = &XEXP (act_elem, 1);

	  if (GET_CODE (symbol) == SYMBOL_REF
	      || GET_CODE (symbol) == LABEL_REF
	      || GET_CODE (symbol) == CONST)
	    act_elem = gen_rtx_CONST (address_mode, act_elem);
	}

      if (*addr)
	*addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
	*addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
	{
	  *addr = gen_rtx_PLUS (address_mode, *addr, offset);
	  if (offset_p)
	    *offset_p = &XEXP (*addr, 1);
	}
      else
	{
	  *addr = offset;
	  if (offset_p)
	    *offset_p = addr;
	}
    }

  if (!*addr)
    *addr = const0_rtx;
}
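
/* Editor's note (illustration, not from the original sources): with all
   operands present the function above builds RTL roughly of the shape

     (plus (plus (mult index step) base)
	   (const (plus symbol offset)))

   and leaves *STEP_P pointing at the step operand of the MULT and *OFFSET_P
   at the offset inside the CONST, so that addr_for_mem_ref can patch the
   cached templates in place.  */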
/* Description of a memory address.  */

struct mem_address
{
  tree symbol, base, index, step, offset;
};
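
/* Editor's note (illustration, not from the original sources): the parts
   denote the address  SYMBOL + BASE + INDEX * STEP + OFFSET,  where SYMBOL
   is the ADDR_EXPR of a link-time constant object and contributes that
   object's address; any field may be NULL_TREE when the corresponding
   component is absent.  */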
/* Returns address for TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  */
rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)
{
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  if (addr->step && !integer_onep (addr->step))
    st = immed_wide_int_const (addr->step, pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    {
      offset_int dc = offset_int::from (addr->offset, SIGNED);
      off = immed_wide_int_const (dc, pointer_mode);
    }
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
	vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
	{
	  sym = (addr->symbol ?
		 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
		 : NULL_RTX);
	  bse = (addr->base ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
		 : NULL_RTX);
	  idx = (addr->index ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
		 : NULL_RTX);

	  gen_addr_rtx (pointer_mode, sym, bse, idx,
			st ? const0_rtx : NULL_RTX,
			off ? const0_rtx : NULL_RTX,
			&templ->ref,
			st ? &templ->step_p : NULL,
			off ? &templ->off_p : NULL);
	}

      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}
/* Implement addr_for_mem_ref () directly from a tree, which avoids exporting
   the mem_address structure.  */
rtx
addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
{
  struct mem_address addr;

  get_address_description (exp, &addr);
  return addr_for_mem_ref (&addr, as, really_expand);
}
/* Returns address of MEM_REF in TYPE.  */

tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
	act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
				act_elem, step);
      addr_off = act_elem;
    }

  act_elem = TMR_INDEX2 (mem_ref);
  if (act_elem)
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
				addr_off, act_elem);
      else
	addr_off = act_elem;
    }

  if (offset && !integer_zerop (offset))
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
				fold_convert (TREE_TYPE (addr_off), offset));
      else
	addr_off = offset;
    }

  if (addr_off)
    addr = fold_build_pointer_plus (addr_base, addr_off);
  else
    addr = addr_base;

  return addr;
}
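
/* Editor's illustration (not part of the original sources): for a
   TARGET_MEM_REF with base p, index i, step 4 and offset 8, the function
   above returns the equivalent of  p + ((sizetype) i * 4 + 8).  */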
/* Returns true if a memory reference in MODE and with parameters given by
   ADDR is valid on the current target.  */

static bool
valid_mem_ref_p (machine_mode mode, addr_space_t as,
		 struct mem_address *addr)
{
  rtx address;

  address = addr_for_mem_ref (addr, as, false);
  if (!address)
    return false;

  return memory_address_addr_space_p (mode, address, as);
}
/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false omit the verification step.  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (ptr_type_node, 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ??? As IVOPTs does not follow restrictions to where the base
     pointer may point to create a MEM_REF only if we know that
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}
/* Returns true if OBJ is an object whose address is a link time constant.  */

static bool
fixed_address_object_p (tree obj)
{
  return (TREE_CODE (obj) == VAR_DECL
	  && (TREE_STATIC (obj)
	      || DECL_EXTERNAL (obj))
	  && ! DECL_DLLIMPORT_P (obj));
}
/* If ADDR contains an address of object that is a link time constant,
   move it to PARTS->symbol.  */

static void
move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (TREE_CODE (val) == ADDR_EXPR
	  && fixed_address_object_p (TREE_OPERAND (val, 0)))
	break;
    }

  if (i == addr->n)
    return;

  parts->symbol = val;
  aff_combination_remove_elt (addr, i);
}
/* If ADDR contains an instance of BASE_HINT, move it to PARTS->base.  */

static void
move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
		   aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;
  int qual;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))
	break;
    }

  if (i == addr->n)
    return;

  /* Cast value to appropriate pointer type.  We cannot use a pointer
     to TYPE directly, as the back-end will assume registers of pointer
     type are aligned, and just the base itself may not actually be.
     We use void pointer to the type's address space instead.  */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
}
/* If ADDR contains an address of a dereferenced pointer, move it to
   PARTS->base.  */

static void
move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (POINTER_TYPE_P (TREE_TYPE (val)))
	break;
    }

  if (i == addr->n)
    return;

  parts->base = val;
  aff_combination_remove_elt (addr, i);
}
/* Moves the loop variant part V in linear address ADDR to be the index
   of PARTS.  */

static void
move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
{
  unsigned i;
  tree val = NULL_TREE;

  gcc_assert (!parts->index);
  for (i = 0; i < addr->n; i++)
    {
      val = addr->elts[i].val;
      if (operand_equal_p (val, v, 0))
	break;
    }

  if (i == addr->n)
    return;

  parts->index = fold_convert (sizetype, val);
  parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
  aff_combination_remove_elt (addr, i);
}
/* Adds ELT to PARTS.  */

static void
add_to_parts (struct mem_address *parts, tree elt)
{
  tree type;

  if (!parts->index)
    {
      parts->index = fold_convert (sizetype, elt);
      return;
    }

  if (!parts->base)
    {
      parts->base = elt;
      return;
    }

  /* Add ELT to base.  */
  type = TREE_TYPE (parts->base);
  if (POINTER_TYPE_P (type))
    parts->base = fold_build_pointer_plus (parts->base, elt);
  else
    parts->base = fold_build2 (PLUS_EXPR, type,
			       parts->base, elt);
}
/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and move the corresponding
   element(s) to PARTS.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
			      aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  offset_int best_mult = 0;
  for (i = 0; i < addr->n; i++)
    {
      if (!wi::fits_shwi_p (addr->elts[i].coef))
	continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
	  || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
	continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
	{
	  best_mult_cost = acost;
	  best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
	}
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  */
  for (i = j = 0; i < addr->n; i++)
    {
      offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
      offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));

      if (amult == best_mult)
	op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
	op_code = MINUS_EXPR;
      else
	{
	  addr->elts[j] = addr->elts[i];
	  j++;
	  continue;
	}

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
	mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
	mult_elt = elt;
      else
	mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  parts->index = mult_elt;
  parts->step = wide_int_to_tree (sizetype, best_mult);
}
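
/* Editor's illustration (not part of the original sources): for the affine
   combination  a + 4*i + 4*j,  with 4 a multiplier supported by the target's
   addressing modes, the function above sets parts->index = i + j and
   parts->step = 4, leaving only  a  in ADDR.  */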
/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but single
   register in address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
	       tree base_hint, struct mem_address *parts,
	       bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  if (addr->offset != 0)
    parts->offset = wide_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* No need to do address parts reassociation if the number of parts
     is <= 2 -- in that case, no loop invariant code motion can be
     exposed.  */

  if (!base_hint && (addr->n > 2))
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication
     to index.  */
  most_expensive_mult_to_index (type, parts, addr, speed);

  /* Try to find a base of the reference.  Since at the moment
     there is no reliable way how to distinguish between pointer and its
     offset, this is just a guess.  */
  if (!parts->symbol && base_hint)
    move_hint_to_base (type, parts, base_hint, addr);
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (addr->elts[i].coef != 1)
	part = fold_build2 (MULT_EXPR, sizetype, part,
			    wide_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
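
/* Editor's illustration (not part of the original sources): splitting the
   affine address  &a + 4*i + 16  for an array of 4-byte elements typically
   yields parts->symbol = &a, parts->index = (sizetype) i, parts->step = 4
   and parts->offset = 16, provided the target allows the multiplier 4.  */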
/* Force the PARTS to register.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
					      is_gimple_mem_ref_addr, NULL_TREE,
					      true, GSI_SAME_STMT);
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
					     true, NULL_TREE,
					     true, GSI_SAME_STMT);
}
/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary
   computations are emitted in front of GSI.  TYPE is the type
   of the created memory reference.  IV_CAND is the selected iv candidate in
   ADDR, and BASE_HINT is non-NULL if IV_CAND comes from a base address
   object.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  if (parts.step && !integer_onep (parts.step))
    {
      /* Move the multiplication to index.  */
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
				fold_build2 (MULT_EXPR, sizetype,
					     parts.index, parts.step),
				true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.symbol)
    {
      tmp = parts.symbol;
      gcc_assert (is_gimple_val (tmp));

      /* Add the symbol to base, eventually forcing it to register.  */
      if (parts.base)
	{
	  gcc_assert (useless_type_conversion_p
				(sizetype, TREE_TYPE (parts.base)));

	  if (parts.index)
	    {
	      parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (tmp, parts.base),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	    }
	  else
	    {
	      parts.index = parts.base;
	      parts.base = tmp;
	    }
	}
      else
	parts.base = tmp;
      parts.symbol = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.index)
    {
      /* Add index to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.index),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.index;
      parts.index = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Try adding offset to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.offset),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.offset;
      parts.offset = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
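
/* Editor's summary (not part of the original sources): when the initial
   TARGET_MEM_REF is rejected, the fallbacks above progressively simplify the
   address -- the step is folded into the index, the symbol is added to the
   base, then the index and finally the offset are added to the base -- until
   only a plain base register remains.  */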
/* Copies components of the address from OP to ADDR.  */

void
get_address_description (tree op, struct mem_address *addr)
{
  if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
    {
      addr->symbol = TMR_BASE (op);
      addr->base = TMR_INDEX2 (op);
    }
  else
    {
      addr->symbol = NULL_TREE;
      if (TMR_INDEX2 (op))
	{
	  gcc_assert (integer_zerop (TMR_BASE (op)));
	  addr->base = TMR_INDEX2 (op);
	}
      else
	addr->base = TMR_BASE (op);
    }
  addr->index = TMR_INDEX (op);
  addr->step = TMR_STEP (op);
  addr->offset = TMR_OFFSET (op);
}
/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
	      || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
	;
      else if ((TREE_CODE (base) == MEM_REF
		|| TREE_CODE (base) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
	       && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
	{
	  struct ptr_info_def *new_pi;
	  unsigned int align, misalign;

	  duplicate_ssa_name_ptr_info
	    (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
	  new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
	  /* We have to be careful about transferring alignment information.  */
	  if (get_ptr_info_alignment (new_pi, &align, &misalign)
	      && TREE_CODE (old_ref) == MEM_REF
	      && !(TREE_CODE (new_ref) == TARGET_MEM_REF
		   && (TMR_INDEX2 (new_ref)
		       || (TMR_STEP (new_ref)
			   && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
			       < align)))))
	    {
	      unsigned int inc = (mem_ref_offset (old_ref).to_short_addr ()
				  - mem_ref_offset (new_ref).to_short_addr ());
	      adjust_ptr_info_misalignment (new_pi, inc);
	    }
	  else
	    mark_ptr_info_alignment_unknown (new_pi);
	}
      else if (TREE_CODE (base) == VAR_DECL
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	{
	  struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
	  pt_solution_set_var (&pi->pt, base);
	}
    }
}
/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      HOST_WIDE_INT offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
			 (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target -- the propagation
     result would not be valid anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
				TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
/* Dump PARTS to FILE.  */

extern void dump_mem_address (FILE *, struct mem_address *);
void
dump_mem_address (FILE *file, struct mem_address *parts)
{
  if (parts->symbol)
    {
      fprintf (file, "symbol: ");
      print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->base)
    {
      fprintf (file, "base: ");
      print_generic_expr (file, parts->base, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->index)
    {
      fprintf (file, "index: ");
      print_generic_expr (file, parts->index, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->step)
    {
      fprintf (file, "step: ");
      print_generic_expr (file, parts->step, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->offset)
    {
      fprintf (file, "offset: ");
      print_generic_expr (file, parts->offset, TDF_SLIM);
      fprintf (file, "\n");
    }
}
#include "gt-tree-ssa-address.h"