/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Utility functions for manipulating TARGET_MEM_REFs -- tree expressions
   that directly map to addressing modes of the target.  */
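/* As an illustrative sketch only (the names P and I below are made up for
   the example): a reference through a pointer P, indexed by an induction
   variable I with element size 4 and constant displacement 8, can be
   expressed as a TARGET_MEM_REF whose address is

       P + I * 4 + 8        (base + index * step + offset)

   which many targets encode directly in a single
   base + index * scale + displacement addressing mode.  */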
#include "coretypes.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "tree-affine.h"

/* FIXME: We compute address costs using RTL.  */
#include "insn-config.h"
/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, e.g. local statics
     (3) !binds_local_p, e.g. global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3).  And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause incorrect results out the
   back end, because the expander in expr.c validizes the address.  However,
   it would be nice to improve the handling here in order to produce more
   precise results.  */
/* A "template" for memory address, used to determine whether the address is
   valid for memory.  */

typedef struct GTY (()) mem_addr_template
{
  rtx ref;                      /* The template.  */
  rtx * GTY ((skip)) step_p;    /* The point in template where the step should be
                                   filled in.  */
  rtx * GTY ((skip)) off_p;     /* The point in template where the offset should
                                   be filled in.  */
} mem_addr_template;

DEF_VEC_O (mem_addr_template);
DEF_VEC_ALLOC_O (mem_addr_template, gc);
/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  */

static GTY(()) VEC (mem_addr_template, gc) *mem_addr_template_list;
#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
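/* An illustrative computation only: in the generic address space (AS == 0),
   an address that uses a base, an index and an offset, but no symbol and no
   step, selects template index

     TEMPL_IDX (0, NULL, base, index, NULL, offset)
       == (1 << 3) | (1 << 2) | 1 == 13.  */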
/* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
   STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores pointers
   to where step is placed to *STEP_P and offset to *OFFSET_P.  */

gen_addr_rtx (enum machine_mode address_mode,
	      rtx symbol, rtx base, rtx index, rtx step, rtx offset,
	      rtx *addr, rtx **step_p, rtx **offset_p)
  act_elem = gen_rtx_MULT (address_mode, act_elem, step);
  *step_p = &XEXP (act_elem, 1);

  *addr = simplify_gen_binary (PLUS, address_mode, base, *addr);

  act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);
  *offset_p = &XEXP (act_elem, 1);

  if (GET_CODE (symbol) == SYMBOL_REF
      || GET_CODE (symbol) == LABEL_REF
      || GET_CODE (symbol) == CONST)
    act_elem = gen_rtx_CONST (address_mode, act_elem);

  *addr = gen_rtx_PLUS (address_mode, *addr, act_elem);

  *addr = gen_rtx_PLUS (address_mode, *addr, offset);
  *offset_p = &XEXP (*addr, 1);
/* Returns address for TARGET_MEM_REF with parameters given by ADDR.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  */
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)

  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;
  if (addr->step && !integer_onep (addr->step))
    st = immed_double_int_const (tree_to_double_int (addr->step), address_mode);

  if (addr->offset && !integer_zerop (addr->offset))
    off = immed_double_int_const
	    (double_int_sext (tree_to_double_int (addr->offset),
			      TYPE_PRECISION (TREE_TYPE (addr->offset))),
	     address_mode);

  unsigned int templ_index
    = TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

  if (templ_index
      >= VEC_length (mem_addr_template, mem_addr_template_list))
    VEC_safe_grow_cleared (mem_addr_template, gc, mem_addr_template_list,
			   templ_index + 1);
  /* Reuse the templates for addresses, so that we do not waste memory.  */
  templ = VEC_index (mem_addr_template, mem_addr_template_list, templ_index);

  sym = (addr->symbol
	 ? gen_rtx_SYMBOL_REF (address_mode, ggc_strdup ("test_symbol"))
	 : NULL_RTX);
  bse = (addr->base
	 ? gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1)
	 : NULL_RTX);
  idx = (addr->index
	 ? gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 2)
	 : NULL_RTX);

  gen_addr_rtx (address_mode, sym, bse, idx,
		st ? const0_rtx : NULL_RTX,
		off ? const0_rtx : NULL_RTX,
		&templ->ref,
		st ? &templ->step_p : NULL,
		off ? &templ->off_p : NULL);
  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, address_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, address_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, address_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  gen_addr_rtx (address_mode, sym, bse, idx, st, off, &address, NULL, NULL);
/* Returns address of MEM_REF in TYPE.  */

tree_mem_ref_addr (tree type, tree mem_ref)

  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  act_elem = TMR_INDEX (mem_ref);
  act_elem = fold_build2 (MULT_EXPR, sizetype, act_elem, step);

  act_elem = TMR_INDEX2 (mem_ref);
  addr_off = fold_build2 (PLUS_EXPR, sizetype, addr_off, act_elem);

  if (offset && !integer_zerop (offset))
    {
      offset = fold_convert (sizetype, offset);
      addr_off = fold_build2 (PLUS_EXPR, sizetype, addr_off, offset);
    }

  addr = fold_build2 (POINTER_PLUS_EXPR, type, addr_base, addr_off);
/* Returns true if a memory reference in MODE and with parameters given by
   ADDR is valid on the current target.  */

valid_mem_ref_p (enum machine_mode mode, addr_space_t as,
		 struct mem_address *addr)

  address = addr_for_mem_ref (addr, as, false);

  return memory_address_addr_space_p (mode, address, as);
/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false omit the verification step.  */

create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

      && POINTER_TYPE_P (TREE_TYPE (addr->base)))

  base = build_int_cst (ptr_type_node, 0);

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.  */
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
/* Returns true if OBJ is an object whose address is a link time constant.  */

fixed_address_object_p (tree obj)

  return (TREE_CODE (obj) == VAR_DECL
	  && (TREE_STATIC (obj)
	      || DECL_EXTERNAL (obj))
	  && ! DECL_DLLIMPORT_P (obj));
/* If ADDR contains the address of an object that is a link time constant,
   move it to PARTS->symbol.  */

move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)

  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (!double_int_one_p (addr->elts[i].coef))
	continue;

      val = addr->elts[i].val;
      if (TREE_CODE (val) == ADDR_EXPR
	  && fixed_address_object_p (TREE_OPERAND (val, 0)))
	break;
    }

  aff_combination_remove_elt (addr, i);
/* If ADDR contains an instance of BASE_HINT, move it to PARTS->base.  */

move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
		   aff_tree *addr)

  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (!double_int_one_p (addr->elts[i].coef))
	continue;

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))
	break;
    }

  /* Cast value to appropriate pointer type.  We cannot use a pointer
     to TYPE directly, as the back-end will assume registers of pointer
     type are aligned, and just the base itself may not actually be.
     We use a void pointer to the type's address space instead.  */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
/* If ADDR contains an address of a dereferenced pointer, move it to
   PARTS->base.  */

move_pointer_to_base (struct mem_address *parts, aff_tree *addr)

  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (!double_int_one_p (addr->elts[i].coef))
	continue;

      val = addr->elts[i].val;
      if (POINTER_TYPE_P (TREE_TYPE (val)))
	break;
    }

  aff_combination_remove_elt (addr, i);
/* Moves the loop variant part V in linear address ADDR to be the index
   of PARTS.  */

move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)

  tree val = NULL_TREE;

  gcc_assert (!parts->index);
  for (i = 0; i < addr->n; i++)
    {
      val = addr->elts[i].val;
      if (operand_equal_p (val, v, 0))
	break;
    }

  parts->index = fold_convert (sizetype, val);
  parts->step = double_int_to_tree (sizetype, addr->elts[i].coef);
  aff_combination_remove_elt (addr, i);
/* Adds ELT to PARTS.  */

add_to_parts (struct mem_address *parts, tree elt)

  parts->index = fold_convert (sizetype, elt);

  /* Add ELT to base.  */
  type = TREE_TYPE (parts->base);
  if (POINTER_TYPE_P (type))
    parts->base = fold_build2 (POINTER_PLUS_EXPR, type,
			       parts->base,
			       fold_convert (sizetype, elt));
  else
    parts->base = fold_build2 (PLUS_EXPR, type,
			       parts->base, elt);
/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and moves the corresponding
   element(s) to PARTS.  */

most_expensive_mult_to_index (tree type, struct mem_address *parts,
			      aff_tree *addr, bool speed)

  addr_space_t as = TYPE_ADDR_SPACE (type);
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  double_int best_mult, amult, amult_neg;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  enum tree_code op_code;

  best_mult = double_int_zero;
  for (i = 0; i < addr->n; i++)
    {
      if (!double_int_fits_in_shwi_p (addr->elts[i].coef))
	continue;

      coef = double_int_to_shwi (addr->elts[i].coef);
      if (coef == 1
	  || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
	continue;

      acost = multiply_by_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
	{
	  best_mult_cost = acost;
	  best_mult = addr->elts[i].coef;
	}
    }

  /* Collect elements multiplied by best_mult.  */
  for (i = j = 0; i < addr->n; i++)
    {
      amult = addr->elts[i].coef;
      amult_neg = double_int_ext_for_comb (double_int_neg (amult), addr);

      if (double_int_equal_p (amult, best_mult))
	op_code = PLUS_EXPR;
      else if (double_int_equal_p (amult_neg, best_mult))
	op_code = MINUS_EXPR;
      else
	{
	  addr->elts[j] = addr->elts[i];
	  j++;
	  continue;
	}

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
	mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
	mult_elt = elt;
      else
	mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }

  parts->index = mult_elt;
  parts->step = double_int_to_tree (sizetype, best_mult);
/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but a single
   register in an address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */
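/* As an illustrative sketch only (the names below are made up for the
   example): an affine address of the form

       &a + 4 * i + 16

   would typically be split as

       parts->symbol = &a, parts->index = (sizetype) i,
       parts->step = 4, parts->offset = 16,

   with parts->base left NULL, which create_mem_ref_raw can then try to
   encode as a single TARGET_MEM_REF.  */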
addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
	       tree base_hint, struct mem_address *parts,
	       bool speed)

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  if (!double_int_zero_p (addr->offset))
    parts->offset = double_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* No need to do address parts reassociation if the number of parts
     is <= 2 -- in that case, no loop invariant code motion can be
     exposed.  */

  if (!base_hint && (addr->n > 2))
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication
     to index.  */
  most_expensive_mult_to_index (type, parts, addr, speed);

  /* Try to find a base of the reference.  Since at the moment
     there is no reliable way to distinguish between a pointer and its
     offset, this is just a guess.  */
  if (!parts->symbol && base_hint)
    move_hint_to_base (type, parts, base_hint, addr);
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (!double_int_one_p (addr->elts[i].coef))
	part = fold_build2 (MULT_EXPR, sizetype, part,
			    double_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }

  add_to_parts (parts, fold_convert (sizetype, addr->rest));
/* Force the PARTS into registers.  */

gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)

  parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
					    is_gimple_mem_ref_addr, NULL_TREE,
					    true, GSI_SAME_STMT);
  parts->index = force_gimple_operand_gsi (gsi, parts->index,
					   true, NULL_TREE,
					   true, GSI_SAME_STMT);
/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary,
   computations are emitted in front of GSI.  TYPE is the type of the
   created memory reference.  IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non-NULL if IV_CAND comes from a base address
   object.  */

create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);

  /* The expression is too complicated.  Try making it simpler.  */

  if (parts.step && !integer_onep (parts.step))
    {
      /* Move the multiplication to index.  */
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
				fold_build2 (MULT_EXPR, sizetype,
					     parts.index, parts.step),
				true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
    }

  gcc_assert (is_gimple_val (tmp));
  /* Add the symbol to base, eventually forcing it to register.  */

  gcc_assert (useless_type_conversion_p
		(sizetype, TREE_TYPE (parts.base)));

  atype = TREE_TYPE (tmp);
  parts.base = force_gimple_operand_gsi_1 (gsi,
		fold_build2 (POINTER_PLUS_EXPR, atype,
			     tmp,
			     fold_convert (sizetype, parts.base)),
		is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);

  parts.index = parts.base;

  parts.symbol = NULL_TREE;

  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  /* Add index to base.  */

  atype = TREE_TYPE (parts.base);
  parts.base = force_gimple_operand_gsi_1 (gsi,
		fold_build2 (POINTER_PLUS_EXPR, atype,
			     parts.base, parts.index),
		is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);

  parts.base = parts.index;
  parts.index = NULL_TREE;

  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Try adding offset to base.  */

      atype = TREE_TYPE (parts.base);
      parts.base = force_gimple_operand_gsi_1 (gsi,
		    fold_build2 (POINTER_PLUS_EXPR, atype,
				 parts.base,
				 fold_convert (sizetype, parts.offset)),
		    is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);

      parts.base = parts.offset;
      parts.offset = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
    }
  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
/* Copies components of the address from OP to ADDR.  */

get_address_description (tree op, struct mem_address *addr)

  if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
    {
      addr->symbol = TMR_BASE (op);
      addr->base = TMR_INDEX2 (op);
    }

  addr->symbol = NULL_TREE;

  gcc_assert (integer_zerop (TMR_BASE (op)));
  addr->base = TMR_INDEX2 (op);

  addr->base = TMR_BASE (op);

  addr->index = TMR_INDEX (op);
  addr->step = TMR_STEP (op);
  addr->offset = TMR_OFFSET (op);
/* Copies the additional information attached to target_mem_ref FROM to TO.  */

copy_mem_ref_info (tree to, tree from)

  /* And the info about the original reference.  */
  TREE_SIDE_EFFECTS (to) = TREE_SIDE_EFFECTS (from);
  TREE_THIS_VOLATILE (to) = TREE_THIS_VOLATILE (from);
/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */
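/* An illustrative case only: if constant propagation has turned ADDR.base
   into the integer constant 8 while ADDR.offset is 4, the first case below
   folds the 8 into the offset, leaving offset 12 and a NULL base.  */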
maybe_fold_tmr (tree ref)

  struct mem_address addr;
  bool changed = false;

  get_address_description (ref, &addr);

  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
    }

  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
    }
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      HOST_WIDE_INT offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
			 (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset), 0);
    }

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      off = fold_binary_to_constant (MULT_EXPR, sizetype,
				     off, addr.step);
      addr.step = NULL_TREE;

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
    }

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target -- the propagation
     result would not be valid anyway.  */
  ret = create_mem_ref_raw (TREE_TYPE (ref),
			    TREE_TYPE (addr.offset), &addr, false);
  copy_mem_ref_info (ret, ref);
/* Dump PARTS to FILE.  */

extern void dump_mem_address (FILE *, struct mem_address *);

dump_mem_address (FILE *file, struct mem_address *parts)

  fprintf (file, "symbol: ");
  print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
  fprintf (file, "\n");

  fprintf (file, "base: ");
  print_generic_expr (file, parts->base, TDF_SLIM);
  fprintf (file, "\n");

  fprintf (file, "index: ");
  print_generic_expr (file, parts->index, TDF_SLIM);
  fprintf (file, "\n");

  fprintf (file, "step: ");
  print_generic_expr (file, parts->step, TDF_SLIM);
  fprintf (file, "\n");

  fprintf (file, "offset: ");
  print_generic_expr (file, parts->offset, TDF_SLIM);
  fprintf (file, "\n");

#include "gt-tree-ssa-address.h"