/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21 /* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
22 that directly map to addressing modes of the target. */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "output.h"
#include "tree-pretty-print.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "flags.h"
#include "tree-inline.h"
#include "tree-affine.h"

/* FIXME: We compute address costs using RTL.  */
#include "insn-config.h"
#include "rtl.h"
#include "recog.h"
#include "expr.h"
#include "ggc.h"
#include "target.h"
49 /* TODO -- handling of symbols (according to Richard Hendersons
50 comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):
52 There are at least 5 different kinds of symbols that we can run up against:
54 (1) binds_local_p, small data area.
55 (2) binds_local_p, eg local statics
56 (3) !binds_local_p, eg global variables
57 (4) thread local, local_exec
58 (5) thread local, !local_exec
60 Now, (1) won't appear often in an array context, but it certainly can.
61 All you have to do is set -GN high enough, or explicitly mark any
62 random object __attribute__((section (".sdata"))).
64 All of these affect whether or not a symbol is in fact a valid address.
65 The only one tested here is (3). And that result may very well
66 be incorrect for (4) or (5).
   An incorrect result here does not cause incorrect results out the
   back end, because the expander in expr.c validizes the address.  However
   it would be nice to improve the handling here in order to produce more
   precise results.  */
73 /* A "template" for memory address, used to determine whether the address is
76 typedef struct GTY (()) mem_addr_template
{
77 rtx ref
; /* The template. */
78 rtx
* GTY ((skip
)) step_p
; /* The point in template where the step should be
80 rtx
* GTY ((skip
)) off_p
; /* The point in template where the offset should
84 DEF_VEC_O (mem_addr_template
);
85 DEF_VEC_ALLOC_O (mem_addr_template
, gc
);
87 /* The templates. Each of the low five bits of the index corresponds to one
88 component of TARGET_MEM_REF being present, while the high bits identify
89 the address space. See TEMPL_IDX. */
91 static GTY(()) VEC (mem_addr_template
, gc
) *mem_addr_template_list
;
/* Combines an address space and the presence flags of the five
   TARGET_MEM_REF components into an index into
   mem_addr_template_list.  The low five bits encode which components
   are present; the address space occupies the high bits.  */
#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
101 /* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
102 STEP and OFFSET to *ADDR using address mode ADDRESS_MODE. Stores pointers
103 to where step is placed to *STEP_P and offset to *OFFSET_P. */
106 gen_addr_rtx (enum machine_mode address_mode
,
107 rtx symbol
, rtx base
, rtx index
, rtx step
, rtx offset
,
108 rtx
*addr
, rtx
**step_p
, rtx
**offset_p
)
123 act_elem
= gen_rtx_MULT (address_mode
, act_elem
, step
);
126 *step_p
= &XEXP (act_elem
, 1);
132 if (base
&& base
!= const0_rtx
)
135 *addr
= simplify_gen_binary (PLUS
, address_mode
, base
, *addr
);
145 act_elem
= gen_rtx_PLUS (address_mode
, act_elem
, offset
);
148 *offset_p
= &XEXP (act_elem
, 1);
150 if (GET_CODE (symbol
) == SYMBOL_REF
151 || GET_CODE (symbol
) == LABEL_REF
152 || GET_CODE (symbol
) == CONST
)
153 act_elem
= gen_rtx_CONST (address_mode
, act_elem
);
157 *addr
= gen_rtx_PLUS (address_mode
, *addr
, act_elem
);
165 *addr
= gen_rtx_PLUS (address_mode
, *addr
, offset
);
167 *offset_p
= &XEXP (*addr
, 1);
181 /* Returns address for TARGET_MEM_REF with parameters given by ADDR
183 If REALLY_EXPAND is false, just make fake registers instead
184 of really expanding the operands, and perform the expansion in-place
185 by using one of the "templates". */
188 addr_for_mem_ref (struct mem_address
*addr
, addr_space_t as
,
191 enum machine_mode address_mode
= targetm
.addr_space
.address_mode (as
);
192 rtx address
, sym
, bse
, idx
, st
, off
;
193 struct mem_addr_template
*templ
;
195 if (addr
->step
&& !integer_onep (addr
->step
))
196 st
= immed_double_int_const (tree_to_double_int (addr
->step
), address_mode
);
200 if (addr
->offset
&& !integer_zerop (addr
->offset
))
201 off
= immed_double_int_const
202 (double_int_sext (tree_to_double_int (addr
->offset
),
203 TYPE_PRECISION (TREE_TYPE (addr
->offset
))),
210 unsigned int templ_index
211 = TEMPL_IDX (as
, addr
->symbol
, addr
->base
, addr
->index
, st
, off
);
214 >= VEC_length (mem_addr_template
, mem_addr_template_list
))
215 VEC_safe_grow_cleared (mem_addr_template
, gc
, mem_addr_template_list
,
218 /* Reuse the templates for addresses, so that we do not waste memory. */
219 templ
= VEC_index (mem_addr_template
, mem_addr_template_list
, templ_index
);
222 sym
= (addr
->symbol
?
223 gen_rtx_SYMBOL_REF (address_mode
, ggc_strdup ("test_symbol"))
226 gen_raw_REG (address_mode
, LAST_VIRTUAL_REGISTER
+ 1)
229 gen_raw_REG (address_mode
, LAST_VIRTUAL_REGISTER
+ 2)
232 gen_addr_rtx (address_mode
, sym
, bse
, idx
,
233 st
? const0_rtx
: NULL_RTX
,
234 off
? const0_rtx
: NULL_RTX
,
248 /* Otherwise really expand the expressions. */
250 ? expand_expr (addr
->symbol
, NULL_RTX
, address_mode
, EXPAND_NORMAL
)
253 ? expand_expr (addr
->base
, NULL_RTX
, address_mode
, EXPAND_NORMAL
)
256 ? expand_expr (addr
->index
, NULL_RTX
, address_mode
, EXPAND_NORMAL
)
259 gen_addr_rtx (address_mode
, sym
, bse
, idx
, st
, off
, &address
, NULL
, NULL
);
263 /* Returns address of MEM_REF in TYPE. */
266 tree_mem_ref_addr (tree type
, tree mem_ref
)
270 tree step
= TMR_STEP (mem_ref
), offset
= TMR_OFFSET (mem_ref
);
271 tree addr_base
= NULL_TREE
, addr_off
= NULL_TREE
;
273 addr_base
= fold_convert (type
, TMR_BASE (mem_ref
));
275 act_elem
= TMR_INDEX (mem_ref
);
279 act_elem
= fold_build2 (MULT_EXPR
, sizetype
, act_elem
, step
);
283 act_elem
= TMR_INDEX2 (mem_ref
);
287 addr_off
= fold_build2 (PLUS_EXPR
, sizetype
, addr_off
, act_elem
);
292 if (offset
&& !integer_zerop (offset
))
294 offset
= fold_convert (sizetype
, offset
);
296 addr_off
= fold_build2 (PLUS_EXPR
, sizetype
, addr_off
, offset
);
302 addr
= fold_build2 (POINTER_PLUS_EXPR
, type
, addr_base
, addr_off
);
309 /* Returns true if a memory reference in MODE and with parameters given by
310 ADDR is valid on the current target. */
313 valid_mem_ref_p (enum machine_mode mode
, addr_space_t as
,
314 struct mem_address
*addr
)
318 address
= addr_for_mem_ref (addr
, as
, false);
322 return memory_address_addr_space_p (mode
, address
, as
);
325 /* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
326 is valid on the current target and if so, creates and returns the
327 TARGET_MEM_REF. If VERIFY is false omit the verification step. */
330 create_mem_ref_raw (tree type
, tree alias_ptr_type
, struct mem_address
*addr
,
336 && !valid_mem_ref_p (TYPE_MODE (type
), TYPE_ADDR_SPACE (type
), addr
))
339 if (addr
->step
&& integer_onep (addr
->step
))
340 addr
->step
= NULL_TREE
;
343 addr
->offset
= fold_convert (alias_ptr_type
, addr
->offset
);
345 addr
->offset
= build_int_cst (alias_ptr_type
, 0);
353 && POINTER_TYPE_P (TREE_TYPE (addr
->base
)))
360 base
= build_int_cst (ptr_type_node
, 0);
364 /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
365 ??? As IVOPTs does not follow restrictions to where the base
366 pointer may point to create a MEM_REF only if we know that
368 if ((TREE_CODE (base
) == ADDR_EXPR
|| TREE_CODE (base
) == INTEGER_CST
)
369 && (!index2
|| integer_zerop (index2
))
370 && (!addr
->index
|| integer_zerop (addr
->index
)))
371 return fold_build2 (MEM_REF
, type
, base
, addr
->offset
);
373 return build5 (TARGET_MEM_REF
, type
,
374 base
, addr
->offset
, addr
->index
, addr
->step
, index2
);
377 /* Returns true if OBJ is an object whose address is a link time constant. */
380 fixed_address_object_p (tree obj
)
382 return (TREE_CODE (obj
) == VAR_DECL
383 && (TREE_STATIC (obj
)
384 || DECL_EXTERNAL (obj
))
385 && ! DECL_DLLIMPORT_P (obj
));
388 /* If ADDR contains an address of object that is a link time constant,
389 move it to PARTS->symbol. */
392 move_fixed_address_to_symbol (struct mem_address
*parts
, aff_tree
*addr
)
395 tree val
= NULL_TREE
;
397 for (i
= 0; i
< addr
->n
; i
++)
399 if (!double_int_one_p (addr
->elts
[i
].coef
))
402 val
= addr
->elts
[i
].val
;
403 if (TREE_CODE (val
) == ADDR_EXPR
404 && fixed_address_object_p (TREE_OPERAND (val
, 0)))
412 aff_combination_remove_elt (addr
, i
);
415 /* If ADDR contains an instance of BASE_HINT, move it to PARTS->base. */
418 move_hint_to_base (tree type
, struct mem_address
*parts
, tree base_hint
,
422 tree val
= NULL_TREE
;
425 for (i
= 0; i
< addr
->n
; i
++)
427 if (!double_int_one_p (addr
->elts
[i
].coef
))
430 val
= addr
->elts
[i
].val
;
431 if (operand_equal_p (val
, base_hint
, 0))
438 /* Cast value to appropriate pointer type. We cannot use a pointer
439 to TYPE directly, as the back-end will assume registers of pointer
440 type are aligned, and just the base itself may not actually be.
441 We use void pointer to the type's address space instead. */
442 qual
= ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type
));
443 type
= build_qualified_type (void_type_node
, qual
);
444 parts
->base
= fold_convert (build_pointer_type (type
), val
);
445 aff_combination_remove_elt (addr
, i
);
448 /* If ADDR contains an address of a dereferenced pointer, move it to
452 move_pointer_to_base (struct mem_address
*parts
, aff_tree
*addr
)
455 tree val
= NULL_TREE
;
457 for (i
= 0; i
< addr
->n
; i
++)
459 if (!double_int_one_p (addr
->elts
[i
].coef
))
462 val
= addr
->elts
[i
].val
;
463 if (POINTER_TYPE_P (TREE_TYPE (val
)))
471 aff_combination_remove_elt (addr
, i
);
474 /* Moves the loop variant part V in linear address ADDR to be the index
478 move_variant_to_index (struct mem_address
*parts
, aff_tree
*addr
, tree v
)
481 tree val
= NULL_TREE
;
483 gcc_assert (!parts
->index
);
484 for (i
= 0; i
< addr
->n
; i
++)
486 val
= addr
->elts
[i
].val
;
487 if (operand_equal_p (val
, v
, 0))
494 parts
->index
= fold_convert (sizetype
, val
);
495 parts
->step
= double_int_to_tree (sizetype
, addr
->elts
[i
].coef
);
496 aff_combination_remove_elt (addr
, i
);
499 /* Adds ELT to PARTS. */
502 add_to_parts (struct mem_address
*parts
, tree elt
)
508 parts
->index
= fold_convert (sizetype
, elt
);
518 /* Add ELT to base. */
519 type
= TREE_TYPE (parts
->base
);
520 if (POINTER_TYPE_P (type
))
521 parts
->base
= fold_build2 (POINTER_PLUS_EXPR
, type
,
523 fold_convert (sizetype
, elt
));
525 parts
->base
= fold_build2 (PLUS_EXPR
, type
,
529 /* Finds the most expensive multiplication in ADDR that can be
530 expressed in an addressing mode and move the corresponding
531 element(s) to PARTS. */
534 most_expensive_mult_to_index (tree type
, struct mem_address
*parts
,
535 aff_tree
*addr
, bool speed
)
537 addr_space_t as
= TYPE_ADDR_SPACE (type
);
538 enum machine_mode address_mode
= targetm
.addr_space
.address_mode (as
);
540 double_int best_mult
, amult
, amult_neg
;
541 unsigned best_mult_cost
= 0, acost
;
542 tree mult_elt
= NULL_TREE
, elt
;
544 enum tree_code op_code
;
546 best_mult
= double_int_zero
;
547 for (i
= 0; i
< addr
->n
; i
++)
549 if (!double_int_fits_in_shwi_p (addr
->elts
[i
].coef
))
552 coef
= double_int_to_shwi (addr
->elts
[i
].coef
);
554 || !multiplier_allowed_in_address_p (coef
, TYPE_MODE (type
), as
))
557 acost
= multiply_by_cost (coef
, address_mode
, speed
);
559 if (acost
> best_mult_cost
)
561 best_mult_cost
= acost
;
562 best_mult
= addr
->elts
[i
].coef
;
569 /* Collect elements multiplied by best_mult. */
570 for (i
= j
= 0; i
< addr
->n
; i
++)
572 amult
= addr
->elts
[i
].coef
;
573 amult_neg
= double_int_ext_for_comb (double_int_neg (amult
), addr
);
575 if (double_int_equal_p (amult
, best_mult
))
577 else if (double_int_equal_p (amult_neg
, best_mult
))
578 op_code
= MINUS_EXPR
;
581 addr
->elts
[j
] = addr
->elts
[i
];
586 elt
= fold_convert (sizetype
, addr
->elts
[i
].val
);
588 mult_elt
= fold_build2 (op_code
, sizetype
, mult_elt
, elt
);
589 else if (op_code
== PLUS_EXPR
)
592 mult_elt
= fold_build1 (NEGATE_EXPR
, sizetype
, elt
);
596 parts
->index
= mult_elt
;
597 parts
->step
= double_int_to_tree (sizetype
, best_mult
);
600 /* Splits address ADDR for a memory access of type TYPE into PARTS.
601 If BASE_HINT is non-NULL, it specifies an SSA name to be used
602 preferentially as base of the reference, and IV_CAND is the selected
603 iv candidate used in ADDR.
605 TODO -- be more clever about the distribution of the elements of ADDR
606 to PARTS. Some architectures do not support anything but single
607 register in address, possibly with a small integer offset; while
608 create_mem_ref will simplify the address to an acceptable shape
609 later, it would be more efficient to know that asking for complicated
610 addressing modes is useless. */
613 addr_to_parts (tree type
, aff_tree
*addr
, tree iv_cand
,
614 tree base_hint
, struct mem_address
*parts
,
620 parts
->symbol
= NULL_TREE
;
621 parts
->base
= NULL_TREE
;
622 parts
->index
= NULL_TREE
;
623 parts
->step
= NULL_TREE
;
625 if (!double_int_zero_p (addr
->offset
))
626 parts
->offset
= double_int_to_tree (sizetype
, addr
->offset
);
628 parts
->offset
= NULL_TREE
;
630 /* Try to find a symbol. */
631 move_fixed_address_to_symbol (parts
, addr
);
633 /* No need to do address parts reassociation if the number of parts
634 is <= 2 -- in that case, no loop invariant code motion can be
637 if (!base_hint
&& (addr
->n
> 2))
638 move_variant_to_index (parts
, addr
, iv_cand
);
640 /* First move the most expensive feasible multiplication
643 most_expensive_mult_to_index (type
, parts
, addr
, speed
);
645 /* Try to find a base of the reference. Since at the moment
646 there is no reliable way how to distinguish between pointer and its
647 offset, this is just a guess. */
648 if (!parts
->symbol
&& base_hint
)
649 move_hint_to_base (type
, parts
, base_hint
, addr
);
650 if (!parts
->symbol
&& !parts
->base
)
651 move_pointer_to_base (parts
, addr
);
653 /* Then try to process the remaining elements. */
654 for (i
= 0; i
< addr
->n
; i
++)
656 part
= fold_convert (sizetype
, addr
->elts
[i
].val
);
657 if (!double_int_one_p (addr
->elts
[i
].coef
))
658 part
= fold_build2 (MULT_EXPR
, sizetype
, part
,
659 double_int_to_tree (sizetype
, addr
->elts
[i
].coef
));
660 add_to_parts (parts
, part
);
663 add_to_parts (parts
, fold_convert (sizetype
, addr
->rest
));
666 /* Force the PARTS to register. */
669 gimplify_mem_ref_parts (gimple_stmt_iterator
*gsi
, struct mem_address
*parts
)
672 parts
->base
= force_gimple_operand_gsi_1 (gsi
, parts
->base
,
673 is_gimple_mem_ref_addr
, NULL_TREE
,
674 true, GSI_SAME_STMT
);
676 parts
->index
= force_gimple_operand_gsi (gsi
, parts
->index
,
678 true, GSI_SAME_STMT
);
681 /* Creates and returns a TARGET_MEM_REF for address ADDR. If necessary
682 computations are emitted in front of GSI. TYPE is the mode
683 of created memory reference. IV_CAND is the selected iv candidate in ADDR,
684 and BASE_HINT is non NULL if IV_CAND comes from a base address
688 create_mem_ref (gimple_stmt_iterator
*gsi
, tree type
, aff_tree
*addr
,
689 tree alias_ptr_type
, tree iv_cand
, tree base_hint
, bool speed
)
693 struct mem_address parts
;
695 addr_to_parts (type
, addr
, iv_cand
, base_hint
, &parts
, speed
);
696 gimplify_mem_ref_parts (gsi
, &parts
);
697 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
701 /* The expression is too complicated. Try making it simpler. */
703 if (parts
.step
&& !integer_onep (parts
.step
))
705 /* Move the multiplication to index. */
706 gcc_assert (parts
.index
);
707 parts
.index
= force_gimple_operand_gsi (gsi
,
708 fold_build2 (MULT_EXPR
, sizetype
,
709 parts
.index
, parts
.step
),
710 true, NULL_TREE
, true, GSI_SAME_STMT
);
711 parts
.step
= NULL_TREE
;
713 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
721 gcc_assert (is_gimple_val (tmp
));
723 /* Add the symbol to base, eventually forcing it to register. */
726 gcc_assert (useless_type_conversion_p
727 (sizetype
, TREE_TYPE (parts
.base
)));
731 atype
= TREE_TYPE (tmp
);
732 parts
.base
= force_gimple_operand_gsi_1 (gsi
,
733 fold_build2 (POINTER_PLUS_EXPR
, atype
,
735 fold_convert (sizetype
, parts
.base
)),
736 is_gimple_mem_ref_addr
, NULL_TREE
, true, GSI_SAME_STMT
);
740 parts
.index
= parts
.base
;
746 parts
.symbol
= NULL_TREE
;
748 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
755 /* Add index to base. */
758 atype
= TREE_TYPE (parts
.base
);
759 parts
.base
= force_gimple_operand_gsi_1 (gsi
,
760 fold_build2 (POINTER_PLUS_EXPR
, atype
,
763 is_gimple_mem_ref_addr
, NULL_TREE
, true, GSI_SAME_STMT
);
766 parts
.base
= parts
.index
;
767 parts
.index
= NULL_TREE
;
769 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
774 if (parts
.offset
&& !integer_zerop (parts
.offset
))
776 /* Try adding offset to base. */
779 atype
= TREE_TYPE (parts
.base
);
780 parts
.base
= force_gimple_operand_gsi_1 (gsi
,
781 fold_build2 (POINTER_PLUS_EXPR
, atype
,
783 fold_convert (sizetype
, parts
.offset
)),
784 is_gimple_mem_ref_addr
, NULL_TREE
, true, GSI_SAME_STMT
);
787 parts
.base
= parts
.offset
;
789 parts
.offset
= NULL_TREE
;
791 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
796 /* Verify that the address is in the simplest possible shape
797 (only a register). If we cannot create such a memory reference,
798 something is really wrong. */
799 gcc_assert (parts
.symbol
== NULL_TREE
);
800 gcc_assert (parts
.index
== NULL_TREE
);
801 gcc_assert (!parts
.step
|| integer_onep (parts
.step
));
802 gcc_assert (!parts
.offset
|| integer_zerop (parts
.offset
));
806 /* Copies components of the address from OP to ADDR. */
809 get_address_description (tree op
, struct mem_address
*addr
)
811 if (TREE_CODE (TMR_BASE (op
)) == ADDR_EXPR
)
813 addr
->symbol
= TMR_BASE (op
);
814 addr
->base
= TMR_INDEX2 (op
);
818 addr
->symbol
= NULL_TREE
;
821 gcc_assert (integer_zerop (TMR_BASE (op
)));
822 addr
->base
= TMR_INDEX2 (op
);
825 addr
->base
= TMR_BASE (op
);
827 addr
->index
= TMR_INDEX (op
);
828 addr
->step
= TMR_STEP (op
);
829 addr
->offset
= TMR_OFFSET (op
);
832 /* Copies the additional information attached to target_mem_ref FROM to TO. */
835 copy_mem_ref_info (tree to
, tree from
)
837 /* And the info about the original reference. */
838 TREE_SIDE_EFFECTS (to
) = TREE_SIDE_EFFECTS (from
);
839 TREE_THIS_VOLATILE (to
) = TREE_THIS_VOLATILE (from
);
842 /* Move constants in target_mem_ref REF to offset. Returns the new target
843 mem ref if anything changes, NULL_TREE otherwise. */
846 maybe_fold_tmr (tree ref
)
848 struct mem_address addr
;
849 bool changed
= false;
852 get_address_description (ref
, &addr
);
855 && TREE_CODE (addr
.base
) == INTEGER_CST
856 && !integer_zerop (addr
.base
))
858 addr
.offset
= fold_binary_to_constant (PLUS_EXPR
,
859 TREE_TYPE (addr
.offset
),
860 addr
.offset
, addr
.base
);
861 addr
.base
= NULL_TREE
;
866 && TREE_CODE (TREE_OPERAND (addr
.symbol
, 0)) == MEM_REF
)
868 addr
.offset
= fold_binary_to_constant
869 (PLUS_EXPR
, TREE_TYPE (addr
.offset
),
871 TREE_OPERAND (TREE_OPERAND (addr
.symbol
, 0), 1));
872 addr
.symbol
= TREE_OPERAND (TREE_OPERAND (addr
.symbol
, 0), 0);
876 && handled_component_p (TREE_OPERAND (addr
.symbol
, 0)))
878 HOST_WIDE_INT offset
;
879 addr
.symbol
= build_fold_addr_expr
880 (get_addr_base_and_unit_offset
881 (TREE_OPERAND (addr
.symbol
, 0), &offset
));
882 addr
.offset
= int_const_binop (PLUS_EXPR
,
883 addr
.offset
, size_int (offset
));
887 if (addr
.index
&& TREE_CODE (addr
.index
) == INTEGER_CST
)
892 off
= fold_binary_to_constant (MULT_EXPR
, sizetype
,
894 addr
.step
= NULL_TREE
;
897 addr
.offset
= fold_binary_to_constant (PLUS_EXPR
,
898 TREE_TYPE (addr
.offset
),
900 addr
.index
= NULL_TREE
;
907 /* If we have propagated something into this TARGET_MEM_REF and thus
908 ended up folding it, always create a new TARGET_MEM_REF regardless
909 if it is valid in this for on the target - the propagation result
910 wouldn't be anyway. */
911 ret
= create_mem_ref_raw (TREE_TYPE (ref
),
912 TREE_TYPE (addr
.offset
), &addr
, false);
913 copy_mem_ref_info (ret
, ref
);
917 /* Dump PARTS to FILE. */
919 extern void dump_mem_address (FILE *, struct mem_address
*);
921 dump_mem_address (FILE *file
, struct mem_address
*parts
)
925 fprintf (file
, "symbol: ");
926 print_generic_expr (file
, TREE_OPERAND (parts
->symbol
, 0), TDF_SLIM
);
927 fprintf (file
, "\n");
931 fprintf (file
, "base: ");
932 print_generic_expr (file
, parts
->base
, TDF_SLIM
);
933 fprintf (file
, "\n");
937 fprintf (file
, "index: ");
938 print_generic_expr (file
, parts
->index
, TDF_SLIM
);
939 fprintf (file
, "\n");
943 fprintf (file
, "step: ");
944 print_generic_expr (file
, parts
->step
, TDF_SLIM
);
945 fprintf (file
, "\n");
949 fprintf (file
, "offset: ");
950 print_generic_expr (file
, parts
->offset
, TDF_SLIM
);
951 fprintf (file
, "\n");
955 #include "gt-tree-ssa-address.h"