/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 /* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
21 that directly map to addressing modes of the target. */
25 #include "coretypes.h"
31 #include "stringpool.h"
33 #include "tree-ssanames.h"
35 #include "insn-config.h"
37 #include "tree-pretty-print.h"
38 #include "fold-const.h"
39 #include "stor-layout.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "tree-ssa-loop-ivopts.h"
46 #include "tree-affine.h"
48 /* FIXME: We compute address costs using RTL. */
49 #include "tree-ssa-address.h"
/* TODO -- handling of symbols (according to Richard Hendersons
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, eg local statics
     (3) !binds_local_p, eg global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3).  And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause incorrect results out the
   back end, because the expander in expr.c validizes the address.  However
   it would be nice to improve the handling here in order to produce more
   precise results.  */
75 /* A "template" for memory address, used to determine whether the address is
78 struct GTY (()) mem_addr_template
{
79 rtx ref
; /* The template. */
80 rtx
* GTY ((skip
)) step_p
; /* The point in template where the step should be
82 rtx
* GTY ((skip
)) off_p
; /* The point in template where the offset should
87 /* The templates. Each of the low five bits of the index corresponds to one
88 component of TARGET_MEM_REF being present, while the high bits identify
89 the address space. See TEMPL_IDX. */
91 static GTY(()) vec
<mem_addr_template
, va_gc
> *mem_addr_template_list
;
/* Index into mem_addr_template_list: the address space in the high bits,
   and one low bit per present TARGET_MEM_REF component.  */
#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS)) << 5 \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
101 /* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
102 STEP and OFFSET to *ADDR using address mode ADDRESS_MODE. Stores pointers
103 to where step is placed to *STEP_P and offset to *OFFSET_P. */
106 gen_addr_rtx (machine_mode address_mode
,
107 rtx symbol
, rtx base
, rtx index
, rtx step
, rtx offset
,
108 rtx
*addr
, rtx
**step_p
, rtx
**offset_p
)
123 act_elem
= gen_rtx_MULT (address_mode
, act_elem
, step
);
126 *step_p
= &XEXP (act_elem
, 1);
132 if (base
&& base
!= const0_rtx
)
135 *addr
= simplify_gen_binary (PLUS
, address_mode
, base
, *addr
);
145 act_elem
= gen_rtx_PLUS (address_mode
, act_elem
, offset
);
148 *offset_p
= &XEXP (act_elem
, 1);
150 if (GET_CODE (symbol
) == SYMBOL_REF
151 || GET_CODE (symbol
) == LABEL_REF
152 || GET_CODE (symbol
) == CONST
)
153 act_elem
= gen_rtx_CONST (address_mode
, act_elem
);
157 *addr
= gen_rtx_PLUS (address_mode
, *addr
, act_elem
);
165 *addr
= gen_rtx_PLUS (address_mode
, *addr
, offset
);
167 *offset_p
= &XEXP (*addr
, 1);
181 /* Description of a memory address. */
185 tree symbol
, base
, index
, step
, offset
;
188 /* Returns address for TARGET_MEM_REF with parameters given by ADDR
190 If REALLY_EXPAND is false, just make fake registers instead
191 of really expanding the operands, and perform the expansion in-place
192 by using one of the "templates". */
195 addr_for_mem_ref (struct mem_address
*addr
, addr_space_t as
,
198 machine_mode address_mode
= targetm
.addr_space
.address_mode (as
);
199 machine_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
200 rtx address
, sym
, bse
, idx
, st
, off
;
201 struct mem_addr_template
*templ
;
203 if (addr
->step
&& !integer_onep (addr
->step
))
204 st
= immed_wide_int_const (addr
->step
, pointer_mode
);
208 if (addr
->offset
&& !integer_zerop (addr
->offset
))
210 offset_int dc
= offset_int::from (addr
->offset
, SIGNED
);
211 off
= immed_wide_int_const (dc
, pointer_mode
);
218 unsigned int templ_index
219 = TEMPL_IDX (as
, addr
->symbol
, addr
->base
, addr
->index
, st
, off
);
221 if (templ_index
>= vec_safe_length (mem_addr_template_list
))
222 vec_safe_grow_cleared (mem_addr_template_list
, templ_index
+ 1);
224 /* Reuse the templates for addresses, so that we do not waste memory. */
225 templ
= &(*mem_addr_template_list
)[templ_index
];
228 sym
= (addr
->symbol
?
229 gen_rtx_SYMBOL_REF (pointer_mode
, ggc_strdup ("test_symbol"))
232 gen_raw_REG (pointer_mode
, LAST_VIRTUAL_REGISTER
+ 1)
235 gen_raw_REG (pointer_mode
, LAST_VIRTUAL_REGISTER
+ 2)
238 gen_addr_rtx (pointer_mode
, sym
, bse
, idx
,
239 st
? const0_rtx
: NULL_RTX
,
240 off
? const0_rtx
: NULL_RTX
,
254 /* Otherwise really expand the expressions. */
256 ? expand_expr (addr
->symbol
, NULL_RTX
, pointer_mode
, EXPAND_NORMAL
)
259 ? expand_expr (addr
->base
, NULL_RTX
, pointer_mode
, EXPAND_NORMAL
)
262 ? expand_expr (addr
->index
, NULL_RTX
, pointer_mode
, EXPAND_NORMAL
)
265 gen_addr_rtx (pointer_mode
, sym
, bse
, idx
, st
, off
, &address
, NULL
, NULL
);
266 if (pointer_mode
!= address_mode
)
267 address
= convert_memory_address (address_mode
, address
);
271 /* implement addr_for_mem_ref() directly from a tree, which avoids exporting
272 the mem_address structure. */
275 addr_for_mem_ref (tree exp
, addr_space_t as
, bool really_expand
)
277 struct mem_address addr
;
278 get_address_description (exp
, &addr
);
279 return addr_for_mem_ref (&addr
, as
, really_expand
);
282 /* Returns address of MEM_REF in TYPE. */
285 tree_mem_ref_addr (tree type
, tree mem_ref
)
289 tree step
= TMR_STEP (mem_ref
), offset
= TMR_OFFSET (mem_ref
);
290 tree addr_base
= NULL_TREE
, addr_off
= NULL_TREE
;
292 addr_base
= fold_convert (type
, TMR_BASE (mem_ref
));
294 act_elem
= TMR_INDEX (mem_ref
);
298 act_elem
= fold_build2 (MULT_EXPR
, TREE_TYPE (act_elem
),
303 act_elem
= TMR_INDEX2 (mem_ref
);
307 addr_off
= fold_build2 (PLUS_EXPR
, TREE_TYPE (addr_off
),
313 if (offset
&& !integer_zerop (offset
))
316 addr_off
= fold_build2 (PLUS_EXPR
, TREE_TYPE (addr_off
), addr_off
,
317 fold_convert (TREE_TYPE (addr_off
), offset
));
323 addr
= fold_build_pointer_plus (addr_base
, addr_off
);
330 /* Returns true if a memory reference in MODE and with parameters given by
331 ADDR is valid on the current target. */
334 valid_mem_ref_p (machine_mode mode
, addr_space_t as
,
335 struct mem_address
*addr
)
339 address
= addr_for_mem_ref (addr
, as
, false);
343 return memory_address_addr_space_p (mode
, address
, as
);
346 /* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
347 is valid on the current target and if so, creates and returns the
348 TARGET_MEM_REF. If VERIFY is false omit the verification step. */
351 create_mem_ref_raw (tree type
, tree alias_ptr_type
, struct mem_address
*addr
,
357 && !valid_mem_ref_p (TYPE_MODE (type
), TYPE_ADDR_SPACE (type
), addr
))
360 if (addr
->step
&& integer_onep (addr
->step
))
361 addr
->step
= NULL_TREE
;
364 addr
->offset
= fold_convert (alias_ptr_type
, addr
->offset
);
366 addr
->offset
= build_int_cst (alias_ptr_type
, 0);
374 && POINTER_TYPE_P (TREE_TYPE (addr
->base
)))
381 base
= build_int_cst (build_pointer_type (type
), 0);
385 /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
386 ??? As IVOPTs does not follow restrictions to where the base
387 pointer may point to create a MEM_REF only if we know that
389 if ((TREE_CODE (base
) == ADDR_EXPR
|| TREE_CODE (base
) == INTEGER_CST
)
390 && (!index2
|| integer_zerop (index2
))
391 && (!addr
->index
|| integer_zerop (addr
->index
)))
392 return fold_build2 (MEM_REF
, type
, base
, addr
->offset
);
394 return build5 (TARGET_MEM_REF
, type
,
395 base
, addr
->offset
, addr
->index
, addr
->step
, index2
);
398 /* Returns true if OBJ is an object whose address is a link time constant. */
401 fixed_address_object_p (tree obj
)
404 && (TREE_STATIC (obj
) || DECL_EXTERNAL (obj
))
405 && ! DECL_DLLIMPORT_P (obj
));
408 /* If ADDR contains an address of object that is a link time constant,
409 move it to PARTS->symbol. */
412 move_fixed_address_to_symbol (struct mem_address
*parts
, aff_tree
*addr
)
415 tree val
= NULL_TREE
;
417 for (i
= 0; i
< addr
->n
; i
++)
419 if (addr
->elts
[i
].coef
!= 1)
422 val
= addr
->elts
[i
].val
;
423 if (TREE_CODE (val
) == ADDR_EXPR
424 && fixed_address_object_p (TREE_OPERAND (val
, 0)))
432 aff_combination_remove_elt (addr
, i
);
435 /* If ADDR contains an instance of BASE_HINT, move it to PARTS->base. */
438 move_hint_to_base (tree type
, struct mem_address
*parts
, tree base_hint
,
442 tree val
= NULL_TREE
;
445 for (i
= 0; i
< addr
->n
; i
++)
447 if (addr
->elts
[i
].coef
!= 1)
450 val
= addr
->elts
[i
].val
;
451 if (operand_equal_p (val
, base_hint
, 0))
458 /* Cast value to appropriate pointer type. We cannot use a pointer
459 to TYPE directly, as the back-end will assume registers of pointer
460 type are aligned, and just the base itself may not actually be.
461 We use void pointer to the type's address space instead. */
462 qual
= ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type
));
463 type
= build_qualified_type (void_type_node
, qual
);
464 parts
->base
= fold_convert (build_pointer_type (type
), val
);
465 aff_combination_remove_elt (addr
, i
);
468 /* If ADDR contains an address of a dereferenced pointer, move it to
472 move_pointer_to_base (struct mem_address
*parts
, aff_tree
*addr
)
475 tree val
= NULL_TREE
;
477 for (i
= 0; i
< addr
->n
; i
++)
479 if (addr
->elts
[i
].coef
!= 1)
482 val
= addr
->elts
[i
].val
;
483 if (POINTER_TYPE_P (TREE_TYPE (val
)))
491 aff_combination_remove_elt (addr
, i
);
494 /* Moves the loop variant part V in linear address ADDR to be the index
498 move_variant_to_index (struct mem_address
*parts
, aff_tree
*addr
, tree v
)
501 tree val
= NULL_TREE
;
503 gcc_assert (!parts
->index
);
504 for (i
= 0; i
< addr
->n
; i
++)
506 val
= addr
->elts
[i
].val
;
507 if (operand_equal_p (val
, v
, 0))
514 parts
->index
= fold_convert (sizetype
, val
);
515 parts
->step
= wide_int_to_tree (sizetype
, addr
->elts
[i
].coef
);
516 aff_combination_remove_elt (addr
, i
);
519 /* Adds ELT to PARTS. */
522 add_to_parts (struct mem_address
*parts
, tree elt
)
528 parts
->index
= fold_convert (sizetype
, elt
);
538 /* Add ELT to base. */
539 type
= TREE_TYPE (parts
->base
);
540 if (POINTER_TYPE_P (type
))
541 parts
->base
= fold_build_pointer_plus (parts
->base
, elt
);
543 parts
->base
= fold_build2 (PLUS_EXPR
, type
,
547 /* Finds the most expensive multiplication in ADDR that can be
548 expressed in an addressing mode and move the corresponding
549 element(s) to PARTS. */
552 most_expensive_mult_to_index (tree type
, struct mem_address
*parts
,
553 aff_tree
*addr
, bool speed
)
555 addr_space_t as
= TYPE_ADDR_SPACE (type
);
556 machine_mode address_mode
= targetm
.addr_space
.address_mode (as
);
558 unsigned best_mult_cost
= 0, acost
;
559 tree mult_elt
= NULL_TREE
, elt
;
561 enum tree_code op_code
;
563 offset_int best_mult
= 0;
564 for (i
= 0; i
< addr
->n
; i
++)
566 if (!wi::fits_shwi_p (addr
->elts
[i
].coef
))
569 coef
= addr
->elts
[i
].coef
.to_shwi ();
571 || !multiplier_allowed_in_address_p (coef
, TYPE_MODE (type
), as
))
574 acost
= mult_by_coeff_cost (coef
, address_mode
, speed
);
576 if (acost
> best_mult_cost
)
578 best_mult_cost
= acost
;
579 best_mult
= offset_int::from (addr
->elts
[i
].coef
, SIGNED
);
586 /* Collect elements multiplied by best_mult. */
587 for (i
= j
= 0; i
< addr
->n
; i
++)
589 offset_int amult
= offset_int::from (addr
->elts
[i
].coef
, SIGNED
);
590 offset_int amult_neg
= -wi::sext (amult
, TYPE_PRECISION (addr
->type
));
592 if (amult
== best_mult
)
594 else if (amult_neg
== best_mult
)
595 op_code
= MINUS_EXPR
;
598 addr
->elts
[j
] = addr
->elts
[i
];
603 elt
= fold_convert (sizetype
, addr
->elts
[i
].val
);
605 mult_elt
= fold_build2 (op_code
, sizetype
, mult_elt
, elt
);
606 else if (op_code
== PLUS_EXPR
)
609 mult_elt
= fold_build1 (NEGATE_EXPR
, sizetype
, elt
);
613 parts
->index
= mult_elt
;
614 parts
->step
= wide_int_to_tree (sizetype
, best_mult
);
617 /* Splits address ADDR for a memory access of type TYPE into PARTS.
618 If BASE_HINT is non-NULL, it specifies an SSA name to be used
619 preferentially as base of the reference, and IV_CAND is the selected
620 iv candidate used in ADDR.
622 TODO -- be more clever about the distribution of the elements of ADDR
623 to PARTS. Some architectures do not support anything but single
624 register in address, possibly with a small integer offset; while
625 create_mem_ref will simplify the address to an acceptable shape
626 later, it would be more efficient to know that asking for complicated
627 addressing modes is useless. */
630 addr_to_parts (tree type
, aff_tree
*addr
, tree iv_cand
,
631 tree base_hint
, struct mem_address
*parts
,
637 parts
->symbol
= NULL_TREE
;
638 parts
->base
= NULL_TREE
;
639 parts
->index
= NULL_TREE
;
640 parts
->step
= NULL_TREE
;
642 if (addr
->offset
!= 0)
643 parts
->offset
= wide_int_to_tree (sizetype
, addr
->offset
);
645 parts
->offset
= NULL_TREE
;
647 /* Try to find a symbol. */
648 move_fixed_address_to_symbol (parts
, addr
);
650 /* No need to do address parts reassociation if the number of parts
651 is <= 2 -- in that case, no loop invariant code motion can be
654 if (!base_hint
&& (addr
->n
> 2))
655 move_variant_to_index (parts
, addr
, iv_cand
);
657 /* First move the most expensive feasible multiplication
660 most_expensive_mult_to_index (type
, parts
, addr
, speed
);
662 /* Try to find a base of the reference. Since at the moment
663 there is no reliable way how to distinguish between pointer and its
664 offset, this is just a guess. */
665 if (!parts
->symbol
&& base_hint
)
666 move_hint_to_base (type
, parts
, base_hint
, addr
);
667 if (!parts
->symbol
&& !parts
->base
)
668 move_pointer_to_base (parts
, addr
);
670 /* Then try to process the remaining elements. */
671 for (i
= 0; i
< addr
->n
; i
++)
673 part
= fold_convert (sizetype
, addr
->elts
[i
].val
);
674 if (addr
->elts
[i
].coef
!= 1)
675 part
= fold_build2 (MULT_EXPR
, sizetype
, part
,
676 wide_int_to_tree (sizetype
, addr
->elts
[i
].coef
));
677 add_to_parts (parts
, part
);
680 add_to_parts (parts
, fold_convert (sizetype
, addr
->rest
));
683 /* Force the PARTS to register. */
686 gimplify_mem_ref_parts (gimple_stmt_iterator
*gsi
, struct mem_address
*parts
)
689 parts
->base
= force_gimple_operand_gsi_1 (gsi
, parts
->base
,
690 is_gimple_mem_ref_addr
, NULL_TREE
,
691 true, GSI_SAME_STMT
);
693 parts
->index
= force_gimple_operand_gsi (gsi
, parts
->index
,
695 true, GSI_SAME_STMT
);
698 /* Creates and returns a TARGET_MEM_REF for address ADDR. If necessary
699 computations are emitted in front of GSI. TYPE is the mode
700 of created memory reference. IV_CAND is the selected iv candidate in ADDR,
701 and BASE_HINT is non NULL if IV_CAND comes from a base address
705 create_mem_ref (gimple_stmt_iterator
*gsi
, tree type
, aff_tree
*addr
,
706 tree alias_ptr_type
, tree iv_cand
, tree base_hint
, bool speed
)
709 struct mem_address parts
;
711 addr_to_parts (type
, addr
, iv_cand
, base_hint
, &parts
, speed
);
712 gimplify_mem_ref_parts (gsi
, &parts
);
713 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
717 /* The expression is too complicated. Try making it simpler. */
719 if (parts
.step
&& !integer_onep (parts
.step
))
721 /* Move the multiplication to index. */
722 gcc_assert (parts
.index
);
723 parts
.index
= force_gimple_operand_gsi (gsi
,
724 fold_build2 (MULT_EXPR
, sizetype
,
725 parts
.index
, parts
.step
),
726 true, NULL_TREE
, true, GSI_SAME_STMT
);
727 parts
.step
= NULL_TREE
;
729 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
737 gcc_assert (is_gimple_val (tmp
));
739 /* Add the symbol to base, eventually forcing it to register. */
742 gcc_assert (useless_type_conversion_p
743 (sizetype
, TREE_TYPE (parts
.base
)));
747 parts
.base
= force_gimple_operand_gsi_1 (gsi
,
748 fold_build_pointer_plus (tmp
, parts
.base
),
749 is_gimple_mem_ref_addr
, NULL_TREE
, true, GSI_SAME_STMT
);
753 parts
.index
= parts
.base
;
759 parts
.symbol
= NULL_TREE
;
761 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
768 /* Add index to base. */
771 parts
.base
= force_gimple_operand_gsi_1 (gsi
,
772 fold_build_pointer_plus (parts
.base
, parts
.index
),
773 is_gimple_mem_ref_addr
, NULL_TREE
, true, GSI_SAME_STMT
);
776 parts
.base
= parts
.index
;
777 parts
.index
= NULL_TREE
;
779 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
784 if (parts
.offset
&& !integer_zerop (parts
.offset
))
786 /* Try adding offset to base. */
789 parts
.base
= force_gimple_operand_gsi_1 (gsi
,
790 fold_build_pointer_plus (parts
.base
, parts
.offset
),
791 is_gimple_mem_ref_addr
, NULL_TREE
, true, GSI_SAME_STMT
);
794 parts
.base
= parts
.offset
;
796 parts
.offset
= NULL_TREE
;
798 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
803 /* Verify that the address is in the simplest possible shape
804 (only a register). If we cannot create such a memory reference,
805 something is really wrong. */
806 gcc_assert (parts
.symbol
== NULL_TREE
);
807 gcc_assert (parts
.index
== NULL_TREE
);
808 gcc_assert (!parts
.step
|| integer_onep (parts
.step
));
809 gcc_assert (!parts
.offset
|| integer_zerop (parts
.offset
));
813 /* Copies components of the address from OP to ADDR. */
816 get_address_description (tree op
, struct mem_address
*addr
)
818 if (TREE_CODE (TMR_BASE (op
)) == ADDR_EXPR
)
820 addr
->symbol
= TMR_BASE (op
);
821 addr
->base
= TMR_INDEX2 (op
);
825 addr
->symbol
= NULL_TREE
;
828 gcc_assert (integer_zerop (TMR_BASE (op
)));
829 addr
->base
= TMR_INDEX2 (op
);
832 addr
->base
= TMR_BASE (op
);
834 addr
->index
= TMR_INDEX (op
);
835 addr
->step
= TMR_STEP (op
);
836 addr
->offset
= TMR_OFFSET (op
);
839 /* Copies the reference information from OLD_REF to NEW_REF, where
840 NEW_REF should be either a MEM_REF or a TARGET_MEM_REF. */
843 copy_ref_info (tree new_ref
, tree old_ref
)
845 tree new_ptr_base
= NULL_TREE
;
847 gcc_assert (TREE_CODE (new_ref
) == MEM_REF
848 || TREE_CODE (new_ref
) == TARGET_MEM_REF
);
850 TREE_SIDE_EFFECTS (new_ref
) = TREE_SIDE_EFFECTS (old_ref
);
851 TREE_THIS_VOLATILE (new_ref
) = TREE_THIS_VOLATILE (old_ref
);
853 new_ptr_base
= TREE_OPERAND (new_ref
, 0);
855 /* We can transfer points-to information from an old pointer
856 or decl base to the new one. */
858 && TREE_CODE (new_ptr_base
) == SSA_NAME
859 && !SSA_NAME_PTR_INFO (new_ptr_base
))
861 tree base
= get_base_address (old_ref
);
864 else if ((TREE_CODE (base
) == MEM_REF
865 || TREE_CODE (base
) == TARGET_MEM_REF
)
866 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
867 && SSA_NAME_PTR_INFO (TREE_OPERAND (base
, 0)))
869 struct ptr_info_def
*new_pi
;
870 unsigned int align
, misalign
;
872 duplicate_ssa_name_ptr_info
873 (new_ptr_base
, SSA_NAME_PTR_INFO (TREE_OPERAND (base
, 0)));
874 new_pi
= SSA_NAME_PTR_INFO (new_ptr_base
);
875 /* We have to be careful about transferring alignment information. */
876 if (get_ptr_info_alignment (new_pi
, &align
, &misalign
)
877 && TREE_CODE (old_ref
) == MEM_REF
878 && !(TREE_CODE (new_ref
) == TARGET_MEM_REF
879 && (TMR_INDEX2 (new_ref
)
880 /* TODO: Below conditions can be relaxed if TMR_INDEX
881 is an indcution variable and its initial value and
883 || (TMR_INDEX (new_ref
) && !TMR_STEP (new_ref
))
884 || (TMR_STEP (new_ref
)
885 && (TREE_INT_CST_LOW (TMR_STEP (new_ref
))
888 unsigned int inc
= (mem_ref_offset (old_ref
).to_short_addr ()
889 - mem_ref_offset (new_ref
).to_short_addr ());
890 adjust_ptr_info_misalignment (new_pi
, inc
);
893 mark_ptr_info_alignment_unknown (new_pi
);
895 else if (VAR_P (base
)
896 || TREE_CODE (base
) == PARM_DECL
897 || TREE_CODE (base
) == RESULT_DECL
)
899 struct ptr_info_def
*pi
= get_ptr_info (new_ptr_base
);
900 pt_solution_set_var (&pi
->pt
, base
);
905 /* Move constants in target_mem_ref REF to offset. Returns the new target
906 mem ref if anything changes, NULL_TREE otherwise. */
909 maybe_fold_tmr (tree ref
)
911 struct mem_address addr
;
912 bool changed
= false;
915 get_address_description (ref
, &addr
);
918 && TREE_CODE (addr
.base
) == INTEGER_CST
919 && !integer_zerop (addr
.base
))
921 addr
.offset
= fold_binary_to_constant (PLUS_EXPR
,
922 TREE_TYPE (addr
.offset
),
923 addr
.offset
, addr
.base
);
924 addr
.base
= NULL_TREE
;
929 && TREE_CODE (TREE_OPERAND (addr
.symbol
, 0)) == MEM_REF
)
931 addr
.offset
= fold_binary_to_constant
932 (PLUS_EXPR
, TREE_TYPE (addr
.offset
),
934 TREE_OPERAND (TREE_OPERAND (addr
.symbol
, 0), 1));
935 addr
.symbol
= TREE_OPERAND (TREE_OPERAND (addr
.symbol
, 0), 0);
939 && handled_component_p (TREE_OPERAND (addr
.symbol
, 0)))
941 HOST_WIDE_INT offset
;
942 addr
.symbol
= build_fold_addr_expr
943 (get_addr_base_and_unit_offset
944 (TREE_OPERAND (addr
.symbol
, 0), &offset
));
945 addr
.offset
= int_const_binop (PLUS_EXPR
,
946 addr
.offset
, size_int (offset
));
950 if (addr
.index
&& TREE_CODE (addr
.index
) == INTEGER_CST
)
955 off
= fold_binary_to_constant (MULT_EXPR
, sizetype
,
957 addr
.step
= NULL_TREE
;
960 addr
.offset
= fold_binary_to_constant (PLUS_EXPR
,
961 TREE_TYPE (addr
.offset
),
963 addr
.index
= NULL_TREE
;
970 /* If we have propagated something into this TARGET_MEM_REF and thus
971 ended up folding it, always create a new TARGET_MEM_REF regardless
972 if it is valid in this for on the target - the propagation result
973 wouldn't be anyway. */
974 new_ref
= create_mem_ref_raw (TREE_TYPE (ref
),
975 TREE_TYPE (addr
.offset
), &addr
, false);
976 TREE_SIDE_EFFECTS (new_ref
) = TREE_SIDE_EFFECTS (ref
);
977 TREE_THIS_VOLATILE (new_ref
) = TREE_THIS_VOLATILE (ref
);
981 /* Dump PARTS to FILE. */
983 extern void dump_mem_address (FILE *, struct mem_address
*);
985 dump_mem_address (FILE *file
, struct mem_address
*parts
)
989 fprintf (file
, "symbol: ");
990 print_generic_expr (file
, TREE_OPERAND (parts
->symbol
, 0), TDF_SLIM
);
991 fprintf (file
, "\n");
995 fprintf (file
, "base: ");
996 print_generic_expr (file
, parts
->base
, TDF_SLIM
);
997 fprintf (file
, "\n");
1001 fprintf (file
, "index: ");
1002 print_generic_expr (file
, parts
->index
, TDF_SLIM
);
1003 fprintf (file
, "\n");
1007 fprintf (file
, "step: ");
1008 print_generic_expr (file
, parts
->step
, TDF_SLIM
);
1009 fprintf (file
, "\n");
1013 fprintf (file
, "offset: ");
1014 print_generic_expr (file
, parts
->offset
, TDF_SLIM
);
1015 fprintf (file
, "\n");
1019 #include "gt-tree-ssa-address.h"