1 /* Memory address lowering and addressing mode selection.
2 Copyright (C) 2004-2023 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
21 that directly map to addressing modes of the target. */
25 #include "coretypes.h"
32 #include "stringpool.h"
34 #include "tree-ssanames.h"
36 #include "insn-config.h"
39 #include "tree-pretty-print.h"
40 #include "fold-const.h"
41 #include "stor-layout.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "tree-ssa-loop-ivopts.h"
48 #include "tree-affine.h"
52 /* FIXME: We compute address costs using RTL. */
53 #include "tree-ssa-address.h"
55 /* TODO -- handling of symbols (according to Richard Hendersons
56 comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):
58 There are at least 5 different kinds of symbols that we can run up against:
60 (1) binds_local_p, small data area.
61 (2) binds_local_p, eg local statics
62 (3) !binds_local_p, eg global variables
63 (4) thread local, local_exec
64 (5) thread local, !local_exec
66 Now, (1) won't appear often in an array context, but it certainly can.
67 All you have to do is set -GN high enough, or explicitly mark any
68 random object __attribute__((section (".sdata"))).
70 All of these affect whether or not a symbol is in fact a valid address.
71 The only one tested here is (3). And that result may very well
72 be incorrect for (4) or (5).
74 An incorrect result here does not cause incorrect results out the
75 back end, because the expander in expr.cc validizes the address. However
76 it would be nice to improve the handling here in order to produce more
79 /* A "template" for memory address, used to determine whether the address is
/* NOTE(review): fragmented extraction -- several original lines (including the
   struct's closing brace) are missing from this chunk; code kept byte-identical.
   A GC-managed record caching an address RTL skeleton (REF) together with
   pointers into that RTL where the step and offset constants are patched in
   when the template is reused; GTY((skip)) keeps the interior pointers out of
   GC tracing since they point into REF itself.  */
82 struct GTY (()) mem_addr_template
{
83 rtx ref
; /* The template. */
84 rtx
* GTY ((skip
)) step_p
; /* The point in template where the step should be
86 rtx
* GTY ((skip
)) off_p
; /* The point in template where the offset should
91 /* The templates. Each of the low five bits of the index corresponds to one
92 component of TARGET_MEM_REF being present, while the high bits identify
93 the address space. See TEMPL_IDX. */
95 static GTY(()) vec
<mem_addr_template
, va_gc
> *mem_addr_template_list
;
/* NOTE(review): maps the address space plus presence flags of the five
   TARGET_MEM_REF components (symbol, base, index, step, offset) to an index
   into mem_addr_template_list; the first and last macro lines are missing
   from this extraction.  No comment can be interleaved below because of the
   backslash line continuations.  */
97 #define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
99 | ((SYMBOL != 0) << 4) \
100 | ((BASE != 0) << 3) \
101 | ((INDEX != 0) << 2) \
102 | ((STEP != 0) << 1) \
105 /* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
106 STEP and OFFSET to *ADDR using address mode ADDRESS_MODE. Stores pointers
107 to where step is placed to *STEP_P and offset to *OFFSET_P. */
/* NOTE(review): fragmented extraction -- several original lines are missing;
   code kept byte-identical.  Builds in *ADDR the RTL for an address composed
   of SYMBOL + BASE + INDEX * STEP + OFFSET in ADDRESS_MODE, and records in
   *STEP_P / *OFFSET_P (when non-NULL) the locations inside the RTL where the
   step and offset operands sit, so a cached template can be re-patched with
   new constants later.  */
110 gen_addr_rtx (machine_mode address_mode
,
111 rtx symbol
, rtx base
, rtx index
, rtx step
, rtx offset
,
112 rtx
*addr
, rtx
**step_p
, rtx
**offset_p
)
/* Multiply the index by the step when both are present.  */
122 if (index
&& index
!= const0_rtx
)
127 act_elem
= gen_rtx_MULT (address_mode
, act_elem
, step
);
130 *step_p
= &XEXP (act_elem
, 1);
136 if (base
&& base
!= const0_rtx
)
139 *addr
= simplify_gen_binary (PLUS
, address_mode
, base
, *addr
);
149 act_elem
= gen_rtx_PLUS (address_mode
, act_elem
, offset
);
152 *offset_p
= &XEXP (act_elem
, 1);
/* Symbolic constants combined with an offset must be wrapped in CONST
   so the RTL remains a legitimate constant address.  */
154 if (GET_CODE (symbol
) == SYMBOL_REF
155 || GET_CODE (symbol
) == LABEL_REF
156 || GET_CODE (symbol
) == CONST
)
157 act_elem
= gen_rtx_CONST (address_mode
, act_elem
);
161 *addr
= gen_rtx_PLUS (address_mode
, *addr
, act_elem
);
169 *addr
= gen_rtx_PLUS (address_mode
, *addr
, offset
);
171 *offset_p
= &XEXP (*addr
, 1);
185 /* Returns address for TARGET_MEM_REF with parameters given by ADDR
187 If REALLY_EXPAND is false, just make fake registers instead
188 of really expanding the operands, and perform the expansion in-place
189 by using one of the "templates". */
/* NOTE(review): fragmented extraction -- several original lines are missing;
   code kept byte-identical.  Returns the address RTX for the parts in *ADDR
   in address space AS.  When not really expanding, fake registers and a
   cached template (indexed by TEMPL_IDX over the present components) are
   used and only the step/offset constants are patched in; otherwise each
   tree operand is expanded for real with expand_expr.  */
192 addr_for_mem_ref (struct mem_address
*addr
, addr_space_t as
,
195 scalar_int_mode address_mode
= targetm
.addr_space
.address_mode (as
);
196 scalar_int_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
197 rtx address
, sym
, bse
, idx
, st
, off
;
198 struct mem_addr_template
*templ
;
/* Step of 1 and offset of 0 are degenerate; treat them as absent.  */
200 if (addr
->step
&& !integer_onep (addr
->step
))
201 st
= immed_wide_int_const (wi::to_wide (addr
->step
), pointer_mode
);
205 if (addr
->offset
&& !integer_zerop (addr
->offset
))
208 = poly_offset_int::from (wi::to_poly_wide (addr
->offset
), SIGNED
);
209 off
= immed_wide_int_const (dc
, pointer_mode
);
216 unsigned int templ_index
217 = TEMPL_IDX (as
, addr
->symbol
, addr
->base
, addr
->index
, st
, off
);
219 if (templ_index
>= vec_safe_length (mem_addr_template_list
))
220 vec_safe_grow_cleared (mem_addr_template_list
, templ_index
+ 1, true);
222 /* Reuse the templates for addresses, so that we do not waste memory. */
223 templ
= &(*mem_addr_template_list
)[templ_index
];
/* Placeholder symbol / fake hard-registers past the virtual range stand
   in for the real operands inside a cached template.  */
226 sym
= (addr
->symbol
?
227 gen_rtx_SYMBOL_REF (pointer_mode
, ggc_strdup ("test_symbol"))
230 gen_raw_REG (pointer_mode
, LAST_VIRTUAL_REGISTER
+ 1)
233 gen_raw_REG (pointer_mode
, LAST_VIRTUAL_REGISTER
+ 2)
236 gen_addr_rtx (pointer_mode
, sym
, bse
, idx
,
237 st
? const0_rtx
: NULL_RTX
,
238 off
? const0_rtx
: NULL_RTX
,
252 /* Otherwise really expand the expressions. */
254 ? expand_expr (addr
->symbol
, NULL_RTX
, pointer_mode
, EXPAND_NORMAL
)
257 ? expand_expr (addr
->base
, NULL_RTX
, pointer_mode
, EXPAND_NORMAL
)
260 ? expand_expr (addr
->index
, NULL_RTX
, pointer_mode
, EXPAND_NORMAL
)
263 /* addr->base could be an SSA_NAME that was set to a constant value. The
264 call to expand_expr may expose that constant. If so, fold the value
265 into OFF and clear BSE. Otherwise we may later try to pull a mode from
266 BSE to generate a REG, which won't work with constants because they
268 if (bse
&& GET_CODE (bse
) == CONST_INT
)
271 off
= simplify_gen_binary (PLUS
, pointer_mode
, bse
, off
);
274 gcc_assert (GET_CODE (off
) == CONST_INT
);
277 gen_addr_rtx (pointer_mode
, sym
, bse
, idx
, st
, off
, &address
, NULL
, NULL
);
/* Pointers and addresses may differ in width (e.g. segmented targets);
   extend/truncate into the address mode at the end.  */
278 if (pointer_mode
!= address_mode
)
279 address
= convert_memory_address (address_mode
, address
);
283 /* implement addr_for_mem_ref() directly from a tree, which avoids exporting
284 the mem_address structure. */
287 addr_for_mem_ref (tree exp
, addr_space_t as
, bool really_expand
)
289 struct mem_address addr
;
290 get_address_description (exp
, &addr
);
291 return addr_for_mem_ref (&addr
, as
, really_expand
);
294 /* Returns address of MEM_REF in TYPE. */
/* NOTE(review): fragmented extraction -- several original lines are missing;
   code kept byte-identical.  Rebuilds a GENERIC address expression of TYPE
   equivalent to the TARGET_MEM_REF: base + index * step + index2 + offset,
   skipping absent or degenerate components.  */
297 tree_mem_ref_addr (tree type
, tree mem_ref
)
301 tree step
= TMR_STEP (mem_ref
), offset
= TMR_OFFSET (mem_ref
);
302 tree addr_base
= NULL_TREE
, addr_off
= NULL_TREE
;
304 addr_base
= fold_convert (type
, TMR_BASE (mem_ref
));
306 act_elem
= TMR_INDEX (mem_ref
);
310 act_elem
= fold_build2 (MULT_EXPR
, TREE_TYPE (act_elem
),
315 act_elem
= TMR_INDEX2 (mem_ref
);
319 addr_off
= fold_build2 (PLUS_EXPR
, TREE_TYPE (addr_off
),
325 if (offset
&& !integer_zerop (offset
))
328 addr_off
= fold_build2 (PLUS_EXPR
, TREE_TYPE (addr_off
), addr_off
,
329 fold_convert (TREE_TYPE (addr_off
), offset
));
335 addr
= fold_build_pointer_plus (addr_base
, addr_off
);
342 /* Returns true if a memory reference in MODE and with parameters given by
343 ADDR is valid on the current target. */
346 valid_mem_ref_p (machine_mode mode
, addr_space_t as
,
347 struct mem_address
*addr
, code_helper ch
)
351 address
= addr_for_mem_ref (addr
, as
, false);
355 return memory_address_addr_space_p (mode
, address
, as
, ch
);
358 /* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
359 is valid on the current target and if so, creates and returns the
360 TARGET_MEM_REF. If VERIFY is false omit the verification step. */
/* NOTE(review): fragmented extraction -- several original lines are missing;
   code kept byte-identical.  Canonicalizes the parts (drops step of 1, forces
   an offset of ALIAS_PTR_TYPE), then prefers a plain MEM_REF whenever only a
   constant-address base and offset remain, else builds the 5-operand
   TARGET_MEM_REF.  */
363 create_mem_ref_raw (tree type
, tree alias_ptr_type
, struct mem_address
*addr
,
369 && !valid_mem_ref_p (TYPE_MODE (type
), TYPE_ADDR_SPACE (type
), addr
))
372 if (addr
->step
&& integer_onep (addr
->step
))
373 addr
->step
= NULL_TREE
;
376 addr
->offset
= fold_convert (alias_ptr_type
, addr
->offset
);
378 addr
->offset
= build_int_cst (alias_ptr_type
, 0);
386 && POINTER_TYPE_P (TREE_TYPE (addr
->base
)))
393 base
= build_int_cst (build_pointer_type (type
), 0);
397 /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
398 ??? As IVOPTs does not follow restrictions to where the base
399 pointer may point to create a MEM_REF only if we know that
401 if ((TREE_CODE (base
) == ADDR_EXPR
|| TREE_CODE (base
) == INTEGER_CST
)
402 && (!index2
|| integer_zerop (index2
))
403 && (!addr
->index
|| integer_zerop (addr
->index
)))
404 return fold_build2 (MEM_REF
, type
, base
, addr
->offset
);
406 return build5 (TARGET_MEM_REF
, type
,
407 base
, addr
->offset
, addr
->index
, addr
->step
, index2
);
410 /* Returns true if OBJ is an object whose address is a link time constant. */
413 fixed_address_object_p (tree obj
)
416 && (TREE_STATIC (obj
) || DECL_EXTERNAL (obj
))
417 && ! DECL_DLLIMPORT_P (obj
));
420 /* If ADDR contains an address of object that is a link time constant,
421 move it to PARTS->symbol. */
/* NOTE(review): fragmented extraction -- several original lines are missing;
   code kept byte-identical.  Scans the affine combination for an element with
   coefficient 1 that is an ADDR_EXPR of a fixed-address object, and removes
   it from ADDR once claimed as the symbol part.  */
424 move_fixed_address_to_symbol (struct mem_address
*parts
, aff_tree
*addr
)
427 tree val
= NULL_TREE
;
429 for (i
= 0; i
< addr
->n
; i
++)
/* Only coefficient-1 elements can become the symbol part.  */
431 if (addr
->elts
[i
].coef
!= 1)
434 val
= addr
->elts
[i
].val
;
435 if (TREE_CODE (val
) == ADDR_EXPR
436 && fixed_address_object_p (TREE_OPERAND (val
, 0)))
444 aff_combination_remove_elt (addr
, i
);
447 /* Return true if ADDR contains an instance of BASE_HINT and it's moved to
/* NOTE(review): fragmented extraction -- several original lines are missing;
   code kept byte-identical.  Looks for a coefficient-1 element equal to
   BASE_HINT and, if found, installs it as PARTS->base converted to a
   void-pointer of TYPE's address space, removing it from ADDR.  */
451 move_hint_to_base (tree type
, struct mem_address
*parts
, tree base_hint
,
455 tree val
= NULL_TREE
;
458 for (i
= 0; i
< addr
->n
; i
++)
460 if (addr
->elts
[i
].coef
!= 1)
463 val
= addr
->elts
[i
].val
;
464 if (operand_equal_p (val
, base_hint
, 0))
471 /* Cast value to appropriate pointer type. We cannot use a pointer
472 to TYPE directly, as the back-end will assume registers of pointer
473 type are aligned, and just the base itself may not actually be.
474 We use void pointer to the type's address space instead. */
475 qual
= ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type
));
476 type
= build_qualified_type (void_type_node
, qual
);
477 parts
->base
= fold_convert (build_pointer_type (type
), val
);
478 aff_combination_remove_elt (addr
, i
);
482 /* If ADDR contains an address of a dereferenced pointer, move it to
/* NOTE(review): fragmented extraction -- several original lines are missing;
   code kept byte-identical.  Picks the first coefficient-1 element of
   pointer type as PARTS->base and removes it from the combination.  */
486 move_pointer_to_base (struct mem_address
*parts
, aff_tree
*addr
)
489 tree val
= NULL_TREE
;
491 for (i
= 0; i
< addr
->n
; i
++)
493 if (addr
->elts
[i
].coef
!= 1)
496 val
= addr
->elts
[i
].val
;
497 if (POINTER_TYPE_P (TREE_TYPE (val
)))
505 aff_combination_remove_elt (addr
, i
);
508 /* Moves the loop variant part V in linear address ADDR to be the index
/* NOTE(review): fragmented extraction -- several original lines are missing;
   code kept byte-identical.  Requires that no index has been chosen yet;
   finds the element equal to V, makes it PARTS->index (converted to sizetype)
   with its coefficient as PARTS->step, and drops it from ADDR.  */
512 move_variant_to_index (struct mem_address
*parts
, aff_tree
*addr
, tree v
)
515 tree val
= NULL_TREE
;
517 gcc_assert (!parts
->index
);
518 for (i
= 0; i
< addr
->n
; i
++)
520 val
= addr
->elts
[i
].val
;
521 if (operand_equal_p (val
, v
, 0))
528 parts
->index
= fold_convert (sizetype
, val
);
529 parts
->step
= wide_int_to_tree (sizetype
, addr
->elts
[i
].coef
);
530 aff_combination_remove_elt (addr
, i
);
533 /* Adds ELT to PARTS. */
/* NOTE(review): fragmented extraction -- several original lines (the
   index/base vacancy checks) are missing; code kept byte-identical.  Folds a
   leftover affine element into the index slot when free, otherwise adds it to
   the base, using POINTER_PLUS for pointer-typed bases.  */
536 add_to_parts (struct mem_address
*parts
, tree elt
)
542 parts
->index
= fold_convert (sizetype
, elt
);
552 /* Add ELT to base. */
553 type
= TREE_TYPE (parts
->base
);
554 if (POINTER_TYPE_P (type
))
555 parts
->base
= fold_build_pointer_plus (parts
->base
, elt
);
557 parts
->base
= fold_build2 (PLUS_EXPR
, type
, parts
->base
, elt
);
560 /* Returns true if multiplying by RATIO is allowed in an address. Test the
561 validity for a memory reference accessing memory of mode MODE in address
/* NOTE(review): fragmented extraction -- several original lines are missing;
   code kept byte-identical.  Lazily builds, per (address space, mode) pair, a
   bitmap over [-MAX_RATIO, MAX_RATIO] of multipliers accepted by the target's
   addressing modes, probing both "reg1 * i + reg2" and bare "reg1 * i"
   shapes, and caches it in a static vec keyed by AS * MAX_MACHINE_MODE +
   MODE.  */
565 multiplier_allowed_in_address_p (HOST_WIDE_INT ratio
, machine_mode mode
,
568 #define MAX_RATIO 128
569 unsigned int data_index
= (int) as
* MAX_MACHINE_MODE
+ (int) mode
;
570 static vec
<sbitmap
> valid_mult_list
;
573 if (data_index
>= valid_mult_list
.length ())
574 valid_mult_list
.safe_grow_cleared (data_index
+ 1, true);
576 valid_mult
= valid_mult_list
[data_index
];
579 machine_mode address_mode
= targetm
.addr_space
.address_mode (as
);
/* Fake registers beyond the virtual range; never allocated for real.  */
580 rtx reg1
= gen_raw_REG (address_mode
, LAST_VIRTUAL_REGISTER
+ 1);
581 rtx reg2
= gen_raw_REG (address_mode
, LAST_VIRTUAL_REGISTER
+ 2);
585 valid_mult
= sbitmap_alloc (2 * MAX_RATIO
+ 1);
586 bitmap_clear (valid_mult
);
587 scaled
= gen_rtx_fmt_ee (MULT
, address_mode
, reg1
, NULL_RTX
);
588 addr
= gen_rtx_fmt_ee (PLUS
, address_mode
, scaled
, reg2
);
589 for (i
= -MAX_RATIO
; i
<= MAX_RATIO
; i
++)
591 XEXP (scaled
, 1) = gen_int_mode (i
, address_mode
);
592 if (memory_address_addr_space_p (mode
, addr
, as
)
593 || memory_address_addr_space_p (mode
, scaled
, as
))
/* Bias by MAX_RATIO so negative multipliers index the bitmap.  */
594 bitmap_set_bit (valid_mult
, i
+ MAX_RATIO
);
597 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
599 fprintf (dump_file
, " allowed multipliers:");
600 for (i
= -MAX_RATIO
; i
<= MAX_RATIO
; i
++)
601 if (bitmap_bit_p (valid_mult
, i
+ MAX_RATIO
))
602 fprintf (dump_file
, " %d", (int) i
);
603 fprintf (dump_file
, "\n");
604 fprintf (dump_file
, "\n");
607 valid_mult_list
[data_index
] = valid_mult
;
610 if (ratio
> MAX_RATIO
|| ratio
< -MAX_RATIO
)
613 return bitmap_bit_p (valid_mult
, ratio
+ MAX_RATIO
);
616 /* Finds the most expensive multiplication in ADDR that can be
617 expressed in an addressing mode and move the corresponding
618 element(s) to PARTS. */
/* NOTE(review): fragmented extraction -- several original lines are missing;
   code kept byte-identical.  First pass: among coefficients that fit a shwi
   and are allowed multipliers, pick the one whose multiply-by-constant cost
   is highest.  Second pass: fold every element whose coefficient is that
   multiplier (or its negation, via MINUS/NEGATE) into a single index
   expression, compacting the untouched elements back into ADDR.  */
621 most_expensive_mult_to_index (tree type
, struct mem_address
*parts
,
622 aff_tree
*addr
, bool speed
)
624 addr_space_t as
= TYPE_ADDR_SPACE (type
);
625 machine_mode address_mode
= targetm
.addr_space
.address_mode (as
);
627 unsigned best_mult_cost
= 0, acost
;
628 tree mult_elt
= NULL_TREE
, elt
;
630 enum tree_code op_code
;
632 offset_int best_mult
= 0;
633 for (i
= 0; i
< addr
->n
; i
++)
635 if (!wi::fits_shwi_p (addr
->elts
[i
].coef
))
638 coef
= addr
->elts
[i
].coef
.to_shwi ();
640 || !multiplier_allowed_in_address_p (coef
, TYPE_MODE (type
), as
))
643 acost
= mult_by_coeff_cost (coef
, address_mode
, speed
);
645 if (acost
> best_mult_cost
)
647 best_mult_cost
= acost
;
648 best_mult
= offset_int::from (addr
->elts
[i
].coef
, SIGNED
);
655 /* Collect elements multiplied by best_mult. */
656 for (i
= j
= 0; i
< addr
->n
; i
++)
658 offset_int amult
= offset_int::from (addr
->elts
[i
].coef
, SIGNED
);
/* Negated match in the precision of the address type also qualifies;
   such elements are subtracted instead of added.  */
659 offset_int amult_neg
= -wi::sext (amult
, TYPE_PRECISION (addr
->type
));
661 if (amult
== best_mult
)
663 else if (amult_neg
== best_mult
)
664 op_code
= MINUS_EXPR
;
667 addr
->elts
[j
] = addr
->elts
[i
];
672 elt
= fold_convert (sizetype
, addr
->elts
[i
].val
);
674 mult_elt
= fold_build2 (op_code
, sizetype
, mult_elt
, elt
);
675 else if (op_code
== PLUS_EXPR
)
678 mult_elt
= fold_build1 (NEGATE_EXPR
, sizetype
, elt
);
682 parts
->index
= mult_elt
;
683 parts
->step
= wide_int_to_tree (sizetype
, best_mult
);
686 /* Splits address ADDR for a memory access of type TYPE into PARTS.
687 If BASE_HINT is non-NULL, it specifies an SSA name to be used
688 preferentially as base of the reference, and IV_CAND is the selected
689 iv candidate used in ADDR. Store true to VAR_IN_BASE if variant
690 part of address is split to PARTS.base.
692 TODO -- be more clever about the distribution of the elements of ADDR
693 to PARTS. Some architectures do not support anything but single
694 register in address, possibly with a small integer offset; while
695 create_mem_ref will simplify the address to an acceptable shape
696 later, it would be more efficient to know that asking for complicated
697 addressing modes is useless. */
/* NOTE(review): fragmented extraction -- several original lines are missing;
   code kept byte-identical.  Pipeline: clear parts, lift the constant into
   offset, claim a link-time symbol, place the hinted/variant element into
   base or index, fold the costliest allowed multiplier into index*step, adopt
   a pointer element as base, then distribute whatever elements remain.  */
700 addr_to_parts (tree type
, aff_tree
*addr
, tree iv_cand
, tree base_hint
,
701 struct mem_address
*parts
, bool *var_in_base
, bool speed
)
706 parts
->symbol
= NULL_TREE
;
707 parts
->base
= NULL_TREE
;
708 parts
->index
= NULL_TREE
;
709 parts
->step
= NULL_TREE
;
711 if (maybe_ne (addr
->offset
, 0))
712 parts
->offset
= wide_int_to_tree (sizetype
, addr
->offset
);
714 parts
->offset
= NULL_TREE
;
716 /* Try to find a symbol. */
717 move_fixed_address_to_symbol (parts
, addr
);
719 /* Since at the moment there is no reliable way to know how to
720 distinguish between pointer and its offset, we decide if var
721 part is the pointer based on guess. */
722 *var_in_base
= (base_hint
!= NULL
&& parts
->symbol
== NULL
);
724 *var_in_base
= move_hint_to_base (type
, parts
, base_hint
, addr
);
726 move_variant_to_index (parts
, addr
, iv_cand
);
728 /* First move the most expensive feasible multiplication to index. */
730 most_expensive_mult_to_index (type
, parts
, addr
, speed
);
732 /* Move pointer into base. */
733 if (!parts
->symbol
&& !parts
->base
)
734 move_pointer_to_base (parts
, addr
);
736 /* Then try to process the remaining elements. */
737 for (i
= 0; i
< addr
->n
; i
++)
739 part
= fold_convert (sizetype
, addr
->elts
[i
].val
);
740 if (addr
->elts
[i
].coef
!= 1)
741 part
= fold_build2 (MULT_EXPR
, sizetype
, part
,
742 wide_int_to_tree (sizetype
, addr
->elts
[i
].coef
));
743 add_to_parts (parts
, part
);
746 add_to_parts (parts
, fold_convert (sizetype
, addr
->rest
));
749 /* Force the PARTS to register. */
/* NOTE(review): fragmented extraction -- the guards checking that base/index
   are present appear to be missing; code kept byte-identical.  Gimplifies the
   base and index operands into forms acceptable as memory-reference address
   operands, emitting any needed statements before GSI.  */
752 gimplify_mem_ref_parts (gimple_stmt_iterator
*gsi
, struct mem_address
*parts
)
755 parts
->base
= force_gimple_operand_gsi_1 (gsi
, parts
->base
,
756 is_gimple_mem_ref_addr
, NULL_TREE
,
757 true, GSI_SAME_STMT
);
759 parts
->index
= force_gimple_operand_gsi (gsi
, parts
->index
,
761 true, GSI_SAME_STMT
);
764 /* Return true if the OFFSET in PARTS is the only thing that is making
765 it an invalid address for type TYPE. */
/* NOTE(review): PARTS is taken by value, so the mutations below are local.
   A guard line before the base assignment appears missing from this
   extraction -- presumably the offset is only promoted to base when no base
   is present (TODO confirm against upstream).  */
768 mem_ref_valid_without_offset_p (tree type
, mem_address parts
)
771 parts
.base
= parts
.offset
;
772 parts
.offset
= NULL_TREE
;
773 return valid_mem_ref_p (TYPE_MODE (type
), TYPE_ADDR_SPACE (type
), &parts
);
776 /* Fold PARTS->offset into PARTS->base, so that there is no longer
777 a separate offset. Emit any new instructions before GSI. */
/* NOTE(review): fragmented extraction -- code kept byte-identical.  Combines
   base and offset with a POINTER_PLUS, gimplifies the result into a valid
   mem-ref address operand, and clears the offset slot.  */
780 add_offset_to_base (gimple_stmt_iterator
*gsi
, mem_address
*parts
)
782 tree tmp
= parts
->offset
;
785 tmp
= fold_build_pointer_plus (parts
->base
, tmp
);
786 tmp
= force_gimple_operand_gsi_1 (gsi
, tmp
, is_gimple_mem_ref_addr
,
787 NULL_TREE
, true, GSI_SAME_STMT
);
790 parts
->offset
= NULL_TREE
;
793 /* Creates and returns a TARGET_MEM_REF for address ADDR. If necessary
794 computations are emitted in front of GSI. TYPE is the mode
795 of created memory reference. IV_CAND is the selected iv candidate in ADDR,
796 and BASE_HINT is non NULL if IV_CAND comes from a base address
/* NOTE(review): fragmented extraction -- many original lines (the retry
   checks after each create_mem_ref_raw call) are missing; code kept
   byte-identical.  Strategy: split the address into parts, gimplify, and try
   to build the TARGET_MEM_REF; on failure, progressively fold components
   together (symbol into base/index, step into index, offset into the
   invariant part, index into base, offset into base) retrying after each
   simplification until only a bare base register remains.  */
800 create_mem_ref (gimple_stmt_iterator
*gsi
, tree type
, aff_tree
*addr
,
801 tree alias_ptr_type
, tree iv_cand
, tree base_hint
, bool speed
)
805 struct mem_address parts
;
807 addr_to_parts (type
, addr
, iv_cand
, base_hint
, &parts
, &var_in_base
, speed
);
808 gimplify_mem_ref_parts (gsi
, &parts
);
809 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
813 /* The expression is too complicated. Try making it simpler. */
815 /* Merge symbol into other parts. */
819 parts
.symbol
= NULL_TREE
;
820 gcc_assert (is_gimple_val (tmp
));
824 gcc_assert (useless_type_conversion_p (sizetype
,
825 TREE_TYPE (parts
.base
)));
829 /* Add the symbol to base, eventually forcing it to register. */
830 tmp
= fold_build_pointer_plus (tmp
, parts
.base
);
831 tmp
= force_gimple_operand_gsi_1 (gsi
, tmp
,
832 is_gimple_mem_ref_addr
,
838 /* Move base to index, then move the symbol to base. */
839 parts
.index
= parts
.base
;
846 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
851 /* Move multiplication to index by transforming address expression:
852 [... + index << step + ...]
854 index' = index << step;
855 [... + index' + ,,,]. */
856 if (parts
.step
&& !integer_onep (parts
.step
))
858 gcc_assert (parts
.index
);
859 if (parts
.offset
&& mem_ref_valid_without_offset_p (type
, parts
))
861 add_offset_to_base (gsi
, &parts
);
862 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
863 gcc_assert (mem_ref
);
867 parts
.index
= force_gimple_operand_gsi (gsi
,
868 fold_build2 (MULT_EXPR
, sizetype
,
869 parts
.index
, parts
.step
),
870 true, NULL_TREE
, true, GSI_SAME_STMT
);
871 parts
.step
= NULL_TREE
;
873 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
878 /* Add offset to invariant part by transforming address expression:
879 [base + index + offset]
881 base' = base + offset;
884 index' = index + offset;
886 depending on which one is invariant. */
887 if (parts
.offset
&& !integer_zerop (parts
.offset
))
/* Unshared copies so parts can be restored for the next fallback.  */
889 tree old_base
= unshare_expr (parts
.base
);
890 tree old_index
= unshare_expr (parts
.index
);
891 tree old_offset
= unshare_expr (parts
.offset
);
894 parts
.offset
= NULL_TREE
;
895 /* Add offset to invariant part. */
900 tmp
= fold_build_pointer_plus (parts
.base
, tmp
);
901 tmp
= force_gimple_operand_gsi_1 (gsi
, tmp
,
902 is_gimple_mem_ref_addr
,
912 tmp
= fold_build_pointer_plus (parts
.index
, tmp
);
913 tmp
= force_gimple_operand_gsi_1 (gsi
, tmp
,
914 is_gimple_mem_ref_addr
,
921 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
925 /* Restore parts.base, index and offset so that we can check if
926 [base + offset] addressing mode is supported in next step.
927 This is necessary for targets only support [base + offset],
928 but not [base + index] addressing mode. */
929 parts
.base
= old_base
;
930 parts
.index
= old_index
;
931 parts
.offset
= old_offset
;
934 /* Transform [base + index + ...] into:
935 base' = base + index;
940 parts
.index
= NULL_TREE
;
941 /* Add index to base. */
944 tmp
= fold_build_pointer_plus (parts
.base
, tmp
);
945 tmp
= force_gimple_operand_gsi_1 (gsi
, tmp
,
946 is_gimple_mem_ref_addr
,
947 NULL_TREE
, true, GSI_SAME_STMT
);
951 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
956 /* Transform [base + offset] into:
957 base' = base + offset;
959 if (parts
.offset
&& !integer_zerop (parts
.offset
))
961 add_offset_to_base (gsi
, &parts
);
962 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
967 /* Verify that the address is in the simplest possible shape
968 (only a register). If we cannot create such a memory reference,
969 something is really wrong. */
970 gcc_assert (parts
.symbol
== NULL_TREE
);
971 gcc_assert (parts
.index
== NULL_TREE
);
972 gcc_assert (!parts
.step
|| integer_onep (parts
.step
));
973 gcc_assert (!parts
.offset
|| integer_zerop (parts
.offset
));
977 /* Copies components of the address from OP to ADDR. */
/* NOTE(review): fragmented extraction -- several original lines are missing;
   code kept byte-identical.  Decomposes a TARGET_MEM_REF: an ADDR_EXPR base
   becomes the symbol with INDEX2 as base; otherwise the base/index slots are
   taken directly, with INDEX2 as base when a second index is present (the
   real base then being a zero constant).  */
980 get_address_description (tree op
, struct mem_address
*addr
)
982 if (TREE_CODE (TMR_BASE (op
)) == ADDR_EXPR
)
984 addr
->symbol
= TMR_BASE (op
);
985 addr
->base
= TMR_INDEX2 (op
);
989 addr
->symbol
= NULL_TREE
;
992 gcc_assert (integer_zerop (TMR_BASE (op
)));
993 addr
->base
= TMR_INDEX2 (op
);
996 addr
->base
= TMR_BASE (op
);
998 addr
->index
= TMR_INDEX (op
);
999 addr
->step
= TMR_STEP (op
);
1000 addr
->offset
= TMR_OFFSET (op
);
1003 /* Copies the reference information from OLD_REF to NEW_REF, where
1004 NEW_REF should be either a MEM_REF or a TARGET_MEM_REF. */
/* NOTE(review): fragmented extraction -- several original lines are missing;
   code kept byte-identical.  Transfers side-effect and volatility flags,
   points-to info for an SSA pointer base (from either an old pointer base or
   a decl base), dependence clique/base, and re-widens NEW_REF's type when the
   rewrite lost object-alignment information.  */
1007 copy_ref_info (tree new_ref
, tree old_ref
)
1009 tree new_ptr_base
= NULL_TREE
;
1011 gcc_assert (TREE_CODE (new_ref
) == MEM_REF
1012 || TREE_CODE (new_ref
) == TARGET_MEM_REF
);
1014 TREE_SIDE_EFFECTS (new_ref
) = TREE_SIDE_EFFECTS (old_ref
);
1015 TREE_THIS_VOLATILE (new_ref
) = TREE_THIS_VOLATILE (old_ref
);
1017 new_ptr_base
= TREE_OPERAND (new_ref
, 0);
1019 tree base
= get_base_address (old_ref
);
1023 /* We can transfer points-to information from an old pointer
1024 or decl base to the new one. */
1026 && TREE_CODE (new_ptr_base
) == SSA_NAME
1027 && !SSA_NAME_PTR_INFO (new_ptr_base
))
1029 if ((TREE_CODE (base
) == MEM_REF
1030 || TREE_CODE (base
) == TARGET_MEM_REF
)
1031 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
1032 && SSA_NAME_PTR_INFO (TREE_OPERAND (base
, 0)))
1034 duplicate_ssa_name_ptr_info
1035 (new_ptr_base
, SSA_NAME_PTR_INFO (TREE_OPERAND (base
, 0)));
/* Flow-sensitive info does not survive the address rewrite.  */
1036 reset_flow_sensitive_info (new_ptr_base
);
1038 else if (VAR_P (base
)
1039 || TREE_CODE (base
) == PARM_DECL
1040 || TREE_CODE (base
) == RESULT_DECL
)
1042 struct ptr_info_def
*pi
= get_ptr_info (new_ptr_base
);
1043 pt_solution_set_var (&pi
->pt
, base
);
1047 /* We can transfer dependence info. */
1048 if (!MR_DEPENDENCE_CLIQUE (new_ref
)
1049 && (TREE_CODE (base
) == MEM_REF
1050 || TREE_CODE (base
) == TARGET_MEM_REF
)
1051 && MR_DEPENDENCE_CLIQUE (base
))
1053 MR_DEPENDENCE_CLIQUE (new_ref
) = MR_DEPENDENCE_CLIQUE (base
);
1054 MR_DEPENDENCE_BASE (new_ref
) = MR_DEPENDENCE_BASE (base
);
1057 /* And alignment info. Note we cannot transfer misalignment info
1058 since that sits on the SSA name but this is flow-sensitive info
1059 which we cannot transfer in this generic routine. */
1060 unsigned old_align
= get_object_alignment (old_ref
);
1061 unsigned new_align
= get_object_alignment (new_ref
);
1062 if (new_align
< old_align
)
1063 TREE_TYPE (new_ref
) = build_aligned_type (TREE_TYPE (new_ref
), old_align
);
1066 /* Move constants in target_mem_ref REF to offset. Returns the new target
1067 mem ref if anything changes, NULL_TREE otherwise. */
/* NOTE(review): fragmented extraction -- several original lines (including
   the 'changed' updates and early return) are missing; code kept
   byte-identical.  Folds a constant base into the offset, flattens symbols
   that are ADDR_EXPRs of MEM_REFs or handled components into symbol+offset,
   and collapses a constant index (scaled by step) into the offset, then
   rebuilds the reference without re-verifying (propagation already
   invalidated it).  */
1070 maybe_fold_tmr (tree ref
)
1072 struct mem_address addr
;
1073 bool changed
= false;
1076 get_address_description (ref
, &addr
);
1079 && TREE_CODE (addr
.base
) == INTEGER_CST
1080 && !integer_zerop (addr
.base
))
1082 addr
.offset
= fold_binary_to_constant (PLUS_EXPR
,
1083 TREE_TYPE (addr
.offset
),
1084 addr
.offset
, addr
.base
);
1085 addr
.base
= NULL_TREE
;
1090 && TREE_CODE (TREE_OPERAND (addr
.symbol
, 0)) == MEM_REF
)
1092 addr
.offset
= fold_binary_to_constant
1093 (PLUS_EXPR
, TREE_TYPE (addr
.offset
),
1095 TREE_OPERAND (TREE_OPERAND (addr
.symbol
, 0), 1));
1096 addr
.symbol
= TREE_OPERAND (TREE_OPERAND (addr
.symbol
, 0), 0);
1099 else if (addr
.symbol
1100 && handled_component_p (TREE_OPERAND (addr
.symbol
, 0)))
1103 addr
.symbol
= build_fold_addr_expr
1104 (get_addr_base_and_unit_offset
1105 (TREE_OPERAND (addr
.symbol
, 0), &offset
));
1106 addr
.offset
= int_const_binop (PLUS_EXPR
,
1107 addr
.offset
, size_int (offset
));
1111 if (addr
.index
&& TREE_CODE (addr
.index
) == INTEGER_CST
)
1116 off
= fold_binary_to_constant (MULT_EXPR
, sizetype
,
1118 addr
.step
= NULL_TREE
;
1121 addr
.offset
= fold_binary_to_constant (PLUS_EXPR
,
1122 TREE_TYPE (addr
.offset
),
1124 addr
.index
= NULL_TREE
;
1131 /* If we have propagated something into this TARGET_MEM_REF and thus
1132 ended up folding it, always create a new TARGET_MEM_REF regardless
1133 if it is valid in this for on the target - the propagation result
1134 wouldn't be anyway. */
1135 new_ref
= create_mem_ref_raw (TREE_TYPE (ref
),
1136 TREE_TYPE (addr
.offset
), &addr
, false);
1137 TREE_SIDE_EFFECTS (new_ref
) = TREE_SIDE_EFFECTS (ref
);
1138 TREE_THIS_VOLATILE (new_ref
) = TREE_THIS_VOLATILE (ref
);
1142 /* Return the preferred index scale factor for accessing memory of mode
1143 MEM_MODE in the address space of pointer BASE. Assume that we're
1144 optimizing for speed if SPEED is true and for size otherwise. */
1146 preferred_mem_scale_factor (tree base
, machine_mode mem_mode
,
1149 /* For BLKmode, we can't do anything so return 1. */
1150 if (mem_mode
== BLKmode
)
1153 struct mem_address parts
= {};
1154 addr_space_t as
= TYPE_ADDR_SPACE (TREE_TYPE (base
));
1155 unsigned int fact
= GET_MODE_UNIT_SIZE (mem_mode
);
1157 /* Addressing mode "base + index". */
1158 parts
.index
= integer_one_node
;
1159 parts
.base
= integer_one_node
;
1160 rtx addr
= addr_for_mem_ref (&parts
, as
, false);
1161 unsigned cost
= address_cost (addr
, mem_mode
, as
, speed
);
1163 /* Addressing mode "base + index << scale". */
1164 parts
.step
= wide_int_to_tree (sizetype
, fact
);
1165 addr
= addr_for_mem_ref (&parts
, as
, false);
1166 unsigned new_cost
= address_cost (addr
, mem_mode
, as
, speed
);
1168 /* Compare the cost of an address with an unscaled index with
1169 a scaled index and return factor if useful. */
1170 if (new_cost
< cost
)
1171 return GET_MODE_UNIT_SIZE (mem_mode
);
1175 /* Dump PARTS to FILE. */
/* NOTE(review): fragmented extraction -- the per-field presence checks are
   missing; code kept byte-identical.  Debug helper printing each populated
   component of a decomposed address, one "name: expr" line per field; for
   the symbol only the referenced object (ADDR_EXPR operand) is printed.  */
1177 extern void dump_mem_address (FILE *, struct mem_address
*);
1179 dump_mem_address (FILE *file
, struct mem_address
*parts
)
1183 fprintf (file
, "symbol: ");
1184 print_generic_expr (file
, TREE_OPERAND (parts
->symbol
, 0), TDF_SLIM
);
1185 fprintf (file
, "\n");
1189 fprintf (file
, "base: ");
1190 print_generic_expr (file
, parts
->base
, TDF_SLIM
);
1191 fprintf (file
, "\n");
1195 fprintf (file
, "index: ");
1196 print_generic_expr (file
, parts
->index
, TDF_SLIM
);
1197 fprintf (file
, "\n");
1201 fprintf (file
, "step: ");
1202 print_generic_expr (file
, parts
->step
, TDF_SLIM
);
1203 fprintf (file
, "\n");
1207 fprintf (file
, "offset: ");
1208 print_generic_expr (file
, parts
->offset
, TDF_SLIM
);
1209 fprintf (file
, "\n");