/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
   that directly map to addressing modes of the target.  */
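
/* For illustration (an editor's sketch, not part of the original sources):
   a TARGET_MEM_REF describes a memory access whose address decomposes as

     TMR_BASE + TMR_INDEX * TMR_STEP + TMR_INDEX2 + TMR_OFFSET

   so a C access like a[i] with 4-byte elements can, on a suitable target,
   be represented with base &a, index i and step 4.  */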
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "memmodel.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "recog.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-ssa-loop-ivopts.h"
#include "expr.h"
#include "tree-dfa.h"
#include "dumpfile.h"
#include "tree-affine.h"
#include "gimplify.h"

/* FIXME: We compute address costs using RTL.  */
#include "tree-ssa-address.h"

/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, e.g. local statics
     (3) !binds_local_p, e.g. global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3).  And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause incorrect results out the
   back end, because the expander in expr.c validizes the address.  However
   it would be nice to improve the handling here in order to produce more
   precise costs.  */

/* A "template" for memory address, used to determine whether the address is
   valid for mode.  */

struct GTY (()) mem_addr_template
{
  rtx ref;			/* The template.  */
  rtx * GTY ((skip)) step_p;	/* The point in template where the step should be
				   filled in.  */
  rtx * GTY ((skip)) off_p;	/* The point in template where the offset should
				   be filled in.  */
};

/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  */

static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;

#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
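
/* For illustration (not from the original sources): an address in the
   generic address space (AS 0) with a base, an index and a step present,
   but no symbol and no offset, selects template index

     TEMPL_IDX (0, NULL, base, index, step, NULL)
       == (0 << 5) | (0 << 4) | (1 << 3) | (1 << 2) | (1 << 1) | 0
       == 14.  */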

/* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
   STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores pointers
   to where step is placed to *STEP_P and offset to *OFFSET_P.  */

static void
gen_addr_rtx (machine_mode address_mode,
	      rtx symbol, rtx base, rtx index, rtx step, rtx offset,
	      rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  if (index && index != const0_rtx)
    {
      act_elem = index;
      if (step)
	{
	  act_elem = gen_rtx_MULT (address_mode, act_elem, step);

	  if (step_p)
	    *step_p = &XEXP (act_elem, 1);
	}

      *addr = act_elem;
    }

  if (base && base != const0_rtx)
    {
      if (*addr)
	*addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
	*addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
	{
	  act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

	  if (offset_p)
	    *offset_p = &XEXP (act_elem, 1);

	  if (GET_CODE (symbol) == SYMBOL_REF
	      || GET_CODE (symbol) == LABEL_REF
	      || GET_CODE (symbol) == CONST)
	    act_elem = gen_rtx_CONST (address_mode, act_elem);
	}

      if (*addr)
	*addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
	*addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
	{
	  *addr = gen_rtx_PLUS (address_mode, *addr, offset);
	  if (offset_p)
	    *offset_p = &XEXP (*addr, 1);
	}
      else
	{
	  *addr = offset;
	  if (offset_p)
	    *offset_p = addr;
	}
    }

  if (!*addr)
    *addr = const0_rtx;
}

/* Returns address for TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)
{
  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
  scalar_int_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  if (addr->step && !integer_onep (addr->step))
    st = immed_wide_int_const (wi::to_wide (addr->step), pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    {
      poly_offset_int dc
	= poly_offset_int::from (wi::to_poly_wide (addr->offset), SIGNED);
      off = immed_wide_int_const (dc, pointer_mode);
    }
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
	vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
	{
	  sym = (addr->symbol ?
		 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
		 : NULL_RTX);
	  bse = (addr->base ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
		 : NULL_RTX);
	  idx = (addr->index ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
		 : NULL_RTX);

	  gen_addr_rtx (pointer_mode, sym, bse, idx,
			st ? const0_rtx : NULL_RTX,
			off ? const0_rtx : NULL_RTX,
			&templ->ref,
			&templ->step_p,
			&templ->off_p);
	}

      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  /* addr->base could be an SSA_NAME that was set to a constant value.  The
     call to expand_expr may expose that constant.  If so, fold the value
     into OFF and clear BSE.  Otherwise we may later try to pull a mode from
     BSE to generate a REG, which won't work with constants because they
     are modeless.  */
  if (bse && GET_CODE (bse) == CONST_INT)
    {
      if (off)
	off = simplify_gen_binary (PLUS, pointer_mode, bse, off);
      else
	off = bse;
      gcc_assert (GET_CODE (off) == CONST_INT);
      bse = NULL_RTX;
    }
  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}

/* Implement addr_for_mem_ref () directly from a tree, which avoids exporting
   the mem_address structure.  */

rtx
addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
{
  struct mem_address addr;
  get_address_description (exp, &addr);
  return addr_for_mem_ref (&addr, as, really_expand);
}

/* Returns address of MEM_REF in TYPE.  */

tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
	act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
				act_elem, step);
      addr_off = act_elem;
    }

  act_elem = TMR_INDEX2 (mem_ref);
  if (act_elem)
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
				addr_off, act_elem);
      else
	addr_off = act_elem;
    }

  if (offset && !integer_zerop (offset))
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
				fold_convert (TREE_TYPE (addr_off), offset));
      else
	addr_off = offset;
    }

  if (addr_off)
    addr = fold_build_pointer_plus (addr_base, addr_off);
  else
    addr = addr_base;

  return addr;
}
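
/* For illustration (hypothetical operands, not from the original sources):
   given a TARGET_MEM_REF with base P, index I, step 4 and offset 8, the
   function above rebuilds the plain address expression

     P + ((sizetype) I * 4 + 8)

   as a tree.  */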

/* Returns true if a memory reference in MODE and with parameters given by
   ADDR is valid on the current target.  */

bool
valid_mem_ref_p (machine_mode mode, addr_space_t as,
		 struct mem_address *addr)
{
  rtx address;

  address = addr_for_mem_ref (addr, as, false);
  if (!address)
    return false;

  return memory_address_addr_space_p (mode, address, as);
}

/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false omit the verification step.  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (build_pointer_type (type), 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ??? As IVOPTs does not follow restrictions on where the base
     pointer may point to, create a MEM_REF only if we know that
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}

/* Returns true if OBJ is an object whose address is a link time constant.  */

static bool
fixed_address_object_p (tree obj)
{
  return (VAR_P (obj)
	  && (TREE_STATIC (obj) || DECL_EXTERNAL (obj))
	  && ! DECL_DLLIMPORT_P (obj));
}

/* If ADDR contains an address of object that is a link time constant,
   move it to PARTS->symbol.  */

static void
move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (TREE_CODE (val) == ADDR_EXPR
	  && fixed_address_object_p (TREE_OPERAND (val, 0)))
	break;
    }

  if (i == addr->n)
    return;

  parts->symbol = val;
  aff_combination_remove_elt (addr, i);
}

/* Return true if ADDR contains an instance of BASE_HINT and it's moved to
   PARTS->base.  */

static bool
move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
		   aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;
  int qual;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))
	break;
    }

  if (i == addr->n)
    return false;

  /* Cast value to appropriate pointer type.  We cannot use a pointer
     to TYPE directly, as the back-end will assume registers of pointer
     type are aligned, and just the base itself may not actually be.
     We use void pointer to the type's address space instead.  */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
  return true;
}

/* If ADDR contains an address of a dereferenced pointer, move it to
   PARTS->base.  */

static void
move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (POINTER_TYPE_P (TREE_TYPE (val)))
	break;
    }

  if (i == addr->n)
    return;

  parts->base = val;
  aff_combination_remove_elt (addr, i);
}

/* Moves the loop variant part V in linear address ADDR to be the index
   of PARTS.  */

static void
move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
{
  unsigned i;
  tree val = NULL_TREE;

  gcc_assert (!parts->index);
  for (i = 0; i < addr->n; i++)
    {
      val = addr->elts[i].val;
      if (operand_equal_p (val, v, 0))
	break;
    }

  if (i == addr->n)
    return;

  parts->index = fold_convert (sizetype, val);
  parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
  aff_combination_remove_elt (addr, i);
}

/* Adds ELT to PARTS.  */

static void
add_to_parts (struct mem_address *parts, tree elt)
{
  tree type;

  if (!parts->index)
    {
      parts->index = fold_convert (sizetype, elt);
      return;
    }

  if (!parts->base)
    {
      parts->base = elt;
      return;
    }

  /* Add ELT to base.  */
  type = TREE_TYPE (parts->base);
  if (POINTER_TYPE_P (type))
    parts->base = fold_build_pointer_plus (parts->base, elt);
  else
    parts->base = fold_build2 (PLUS_EXPR, type, parts->base, elt);
}

/* Returns true if multiplying by RATIO is allowed in an address.  Test the
   validity for a memory reference accessing memory of mode MODE in address
   space AS.  */

static bool
multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, machine_mode mode,
				 addr_space_t as)
{
#define MAX_RATIO 128
  unsigned int data_index = (int) as * MAX_MACHINE_MODE + (int) mode;
  static vec<sbitmap> valid_mult_list;
  sbitmap valid_mult;

  if (data_index >= valid_mult_list.length ())
    valid_mult_list.safe_grow_cleared (data_index + 1);

  valid_mult = valid_mult_list[data_index];
  if (!valid_mult)
    {
      machine_mode address_mode = targetm.addr_space.address_mode (as);
      rtx reg1 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1);
      rtx reg2 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 2);
      rtx addr, scaled;
      HOST_WIDE_INT i;

      valid_mult = sbitmap_alloc (2 * MAX_RATIO + 1);
      bitmap_clear (valid_mult);
      scaled = gen_rtx_fmt_ee (MULT, address_mode, reg1, NULL_RTX);
      addr = gen_rtx_fmt_ee (PLUS, address_mode, scaled, reg2);
      for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
	{
	  XEXP (scaled, 1) = gen_int_mode (i, address_mode);
	  if (memory_address_addr_space_p (mode, addr, as)
	      || memory_address_addr_space_p (mode, scaled, as))
	    bitmap_set_bit (valid_mult, i + MAX_RATIO);
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "  allowed multipliers:");
	  for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
	    if (bitmap_bit_p (valid_mult, i + MAX_RATIO))
	      fprintf (dump_file, " %d", (int) i);
	  fprintf (dump_file, "\n");
	  fprintf (dump_file, "\n");
	}

      valid_mult_list[data_index] = valid_mult;
    }

  if (ratio > MAX_RATIO || ratio < -MAX_RATIO)
    return false;

  return bitmap_bit_p (valid_mult, ratio + MAX_RATIO);
}
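
/* For illustration (target-dependent; this is an assumption, not derived
   from this file): on a target whose addressing modes accept
   "base + index * scale" with scales 1, 2, 4 and 8, the probing loop above
   would set exactly the bitmap bits for those ratios, so a ratio of 8
   would be accepted while, e.g., a ratio of 48 would be rejected.  */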

/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and move the corresponding
   element(s) to PARTS.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
			      aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  offset_int best_mult = 0;
  for (i = 0; i < addr->n; i++)
    {
      if (!wi::fits_shwi_p (addr->elts[i].coef))
	continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
	  || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
	continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
	{
	  best_mult_cost = acost;
	  best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
	}
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  */
  for (i = j = 0; i < addr->n; i++)
    {
      offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
      offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));

      if (amult == best_mult)
	op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
	op_code = MINUS_EXPR;
      else
	{
	  addr->elts[j] = addr->elts[i];
	  j++;
	  continue;
	}

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
	mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
	mult_elt = elt;
      else
	mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  parts->index = mult_elt;
  parts->step = wide_int_to_tree (sizetype, best_mult);
}

/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.  Store true to VAR_IN_BASE if variant
   part of address is split to PARTS.base.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but single
   register in address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */
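
/* For illustration (hypothetical input, not from the original sources):
   for an affine form  &a + 4 * i + 16  describing a 4-byte access, the
   splitting below would typically produce PARTS with

     symbol = &a, index = (sizetype) i, step = 4, offset = 16,

   assuming "a" is a link-time constant and multiplying by 4 is allowed
   in the target's addressing modes.  */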

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand, tree base_hint,
	       struct mem_address *parts, bool *var_in_base, bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  if (maybe_ne (addr->offset, 0))
    parts->offset = wide_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* Since at the moment there is no reliable way to know how to
     distinguish between pointer and its offset, we decide if var
     part is the pointer based on a guess.  */
  *var_in_base = (base_hint != NULL && parts->symbol == NULL);
  if (*var_in_base)
    *var_in_base = move_hint_to_base (type, parts, base_hint, addr);
  else
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Move pointer into base.  */
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (addr->elts[i].coef != 1)
	part = fold_build2 (MULT_EXPR, sizetype, part,
			    wide_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}

/* Force the PARTS to register.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
					      is_gimple_mem_ref_addr, NULL_TREE,
					      true, GSI_SAME_STMT);
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
					     true, NULL_TREE,
					     true, GSI_SAME_STMT);
}

/* Return true if the OFFSET in PARTS is the only thing that is making
   it an invalid address for type TYPE.  */

static bool
mem_ref_valid_without_offset_p (tree type, mem_address parts)
{
  if (!parts.base)
    parts.base = parts.offset;
  parts.offset = NULL_TREE;
  return valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), &parts);
}

/* Fold PARTS->offset into PARTS->base, so that there is no longer
   a separate offset.  Emit any new instructions before GSI.  */

static void
add_offset_to_base (gimple_stmt_iterator *gsi, mem_address *parts)
{
  tree tmp = parts->offset;
  if (parts->base)
    {
      tmp = fold_build_pointer_plus (parts->base, tmp);
      tmp = force_gimple_operand_gsi_1 (gsi, tmp, is_gimple_mem_ref_addr,
					NULL_TREE, true, GSI_SAME_STMT);
    }
  parts->base = tmp;
  parts->offset = NULL_TREE;
}

/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary
   computations are emitted in front of GSI.  TYPE is the type of the
   created memory reference.  IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non-NULL if IV_CAND comes from a base address
   object.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  bool var_in_base;
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, &var_in_base, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  /* Merge symbol into other parts.  */
  if (parts.symbol)
    {
      tmp = parts.symbol;
      parts.symbol = NULL_TREE;
      gcc_assert (is_gimple_val (tmp));

      if (parts.base)
	{
	  gcc_assert (useless_type_conversion_p (sizetype,
						 TREE_TYPE (parts.base)));

	  if (parts.index)
	    {
	      /* Add the symbol to base, eventually forcing it to register.  */
	      tmp = fold_build_pointer_plus (tmp, parts.base);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  else
	    {
	      /* Move base to index, then move the symbol to base.  */
	      parts.index = parts.base;
	    }
	  parts.base = tmp;
	}
      else
	parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Move multiplication to index by transforming address expression:
       [... + index << step + ...]
     into:
       index' = index << step;
       [... + index' + ...].  */
  if (parts.step && !integer_onep (parts.step))
    {
      gcc_assert (parts.index);
      if (parts.offset && mem_ref_valid_without_offset_p (type, parts))
	{
	  add_offset_to_base (gsi, &parts);
	  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
	  gcc_assert (mem_ref);
	  return mem_ref;
	}

      parts.index = force_gimple_operand_gsi (gsi,
					      fold_build2 (MULT_EXPR, sizetype,
							   parts.index,
							   parts.step),
					      true, NULL_TREE, true,
					      GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Add offset to invariant part by transforming address expression:
       [base + index + offset]
     into:
       base' = base + offset;
       [base' + index]
     or:
       index' = index + offset;
       [base + index']
     depending on which one is invariant.  */
  if (parts.offset && !integer_zerop (parts.offset))
    {
      tree old_base = unshare_expr (parts.base);
      tree old_index = unshare_expr (parts.index);
      tree old_offset = unshare_expr (parts.offset);

      tmp = parts.offset;
      parts.offset = NULL_TREE;
      /* Add offset to invariant part.  */
      if (!var_in_base)
	{
	  if (parts.base)
	    {
	      tmp = fold_build_pointer_plus (parts.base, tmp);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  parts.base = tmp;
	}
      else
	{
	  if (parts.index)
	    {
	      tmp = fold_build_pointer_plus (parts.index, tmp);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  parts.index = tmp;
	}

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;

      /* Restore parts.base, index and offset so that we can check if
	 [base + offset] addressing mode is supported in next step.
	 This is necessary for targets only support [base + offset],
	 but not [base + index] addressing mode.  */
      parts.base = old_base;
      parts.index = old_index;
      parts.offset = old_offset;
    }

  /* Transform [base + index + ...] into:
       base' = base + index;
       [base' + ...].  */
  if (parts.index)
    {
      tmp = parts.index;
      parts.index = NULL_TREE;
      /* Add index to base.  */
      if (parts.base)
	{
	  tmp = fold_build_pointer_plus (parts.base, tmp);
	  tmp = force_gimple_operand_gsi_1 (gsi, tmp,
					    is_gimple_mem_ref_addr,
					    NULL_TREE, true, GSI_SAME_STMT);
	}
      parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Transform [base + offset] into:
       base' = base + offset;
       [base'].  */
  if (parts.offset && !integer_zerop (parts.offset))
    {
      add_offset_to_base (gsi, &parts);
      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}

/* Copies components of the address from OP to ADDR.  */

void
get_address_description (tree op, struct mem_address *addr)
{
  if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
    {
      addr->symbol = TMR_BASE (op);
      addr->base = TMR_INDEX2 (op);
    }
  else
    {
      addr->symbol = NULL_TREE;
      if (TMR_INDEX2 (op))
	{
	  gcc_assert (integer_zerop (TMR_BASE (op)));
	  addr->base = TMR_INDEX2 (op);
	}
      else
	addr->base = TMR_BASE (op);
    }
  addr->index = TMR_INDEX (op);
  addr->step = TMR_STEP (op);
  addr->offset = TMR_OFFSET (op);
}

/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
	      || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
	;
      else if ((TREE_CODE (base) == MEM_REF
		|| TREE_CODE (base) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
	       && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
	{
	  struct ptr_info_def *new_pi;
	  unsigned int align, misalign;

	  duplicate_ssa_name_ptr_info
	    (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
	  new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
	  /* We have to be careful about transferring alignment information.  */
	  if (get_ptr_info_alignment (new_pi, &align, &misalign)
	      && TREE_CODE (old_ref) == MEM_REF
	      && !(TREE_CODE (new_ref) == TARGET_MEM_REF
		   && (TMR_INDEX2 (new_ref)
		       /* TODO: Below conditions can be relaxed if TMR_INDEX
			  is an induction variable and its initial value and
			  step are aligned.  */
		       || (TMR_INDEX (new_ref) && !TMR_STEP (new_ref))
		       || (TMR_STEP (new_ref)
			   && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
			       < align)))))
	    {
	      poly_uint64 inc = (mem_ref_offset (old_ref)
				 - mem_ref_offset (new_ref)).force_uhwi ();
	      adjust_ptr_info_misalignment (new_pi, inc);
	    }
	  else
	    mark_ptr_info_alignment_unknown (new_pi);
	}
      else if (VAR_P (base)
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	{
	  struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
	  pt_solution_set_var (&pi->pt, base);
	}
    }
}

/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      poly_int64 offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
			 (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target; the propagation
     result wouldn't be valid anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
				TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}

/* Return the preferred index scale factor for accessing memory of mode
   MEM_MODE in the address space of pointer BASE.  Assume that we're
   optimizing for speed if SPEED is true and for size otherwise.  */

unsigned int
preferred_mem_scale_factor (tree base, machine_mode mem_mode,
			    bool speed)
{
  /* For BLKmode, we can't do anything so return 1.  */
  if (mem_mode == BLKmode)
    return 1;

  struct mem_address parts = {};
  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
  unsigned int fact = GET_MODE_UNIT_SIZE (mem_mode);

  /* Addressing mode "base + index".  */
  parts.index = integer_one_node;
  parts.base = integer_one_node;
  rtx addr = addr_for_mem_ref (&parts, as, false);
  unsigned cost = address_cost (addr, mem_mode, as, speed);

  /* Addressing mode "base + index << scale".  */
  parts.step = wide_int_to_tree (sizetype, fact);
  addr = addr_for_mem_ref (&parts, as, false);
  unsigned new_cost = address_cost (addr, mem_mode, as, speed);

  /* Compare the cost of an address with an unscaled index with
     a scaled index and return factor if useful.  */
  if (new_cost < cost)
    return GET_MODE_UNIT_SIZE (mem_mode);
  return 1;
}
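
/* For illustration (hypothetical costs, not from the original sources):
   for a 4-byte unit mode, the function above asks the target for the cost
   of "base + index" versus "base + index * 4"; if the scaled form is
   cheaper it returns 4, telling the caller that a scaled index is
   preferred, and otherwise it returns 1.  */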

/* Dump PARTS to FILE.  */

extern void dump_mem_address (FILE *, struct mem_address *);
void
dump_mem_address (FILE *file, struct mem_address *parts)
{
  if (parts->symbol)
    {
      fprintf (file, "symbol: ");
      print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->base)
    {
      fprintf (file, "base: ");
      print_generic_expr (file, parts->base, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->index)
    {
      fprintf (file, "index: ");
      print_generic_expr (file, parts->index, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->step)
    {
      fprintf (file, "step: ");
      print_generic_expr (file, parts->step, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->offset)
    {
      fprintf (file, "offset: ");
      print_generic_expr (file, parts->offset, TDF_SLIM);
      fprintf (file, "\n");
    }
}

#include "gt-tree-ssa-address.h"