/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Utility functions for manipulating TARGET_MEM_REFs -- tree expressions
   that directly map to addressing modes of the target.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "memmodel.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "recog.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-ssa-loop-ivopts.h"
#include "expr.h"
#include "tree-dfa.h"
#include "dumpfile.h"
#include "tree-affine.h"
#include "gimplify.h"
#include "builtins.h"

/* FIXME: We compute address costs using RTL.  */
#include "tree-ssa-address.h"

/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, eg local statics
     (3) !binds_local_p, eg global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3).  And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause the back end to produce
   incorrect code, because the expander in expr.cc validates the address.
   However it would be nice to improve the handling here in order to
   produce more precise results.  */

/* A "template" for memory address, used to determine whether the address is
   valid for mode.  */

struct GTY (()) mem_addr_template {
  rtx ref;                      /* The template.  */
  rtx * GTY ((skip)) step_p;    /* The point in template where the step should
                                   be filled in.  */
  rtx * GTY ((skip)) off_p;     /* The point in template where the offset
                                   should be filled in.  */
};

/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  */

static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;

#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
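
/* For example (assuming the generic address space 0), an address with a
   symbol, a base and an offset but neither index nor step maps to index
   TEMPL_IDX (0, s, b, 0, 0, o) == 0b11001 == 25.  */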

/* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
   STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores a pointer
   to where the step is placed within the template to *STEP_P, and to where
   the offset is placed to *OFFSET_P.  */

static void
gen_addr_rtx (machine_mode address_mode,
	      rtx symbol, rtx base, rtx index, rtx step, rtx offset,
	      rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  if (index && index != const0_rtx)
    {
      act_elem = index;
      if (step)
	{
	  act_elem = gen_rtx_MULT (address_mode, act_elem, step);

	  if (step_p)
	    *step_p = &XEXP (act_elem, 1);
	}

      *addr = act_elem;
    }

  if (base && base != const0_rtx)
    {
      if (*addr)
	*addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
	*addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
	{
	  act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

	  if (offset_p)
	    *offset_p = &XEXP (act_elem, 1);

	  if (GET_CODE (symbol) == SYMBOL_REF
	      || GET_CODE (symbol) == LABEL_REF
	      || GET_CODE (symbol) == CONST)
	    act_elem = gen_rtx_CONST (address_mode, act_elem);
	}

      if (*addr)
	*addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
	*addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
	{
	  *addr = gen_rtx_PLUS (address_mode, *addr, offset);
	  if (offset_p)
	    *offset_p = &XEXP (*addr, 1);
	}
      else
	{
	  *addr = offset;
	  if (offset_p)
	    *offset_p = addr;
	}
    }

  if (!*addr)
    *addr = const0_rtx;
}
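
/* With all components present the resulting RTL has roughly the shape
   (plus (plus base (mult index step)) (const (plus symbol offset))),
   modulo any reordering done by simplify_gen_binary.  */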

/* Returns address for TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)
{
  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
  scalar_int_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  if (addr->step && !integer_onep (addr->step))
    st = immed_wide_int_const (wi::to_wide (addr->step), pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    {
      poly_offset_int dc
	= poly_offset_int::from (wi::to_poly_wide (addr->offset), SIGNED);
      off = immed_wide_int_const (dc, pointer_mode);
    }
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
	vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1, true);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
	{
	  sym = (addr->symbol
		 ? gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
		 : NULL_RTX);
	  bse = (addr->base
		 ? gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
		 : NULL_RTX);
	  idx = (addr->index
		 ? gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
		 : NULL_RTX);

	  gen_addr_rtx (pointer_mode, sym, bse, idx,
			st ? const0_rtx : NULL_RTX,
			off ? const0_rtx : NULL_RTX,
			&templ->ref,
			&templ->step_p,
			&templ->off_p);
	}

      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  /* addr->base could be an SSA_NAME that was set to a constant value.  The
     call to expand_expr may expose that constant.  If so, fold the value
     into OFF and clear BSE.  Otherwise we may later try to pull a mode from
     BSE to generate a REG, which won't work with constants because they
     are modeless.  */
  if (bse && GET_CODE (bse) == CONST_INT)
    {
      if (off)
	off = simplify_gen_binary (PLUS, pointer_mode, bse, off);
      else
	off = bse;
      gcc_assert (GET_CODE (off) == CONST_INT);
      bse = NULL_RTX;
    }
  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}
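
/* Note that the template path (REALLY_EXPAND == false) is how
   valid_mem_ref_p and preferred_mem_scale_factor below probe the target's
   addressing modes without creating any real registers or instructions.  */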

/* Implement addr_for_mem_ref () directly from a tree, which avoids exporting
   the mem_address structure.  */

rtx
addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
{
  struct mem_address addr;
  get_address_description (exp, &addr);
  return addr_for_mem_ref (&addr, as, really_expand);
}

/* Returns address of MEM_REF in TYPE.  */

tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
	act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
				act_elem, step);
      addr_off = act_elem;
    }

  act_elem = TMR_INDEX2 (mem_ref);
  if (act_elem)
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
				addr_off, act_elem);
      else
	addr_off = act_elem;
    }

  if (offset && !integer_zerop (offset))
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
				fold_convert (TREE_TYPE (addr_off), offset));
      else
	addr_off = offset;
    }

  if (addr_off)
    addr = fold_build_pointer_plus (addr_base, addr_off);
  else
    addr = addr_base;

  return addr;
}
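
/* In other words, the address computed for
   TARGET_MEM_REF <base, offset, index, step, index2> is
   base + index * step + index2 + offset, with any absent component simply
   dropped from the sum.  */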

/* Returns true if a memory reference in MODE and with parameters given by
   ADDR is valid on the current target.  */

bool
valid_mem_ref_p (machine_mode mode, addr_space_t as,
		 struct mem_address *addr, code_helper ch)
{
  rtx address;

  address = addr_for_mem_ref (addr, as, false);
  if (!address)
    return false;

  return memory_address_addr_space_p (mode, address, as, ch);
}

/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false, omit the verification step.  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (build_pointer_type (type), 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ??? Since IVOPTs does not follow the restrictions on where the base
     pointer may point to, create a MEM_REF only if we know that the
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}
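
/* For instance, an address consisting only of a symbol (an ADDR_EXPR base)
   and a constant offset degenerates to the plain MEM_REF case above, while
   a scaled index or a second index forces the five-operand
   TARGET_MEM_REF.  */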

/* Returns true if OBJ is an object whose address is a link time constant.  */

static bool
fixed_address_object_p (tree obj)
{
  return (VAR_P (obj)
	  && (TREE_STATIC (obj) || DECL_EXTERNAL (obj))
	  && ! DECL_DLLIMPORT_P (obj));
}

/* If ADDR contains the address of an object that is a link time constant,
   move it to PARTS->symbol.  */

void
move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (TREE_CODE (val) == ADDR_EXPR
	  && fixed_address_object_p (TREE_OPERAND (val, 0)))
	break;
    }

  if (i == addr->n)
    return;

  parts->symbol = val;
  aff_combination_remove_elt (addr, i);
}

/* If ADDR contains an instance of BASE_HINT, move it to PARTS->base and
   return true.  */

static bool
move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
		   aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;
  int qual;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))
	break;
    }

  if (i == addr->n)
    return false;

  /* Cast value to appropriate pointer type.  We cannot use a pointer
     to TYPE directly, as the back-end will assume registers of pointer
     type are aligned, and just the base itself may not actually be.
     We use void pointer to the type's address space instead.  */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
  return true;
}

/* If ADDR contains an address of a dereferenced pointer, move it to
   PARTS->base.  */

static void
move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (POINTER_TYPE_P (TREE_TYPE (val)))
	break;
    }

  if (i == addr->n)
    return;

  parts->base = val;
  aff_combination_remove_elt (addr, i);
}

/* Moves the loop variant part V in linear address ADDR to be the index
   of PARTS.  */

static void
move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
{
  unsigned i;
  tree val = NULL_TREE;

  gcc_assert (!parts->index);
  for (i = 0; i < addr->n; i++)
    {
      val = addr->elts[i].val;
      if (operand_equal_p (val, v, 0))
	break;
    }

  if (i == addr->n)
    return;

  parts->index = fold_convert (sizetype, val);
  parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
  aff_combination_remove_elt (addr, i);
}

/* Adds ELT to PARTS.  */

static void
add_to_parts (struct mem_address *parts, tree elt)
{
  tree type;

  if (!parts->index)
    {
      parts->index = fold_convert (sizetype, elt);
      return;
    }

  if (!parts->base)
    {
      parts->base = elt;
      return;
    }

  /* Add ELT to base.  */
  type = TREE_TYPE (parts->base);
  if (POINTER_TYPE_P (type))
    parts->base = fold_build_pointer_plus (parts->base, elt);
  else
    parts->base = fold_build2 (PLUS_EXPR, type, parts->base, elt);
}

/* Returns true if multiplying by RATIO is allowed in an address.  Test the
   validity for a memory reference accessing memory of mode MODE in address
   space AS.  */

static bool
multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, machine_mode mode,
				 addr_space_t as)
{
#define MAX_RATIO 128
  unsigned int data_index = (int) as * MAX_MACHINE_MODE + (int) mode;
  static vec<sbitmap> valid_mult_list;
  sbitmap valid_mult;

  if (data_index >= valid_mult_list.length ())
    valid_mult_list.safe_grow_cleared (data_index + 1, true);

  valid_mult = valid_mult_list[data_index];
  if (!valid_mult)
    {
      machine_mode address_mode = targetm.addr_space.address_mode (as);
      rtx reg1 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1);
      rtx reg2 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 2);
      rtx addr, scaled;
      HOST_WIDE_INT i;

      valid_mult = sbitmap_alloc (2 * MAX_RATIO + 1);
      bitmap_clear (valid_mult);
      scaled = gen_rtx_fmt_ee (MULT, address_mode, reg1, NULL_RTX);
      addr = gen_rtx_fmt_ee (PLUS, address_mode, scaled, reg2);
      for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
	{
	  XEXP (scaled, 1) = gen_int_mode (i, address_mode);
	  if (memory_address_addr_space_p (mode, addr, as)
	      || memory_address_addr_space_p (mode, scaled, as))
	    bitmap_set_bit (valid_mult, i + MAX_RATIO);
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "  allowed multipliers:");
	  for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
	    if (bitmap_bit_p (valid_mult, i + MAX_RATIO))
	      fprintf (dump_file, " %d", (int) i);
	  fprintf (dump_file, "\n");
	  fprintf (dump_file, "\n");
	}

      valid_mult_list[data_index] = valid_mult;
    }

  if (ratio > MAX_RATIO || ratio < -MAX_RATIO)
    return false;

  return bitmap_bit_p (valid_mult, ratio + MAX_RATIO);
}
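
/* The probe works by materializing (plus (mult reg1 N) reg2) for every N in
   [-MAX_RATIO, MAX_RATIO] once per (address space, mode) pair and caching
   the answers in a bitmap; on a target such as x86-64 one would expect the
   set of valid multipliers to include at least 1, 2, 4 and 8.  */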

/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and moves the corresponding
   element(s) to PARTS.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
			      aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  offset_int best_mult = 0;
  for (i = 0; i < addr->n; i++)
    {
      if (!wi::fits_shwi_p (addr->elts[i].coef))
	continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
	  || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
	continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
	{
	  best_mult_cost = acost;
	  best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
	}
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  */
  for (i = j = 0; i < addr->n; i++)
    {
      offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
      offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));

      if (amult == best_mult)
	op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
	op_code = MINUS_EXPR;
      else
	{
	  addr->elts[j] = addr->elts[i];
	  j++;
	  continue;
	}

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
	mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
	mult_elt = elt;
      else
	mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  parts->index = mult_elt;
  parts->step = wide_int_to_tree (sizetype, best_mult);
}
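
/* For example, given ADDR = p + 8 * i - 8 * j + k on a typical target that
   allows a multiplier of 8, the elements 8 * i and -8 * j are collected
   into index = i - j with step = 8, leaving p + k in ADDR.  */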

/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.  Store true to VAR_IN_BASE if the variant
   part of the address is split to PARTS.base.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but a single
   register in an address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand, tree base_hint,
	       struct mem_address *parts, bool *var_in_base, bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  if (maybe_ne (addr->offset, 0))
    parts->offset = wide_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* Since at the moment there is no reliable way to distinguish a pointer
     from its offset, we decide based on a guess whether the variant part
     is the pointer.  */
  *var_in_base = (base_hint != NULL && parts->symbol == NULL);
  if (*var_in_base)
    *var_in_base = move_hint_to_base (type, parts, base_hint, addr);
  else
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Move pointer into base.  */
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (addr->elts[i].coef != 1)
	part = fold_build2 (MULT_EXPR, sizetype, part,
			    wide_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
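
/* As a whole, an affine combination such as p + 4 * i + 32 would typically
   end up as base = p, index = i, step = 4 and offset = 32, provided the
   target accepts a multiplier of 4 for the access mode.  */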

/* Force the base and index of PARTS into registers, emitting any needed
   statements before GSI.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
					      is_gimple_mem_ref_addr, NULL_TREE,
					      true, GSI_SAME_STMT);
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
					     true, NULL_TREE,
					     true, GSI_SAME_STMT);
}

/* Return true if the OFFSET in PARTS is the only thing that is making
   it an invalid address for type TYPE.  */

static bool
mem_ref_valid_without_offset_p (tree type, mem_address parts)
{
  if (!parts.base)
    parts.base = parts.offset;
  parts.offset = NULL_TREE;
  return valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), &parts);
}

/* Fold PARTS->offset into PARTS->base, so that there is no longer
   a separate offset.  Emit any new instructions before GSI.  */

static void
add_offset_to_base (gimple_stmt_iterator *gsi, mem_address *parts)
{
  tree tmp = parts->offset;
  if (parts->base)
    {
      tmp = fold_build_pointer_plus (parts->base, tmp);
      tmp = force_gimple_operand_gsi_1 (gsi, tmp, is_gimple_mem_ref_addr,
					NULL_TREE, true, GSI_SAME_STMT);
    }
  parts->base = tmp;
  parts->offset = NULL_TREE;
}

/* Creates and returns a TARGET_MEM_REF for address ADDR.  Any necessary
   computations are emitted in front of GSI.  TYPE is the type of the created
   memory reference.  IV_CAND is the selected iv candidate in ADDR, and
   BASE_HINT is non-NULL if IV_CAND comes from a base address object.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  bool var_in_base;
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, &var_in_base, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  /* Merge symbol into other parts.  */
  if (parts.symbol)
    {
      tmp = parts.symbol;
      parts.symbol = NULL_TREE;
      gcc_assert (is_gimple_val (tmp));

      if (parts.base)
	{
	  gcc_assert (useless_type_conversion_p (sizetype,
						 TREE_TYPE (parts.base)));

	  if (parts.index)
	    {
	      /* Add the symbol to base, forcing it into a register
		 if necessary.  */
	      tmp = fold_build_pointer_plus (tmp, parts.base);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  else
	    {
	      /* Move base to index, then move the symbol to base.  */
	      parts.index = parts.base;
	    }

	  parts.base = tmp;
	}
      else
	parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Move multiplication to index by transforming address expression:
       [... + index << step + ...]
     into:
       index' = index << step;
       [... + index' + ...].  */
  if (parts.step && !integer_onep (parts.step))
    {
      gcc_assert (parts.index);
      if (parts.offset && mem_ref_valid_without_offset_p (type, parts))
	{
	  add_offset_to_base (gsi, &parts);
	  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
	  gcc_assert (mem_ref);
	  return mem_ref;
	}

      parts.index = force_gimple_operand_gsi (gsi,
					      fold_build2 (MULT_EXPR, sizetype,
							   parts.index,
							   parts.step),
					      true, NULL_TREE, true,
					      GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Add offset to invariant part by transforming address expression:
       [base + index + offset]
     into:
       base' = base + offset;
       [base' + index]
     or:
       index' = index + offset;
       [base + index']
     depending on which one is invariant.  */
  if (parts.offset && !integer_zerop (parts.offset))
    {
      tree old_base = unshare_expr (parts.base);
      tree old_index = unshare_expr (parts.index);
      tree old_offset = unshare_expr (parts.offset);

      tmp = parts.offset;
      parts.offset = NULL_TREE;
      /* Add offset to invariant part.  */
      if (!var_in_base)
	{
	  if (parts.base)
	    {
	      tmp = fold_build_pointer_plus (parts.base, tmp);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  parts.base = tmp;
	}
      else
	{
	  if (parts.index)
	    {
	      tmp = fold_build_pointer_plus (parts.index, tmp);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  parts.index = tmp;
	}

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;

      /* Restore parts.base, index and offset so that we can check if
	 [base + offset] addressing mode is supported in the next step.
	 This is necessary for targets that only support [base + offset],
	 but not [base + index] addressing mode.  */
      parts.base = old_base;
      parts.index = old_index;
      parts.offset = old_offset;
    }

  /* Transform [base + index + ...] into:
       base' = base + index;
       [base' + ...].  */
  if (parts.index)
    {
      tmp = parts.index;
      parts.index = NULL_TREE;
      /* Add index to base.  */
      if (parts.base)
	{
	  tmp = fold_build_pointer_plus (parts.base, tmp);
	  tmp = force_gimple_operand_gsi_1 (gsi, tmp,
					    is_gimple_mem_ref_addr,
					    NULL_TREE, true, GSI_SAME_STMT);
	}
      parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Transform [base + offset] into:
       base' = base + offset;
       [base'].  */
  if (parts.offset && !integer_zerop (parts.offset))
    {
      add_offset_to_base (gsi, &parts);
      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
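
/* The fallback ladder above thus tries, in order: folding the symbol into
   the base or index, pre-computing index * step, folding the offset into
   whichever part is loop invariant, folding the index into the base, and
   finally folding the offset into the base, re-checking validity after
   each step.  */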

/* Copies components of the address from OP to ADDR.  */

void
get_address_description (tree op, struct mem_address *addr)
{
  if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
    {
      addr->symbol = TMR_BASE (op);
      addr->base = TMR_INDEX2 (op);
    }
  else
    {
      addr->symbol = NULL_TREE;
      if (TMR_INDEX2 (op))
	{
	  gcc_assert (integer_zerop (TMR_BASE (op)));
	  addr->base = TMR_INDEX2 (op);
	}
      else
	addr->base = TMR_BASE (op);
    }
  addr->index = TMR_INDEX (op);
  addr->step = TMR_STEP (op);
  addr->offset = TMR_OFFSET (op);
}

/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
	      || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  tree base = get_base_address (old_ref);
  if (!base)
    return;

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      if ((TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
	  && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
	{
	  duplicate_ssa_name_ptr_info
	    (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
	  reset_flow_sensitive_info (new_ptr_base);
	}
      else if (VAR_P (base)
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	{
	  struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
	  pt_solution_set_var (&pi->pt, base);
	}
    }

  /* We can transfer dependence info.  */
  if (!MR_DEPENDENCE_CLIQUE (new_ref)
      && (TREE_CODE (base) == MEM_REF
	  || TREE_CODE (base) == TARGET_MEM_REF)
      && MR_DEPENDENCE_CLIQUE (base))
    {
      MR_DEPENDENCE_CLIQUE (new_ref) = MR_DEPENDENCE_CLIQUE (base);
      MR_DEPENDENCE_BASE (new_ref) = MR_DEPENDENCE_BASE (base);
    }

  /* And alignment info.  Note we cannot transfer misalignment info
     since that sits on the SSA name but this is flow-sensitive info
     which we cannot transfer in this generic routine.  */
  unsigned old_align = get_object_alignment (old_ref);
  unsigned new_align = get_object_alignment (new_ref);
  if (new_align < old_align)
    TREE_TYPE (new_ref) = build_aligned_type (TREE_TYPE (new_ref), old_align);
}

/* Move constants in TARGET_MEM_REF REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      poly_int64 offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
			 (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target - the propagation
     result wouldn't be anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
				TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
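
/* For instance, a TARGET_MEM_REF whose index has been propagated to the
   constant 3 with step 4 is folded by adding 12 to its offset and dropping
   the index and step.  */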

/* Return the preferred index scale factor for accessing memory of mode
   MEM_MODE in the address space of pointer BASE.  Assume that we're
   optimizing for speed if SPEED is true and for size otherwise.  */

unsigned int
preferred_mem_scale_factor (tree base, machine_mode mem_mode,
			    bool speed)
{
  /* For BLKmode, we can't do anything so return 1.  */
  if (mem_mode == BLKmode)
    return 1;

  struct mem_address parts = {};
  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
  unsigned int fact = GET_MODE_UNIT_SIZE (mem_mode);

  /* Addressing mode "base + index".  */
  parts.index = integer_one_node;
  parts.base = integer_one_node;
  rtx addr = addr_for_mem_ref (&parts, as, false);
  unsigned cost = address_cost (addr, mem_mode, as, speed);

  /* Addressing mode "base + index << scale".  */
  parts.step = wide_int_to_tree (sizetype, fact);
  addr = addr_for_mem_ref (&parts, as, false);
  unsigned new_cost = address_cost (addr, mem_mode, as, speed);

  /* Compare the cost of an address with an unscaled index with
     a scaled index and return factor if useful.  */
  if (new_cost < cost)
    return GET_MODE_UNIT_SIZE (mem_mode);
  return 1;
}

/* Dump PARTS to FILE.  */

extern void dump_mem_address (FILE *, struct mem_address *);
void
dump_mem_address (FILE *file, struct mem_address *parts)
{
  if (parts->symbol)
    {
      fprintf (file, "symbol: ");
      print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->base)
    {
      fprintf (file, "base: ");
      print_generic_expr (file, parts->base, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->index)
    {
      fprintf (file, "index: ");
      print_generic_expr (file, parts->index, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->step)
    {
      fprintf (file, "step: ");
      print_generic_expr (file, parts->step, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->offset)
    {
      fprintf (file, "offset: ");
      print_generic_expr (file, parts->offset, TDF_SLIM);
      fprintf (file, "\n");
    }
}

#include "gt-tree-ssa-address.h"