/* Source: gcc/tree-ssa-address.c from official-gcc.git
   (blob 8004951d2e875241eebc37cb22d10fd70307c43d; commit "pr70100.c: Add -mvsx.").  */
/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 /* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
21 that directly map to addressing modes of the target. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "memmodel.h"
32 #include "stringpool.h"
33 #include "tree-vrp.h"
34 #include "tree-ssanames.h"
35 #include "expmed.h"
36 #include "insn-config.h"
37 #include "emit-rtl.h"
38 #include "recog.h"
39 #include "tree-pretty-print.h"
40 #include "fold-const.h"
41 #include "stor-layout.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "tree-ssa-loop-ivopts.h"
45 #include "expr.h"
46 #include "tree-dfa.h"
47 #include "dumpfile.h"
48 #include "tree-affine.h"
49 #include "gimplify.h"
51 /* FIXME: We compute address costs using RTL. */
52 #include "tree-ssa-address.h"
/* TODO -- handling of symbols (according to Richard Hendersons
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, eg local statics
     (3) !binds_local_p, eg global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3).  And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause incorrect results out the
   back end, because the expander in expr.c validizes the address.  However
   it would be nice to improve the handling here in order to produce more
   precise results.  */
78 /* A "template" for memory address, used to determine whether the address is
79 valid for mode. */
81 struct GTY (()) mem_addr_template {
82 rtx ref; /* The template. */
83 rtx * GTY ((skip)) step_p; /* The point in template where the step should be
84 filled in. */
85 rtx * GTY ((skip)) off_p; /* The point in template where the offset should
86 be filled in. */
90 /* The templates. Each of the low five bits of the index corresponds to one
91 component of TARGET_MEM_REF being present, while the high bits identify
92 the address space. See TEMPL_IDX. */
94 static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;
/* Index into mem_addr_template_list: address space in the high bits, one
   presence bit per TARGET_MEM_REF component in the low five bits.  */
#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
104 /* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
105 STEP and OFFSET to *ADDR using address mode ADDRESS_MODE. Stores pointers
106 to where step is placed to *STEP_P and offset to *OFFSET_P. */
108 static void
109 gen_addr_rtx (machine_mode address_mode,
110 rtx symbol, rtx base, rtx index, rtx step, rtx offset,
111 rtx *addr, rtx **step_p, rtx **offset_p)
113 rtx act_elem;
115 *addr = NULL_RTX;
116 if (step_p)
117 *step_p = NULL;
118 if (offset_p)
119 *offset_p = NULL;
121 if (index && index != const0_rtx)
123 act_elem = index;
124 if (step)
126 act_elem = gen_rtx_MULT (address_mode, act_elem, step);
128 if (step_p)
129 *step_p = &XEXP (act_elem, 1);
132 *addr = act_elem;
135 if (base && base != const0_rtx)
137 if (*addr)
138 *addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
139 else
140 *addr = base;
143 if (symbol)
145 act_elem = symbol;
146 if (offset)
148 act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);
150 if (offset_p)
151 *offset_p = &XEXP (act_elem, 1);
153 if (GET_CODE (symbol) == SYMBOL_REF
154 || GET_CODE (symbol) == LABEL_REF
155 || GET_CODE (symbol) == CONST)
156 act_elem = gen_rtx_CONST (address_mode, act_elem);
159 if (*addr)
160 *addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
161 else
162 *addr = act_elem;
164 else if (offset)
166 if (*addr)
168 *addr = gen_rtx_PLUS (address_mode, *addr, offset);
169 if (offset_p)
170 *offset_p = &XEXP (*addr, 1);
172 else
174 *addr = offset;
175 if (offset_p)
176 *offset_p = addr;
180 if (!*addr)
181 *addr = const0_rtx;
184 /* Returns address for TARGET_MEM_REF with parameters given by ADDR
185 in address space AS.
186 If REALLY_EXPAND is false, just make fake registers instead
187 of really expanding the operands, and perform the expansion in-place
188 by using one of the "templates". */
191 addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
192 bool really_expand)
194 scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
195 scalar_int_mode pointer_mode = targetm.addr_space.pointer_mode (as);
196 rtx address, sym, bse, idx, st, off;
197 struct mem_addr_template *templ;
199 if (addr->step && !integer_onep (addr->step))
200 st = immed_wide_int_const (wi::to_wide (addr->step), pointer_mode);
201 else
202 st = NULL_RTX;
204 if (addr->offset && !integer_zerop (addr->offset))
206 poly_offset_int dc
207 = poly_offset_int::from (wi::to_poly_wide (addr->offset), SIGNED);
208 off = immed_wide_int_const (dc, pointer_mode);
210 else
211 off = NULL_RTX;
213 if (!really_expand)
215 unsigned int templ_index
216 = TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);
218 if (templ_index >= vec_safe_length (mem_addr_template_list))
219 vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);
221 /* Reuse the templates for addresses, so that we do not waste memory. */
222 templ = &(*mem_addr_template_list)[templ_index];
223 if (!templ->ref)
225 sym = (addr->symbol ?
226 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
227 : NULL_RTX);
228 bse = (addr->base ?
229 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
230 : NULL_RTX);
231 idx = (addr->index ?
232 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
233 : NULL_RTX);
235 gen_addr_rtx (pointer_mode, sym, bse, idx,
236 st? const0_rtx : NULL_RTX,
237 off? const0_rtx : NULL_RTX,
238 &templ->ref,
239 &templ->step_p,
240 &templ->off_p);
243 if (st)
244 *templ->step_p = st;
245 if (off)
246 *templ->off_p = off;
248 return templ->ref;
251 /* Otherwise really expand the expressions. */
252 sym = (addr->symbol
253 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
254 : NULL_RTX);
255 bse = (addr->base
256 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
257 : NULL_RTX);
258 idx = (addr->index
259 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
260 : NULL_RTX);
262 /* addr->base could be an SSA_NAME that was set to a constant value. The
263 call to expand_expr may expose that constant. If so, fold the value
264 into OFF and clear BSE. Otherwise we may later try to pull a mode from
265 BSE to generate a REG, which won't work with constants because they
266 are modeless. */
267 if (bse && GET_CODE (bse) == CONST_INT)
269 if (off)
270 off = simplify_gen_binary (PLUS, pointer_mode, bse, off);
271 else
272 off = bse;
273 gcc_assert (GET_CODE (off) == CONST_INT);
274 bse = NULL_RTX;
276 gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
277 if (pointer_mode != address_mode)
278 address = convert_memory_address (address_mode, address);
279 return address;
282 /* implement addr_for_mem_ref() directly from a tree, which avoids exporting
283 the mem_address structure. */
286 addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
288 struct mem_address addr;
289 get_address_description (exp, &addr);
290 return addr_for_mem_ref (&addr, as, really_expand);
293 /* Returns address of MEM_REF in TYPE. */
295 tree
296 tree_mem_ref_addr (tree type, tree mem_ref)
298 tree addr;
299 tree act_elem;
300 tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
301 tree addr_base = NULL_TREE, addr_off = NULL_TREE;
303 addr_base = fold_convert (type, TMR_BASE (mem_ref));
305 act_elem = TMR_INDEX (mem_ref);
306 if (act_elem)
308 if (step)
309 act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
310 act_elem, step);
311 addr_off = act_elem;
314 act_elem = TMR_INDEX2 (mem_ref);
315 if (act_elem)
317 if (addr_off)
318 addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
319 addr_off, act_elem);
320 else
321 addr_off = act_elem;
324 if (offset && !integer_zerop (offset))
326 if (addr_off)
327 addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
328 fold_convert (TREE_TYPE (addr_off), offset));
329 else
330 addr_off = offset;
333 if (addr_off)
334 addr = fold_build_pointer_plus (addr_base, addr_off);
335 else
336 addr = addr_base;
338 return addr;
341 /* Returns true if a memory reference in MODE and with parameters given by
342 ADDR is valid on the current target. */
344 bool
345 valid_mem_ref_p (machine_mode mode, addr_space_t as,
346 struct mem_address *addr)
348 rtx address;
350 address = addr_for_mem_ref (addr, as, false);
351 if (!address)
352 return false;
354 return memory_address_addr_space_p (mode, address, as);
357 /* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
358 is valid on the current target and if so, creates and returns the
359 TARGET_MEM_REF. If VERIFY is false omit the verification step. */
361 static tree
362 create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
363 bool verify)
365 tree base, index2;
367 if (verify
368 && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
369 return NULL_TREE;
371 if (addr->step && integer_onep (addr->step))
372 addr->step = NULL_TREE;
374 if (addr->offset)
375 addr->offset = fold_convert (alias_ptr_type, addr->offset);
376 else
377 addr->offset = build_int_cst (alias_ptr_type, 0);
379 if (addr->symbol)
381 base = addr->symbol;
382 index2 = addr->base;
384 else if (addr->base
385 && POINTER_TYPE_P (TREE_TYPE (addr->base)))
387 base = addr->base;
388 index2 = NULL_TREE;
390 else
392 base = build_int_cst (build_pointer_type (type), 0);
393 index2 = addr->base;
396 /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
397 ??? As IVOPTs does not follow restrictions to where the base
398 pointer may point to create a MEM_REF only if we know that
399 base is valid. */
400 if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
401 && (!index2 || integer_zerop (index2))
402 && (!addr->index || integer_zerop (addr->index)))
403 return fold_build2 (MEM_REF, type, base, addr->offset);
405 return build5 (TARGET_MEM_REF, type,
406 base, addr->offset, addr->index, addr->step, index2);
409 /* Returns true if OBJ is an object whose address is a link time constant. */
411 static bool
412 fixed_address_object_p (tree obj)
414 return (VAR_P (obj)
415 && (TREE_STATIC (obj) || DECL_EXTERNAL (obj))
416 && ! DECL_DLLIMPORT_P (obj));
419 /* If ADDR contains an address of object that is a link time constant,
420 move it to PARTS->symbol. */
422 void
423 move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
425 unsigned i;
426 tree val = NULL_TREE;
428 for (i = 0; i < addr->n; i++)
430 if (addr->elts[i].coef != 1)
431 continue;
433 val = addr->elts[i].val;
434 if (TREE_CODE (val) == ADDR_EXPR
435 && fixed_address_object_p (TREE_OPERAND (val, 0)))
436 break;
439 if (i == addr->n)
440 return;
442 parts->symbol = val;
443 aff_combination_remove_elt (addr, i);
446 /* Return true if ADDR contains an instance of BASE_HINT and it's moved to
447 PARTS->base. */
449 static bool
450 move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
451 aff_tree *addr)
453 unsigned i;
454 tree val = NULL_TREE;
455 int qual;
457 for (i = 0; i < addr->n; i++)
459 if (addr->elts[i].coef != 1)
460 continue;
462 val = addr->elts[i].val;
463 if (operand_equal_p (val, base_hint, 0))
464 break;
467 if (i == addr->n)
468 return false;
470 /* Cast value to appropriate pointer type. We cannot use a pointer
471 to TYPE directly, as the back-end will assume registers of pointer
472 type are aligned, and just the base itself may not actually be.
473 We use void pointer to the type's address space instead. */
474 qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
475 type = build_qualified_type (void_type_node, qual);
476 parts->base = fold_convert (build_pointer_type (type), val);
477 aff_combination_remove_elt (addr, i);
478 return true;
481 /* If ADDR contains an address of a dereferenced pointer, move it to
482 PARTS->base. */
484 static void
485 move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
487 unsigned i;
488 tree val = NULL_TREE;
490 for (i = 0; i < addr->n; i++)
492 if (addr->elts[i].coef != 1)
493 continue;
495 val = addr->elts[i].val;
496 if (POINTER_TYPE_P (TREE_TYPE (val)))
497 break;
500 if (i == addr->n)
501 return;
503 parts->base = val;
504 aff_combination_remove_elt (addr, i);
507 /* Moves the loop variant part V in linear address ADDR to be the index
508 of PARTS. */
510 static void
511 move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
513 unsigned i;
514 tree val = NULL_TREE;
516 gcc_assert (!parts->index);
517 for (i = 0; i < addr->n; i++)
519 val = addr->elts[i].val;
520 if (operand_equal_p (val, v, 0))
521 break;
524 if (i == addr->n)
525 return;
527 parts->index = fold_convert (sizetype, val);
528 parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
529 aff_combination_remove_elt (addr, i);
532 /* Adds ELT to PARTS. */
534 static void
535 add_to_parts (struct mem_address *parts, tree elt)
537 tree type;
539 if (!parts->index)
541 parts->index = fold_convert (sizetype, elt);
542 return;
545 if (!parts->base)
547 parts->base = elt;
548 return;
551 /* Add ELT to base. */
552 type = TREE_TYPE (parts->base);
553 if (POINTER_TYPE_P (type))
554 parts->base = fold_build_pointer_plus (parts->base, elt);
555 else
556 parts->base = fold_build2 (PLUS_EXPR, type, parts->base, elt);
559 /* Returns true if multiplying by RATIO is allowed in an address. Test the
560 validity for a memory reference accessing memory of mode MODE in address
561 space AS. */
563 static bool
564 multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, machine_mode mode,
565 addr_space_t as)
567 #define MAX_RATIO 128
568 unsigned int data_index = (int) as * MAX_MACHINE_MODE + (int) mode;
569 static vec<sbitmap> valid_mult_list;
570 sbitmap valid_mult;
572 if (data_index >= valid_mult_list.length ())
573 valid_mult_list.safe_grow_cleared (data_index + 1);
575 valid_mult = valid_mult_list[data_index];
576 if (!valid_mult)
578 machine_mode address_mode = targetm.addr_space.address_mode (as);
579 rtx reg1 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1);
580 rtx reg2 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 2);
581 rtx addr, scaled;
582 HOST_WIDE_INT i;
584 valid_mult = sbitmap_alloc (2 * MAX_RATIO + 1);
585 bitmap_clear (valid_mult);
586 scaled = gen_rtx_fmt_ee (MULT, address_mode, reg1, NULL_RTX);
587 addr = gen_rtx_fmt_ee (PLUS, address_mode, scaled, reg2);
588 for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
590 XEXP (scaled, 1) = gen_int_mode (i, address_mode);
591 if (memory_address_addr_space_p (mode, addr, as)
592 || memory_address_addr_space_p (mode, scaled, as))
593 bitmap_set_bit (valid_mult, i + MAX_RATIO);
596 if (dump_file && (dump_flags & TDF_DETAILS))
598 fprintf (dump_file, " allowed multipliers:");
599 for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
600 if (bitmap_bit_p (valid_mult, i + MAX_RATIO))
601 fprintf (dump_file, " %d", (int) i);
602 fprintf (dump_file, "\n");
603 fprintf (dump_file, "\n");
606 valid_mult_list[data_index] = valid_mult;
609 if (ratio > MAX_RATIO || ratio < -MAX_RATIO)
610 return false;
612 return bitmap_bit_p (valid_mult, ratio + MAX_RATIO);
615 /* Finds the most expensive multiplication in ADDR that can be
616 expressed in an addressing mode and move the corresponding
617 element(s) to PARTS. */
619 static void
620 most_expensive_mult_to_index (tree type, struct mem_address *parts,
621 aff_tree *addr, bool speed)
623 addr_space_t as = TYPE_ADDR_SPACE (type);
624 machine_mode address_mode = targetm.addr_space.address_mode (as);
625 HOST_WIDE_INT coef;
626 unsigned best_mult_cost = 0, acost;
627 tree mult_elt = NULL_TREE, elt;
628 unsigned i, j;
629 enum tree_code op_code;
631 offset_int best_mult = 0;
632 for (i = 0; i < addr->n; i++)
634 if (!wi::fits_shwi_p (addr->elts[i].coef))
635 continue;
637 coef = addr->elts[i].coef.to_shwi ();
638 if (coef == 1
639 || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
640 continue;
642 acost = mult_by_coeff_cost (coef, address_mode, speed);
644 if (acost > best_mult_cost)
646 best_mult_cost = acost;
647 best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
651 if (!best_mult_cost)
652 return;
654 /* Collect elements multiplied by best_mult. */
655 for (i = j = 0; i < addr->n; i++)
657 offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
658 offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));
660 if (amult == best_mult)
661 op_code = PLUS_EXPR;
662 else if (amult_neg == best_mult)
663 op_code = MINUS_EXPR;
664 else
666 addr->elts[j] = addr->elts[i];
667 j++;
668 continue;
671 elt = fold_convert (sizetype, addr->elts[i].val);
672 if (mult_elt)
673 mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
674 else if (op_code == PLUS_EXPR)
675 mult_elt = elt;
676 else
677 mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
679 addr->n = j;
681 parts->index = mult_elt;
682 parts->step = wide_int_to_tree (sizetype, best_mult);
685 /* Splits address ADDR for a memory access of type TYPE into PARTS.
686 If BASE_HINT is non-NULL, it specifies an SSA name to be used
687 preferentially as base of the reference, and IV_CAND is the selected
688 iv candidate used in ADDR. Store true to VAR_IN_BASE if variant
689 part of address is split to PARTS.base.
691 TODO -- be more clever about the distribution of the elements of ADDR
692 to PARTS. Some architectures do not support anything but single
693 register in address, possibly with a small integer offset; while
694 create_mem_ref will simplify the address to an acceptable shape
695 later, it would be more efficient to know that asking for complicated
696 addressing modes is useless. */
698 static void
699 addr_to_parts (tree type, aff_tree *addr, tree iv_cand, tree base_hint,
700 struct mem_address *parts, bool *var_in_base, bool speed)
702 tree part;
703 unsigned i;
705 parts->symbol = NULL_TREE;
706 parts->base = NULL_TREE;
707 parts->index = NULL_TREE;
708 parts->step = NULL_TREE;
710 if (maybe_ne (addr->offset, 0))
711 parts->offset = wide_int_to_tree (sizetype, addr->offset);
712 else
713 parts->offset = NULL_TREE;
715 /* Try to find a symbol. */
716 move_fixed_address_to_symbol (parts, addr);
718 /* Since at the moment there is no reliable way to know how to
719 distinguish between pointer and its offset, we decide if var
720 part is the pointer based on guess. */
721 *var_in_base = (base_hint != NULL && parts->symbol == NULL);
722 if (*var_in_base)
723 *var_in_base = move_hint_to_base (type, parts, base_hint, addr);
724 else
725 move_variant_to_index (parts, addr, iv_cand);
727 /* First move the most expensive feasible multiplication to index. */
728 if (!parts->index)
729 most_expensive_mult_to_index (type, parts, addr, speed);
731 /* Move pointer into base. */
732 if (!parts->symbol && !parts->base)
733 move_pointer_to_base (parts, addr);
735 /* Then try to process the remaining elements. */
736 for (i = 0; i < addr->n; i++)
738 part = fold_convert (sizetype, addr->elts[i].val);
739 if (addr->elts[i].coef != 1)
740 part = fold_build2 (MULT_EXPR, sizetype, part,
741 wide_int_to_tree (sizetype, addr->elts[i].coef));
742 add_to_parts (parts, part);
744 if (addr->rest)
745 add_to_parts (parts, fold_convert (sizetype, addr->rest));
748 /* Force the PARTS to register. */
750 static void
751 gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
753 if (parts->base)
754 parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
755 is_gimple_mem_ref_addr, NULL_TREE,
756 true, GSI_SAME_STMT);
757 if (parts->index)
758 parts->index = force_gimple_operand_gsi (gsi, parts->index,
759 true, NULL_TREE,
760 true, GSI_SAME_STMT);
763 /* Return true if the OFFSET in PARTS is the only thing that is making
764 it an invalid address for type TYPE. */
766 static bool
767 mem_ref_valid_without_offset_p (tree type, mem_address parts)
769 if (!parts.base)
770 parts.base = parts.offset;
771 parts.offset = NULL_TREE;
772 return valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), &parts);
775 /* Fold PARTS->offset into PARTS->base, so that there is no longer
776 a separate offset. Emit any new instructions before GSI. */
778 static void
779 add_offset_to_base (gimple_stmt_iterator *gsi, mem_address *parts)
781 tree tmp = parts->offset;
782 if (parts->base)
784 tmp = fold_build_pointer_plus (parts->base, tmp);
785 tmp = force_gimple_operand_gsi_1 (gsi, tmp, is_gimple_mem_ref_addr,
786 NULL_TREE, true, GSI_SAME_STMT);
788 parts->base = tmp;
789 parts->offset = NULL_TREE;
792 /* Creates and returns a TARGET_MEM_REF for address ADDR. If necessary
793 computations are emitted in front of GSI. TYPE is the mode
794 of created memory reference. IV_CAND is the selected iv candidate in ADDR,
795 and BASE_HINT is non NULL if IV_CAND comes from a base address
796 object. */
798 tree
799 create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
800 tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
802 bool var_in_base;
803 tree mem_ref, tmp;
804 struct mem_address parts;
806 addr_to_parts (type, addr, iv_cand, base_hint, &parts, &var_in_base, speed);
807 gimplify_mem_ref_parts (gsi, &parts);
808 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
809 if (mem_ref)
810 return mem_ref;
812 /* The expression is too complicated. Try making it simpler. */
814 /* Merge symbol into other parts. */
815 if (parts.symbol)
817 tmp = parts.symbol;
818 parts.symbol = NULL_TREE;
819 gcc_assert (is_gimple_val (tmp));
821 if (parts.base)
823 gcc_assert (useless_type_conversion_p (sizetype,
824 TREE_TYPE (parts.base)));
826 if (parts.index)
828 /* Add the symbol to base, eventually forcing it to register. */
829 tmp = fold_build_pointer_plus (tmp, parts.base);
830 tmp = force_gimple_operand_gsi_1 (gsi, tmp,
831 is_gimple_mem_ref_addr,
832 NULL_TREE, true,
833 GSI_SAME_STMT);
835 else
837 /* Move base to index, then move the symbol to base. */
838 parts.index = parts.base;
840 parts.base = tmp;
842 else
843 parts.base = tmp;
845 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
846 if (mem_ref)
847 return mem_ref;
850 /* Move multiplication to index by transforming address expression:
851 [... + index << step + ...]
852 into:
853 index' = index << step;
854 [... + index' + ,,,]. */
855 if (parts.step && !integer_onep (parts.step))
857 gcc_assert (parts.index);
858 if (parts.offset && mem_ref_valid_without_offset_p (type, parts))
860 add_offset_to_base (gsi, &parts);
861 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
862 gcc_assert (mem_ref);
863 return mem_ref;
866 parts.index = force_gimple_operand_gsi (gsi,
867 fold_build2 (MULT_EXPR, sizetype,
868 parts.index, parts.step),
869 true, NULL_TREE, true, GSI_SAME_STMT);
870 parts.step = NULL_TREE;
872 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
873 if (mem_ref)
874 return mem_ref;
877 /* Add offset to invariant part by transforming address expression:
878 [base + index + offset]
879 into:
880 base' = base + offset;
881 [base' + index]
883 index' = index + offset;
884 [base + index']
885 depending on which one is invariant. */
886 if (parts.offset && !integer_zerop (parts.offset))
888 tree old_base = unshare_expr (parts.base);
889 tree old_index = unshare_expr (parts.index);
890 tree old_offset = unshare_expr (parts.offset);
892 tmp = parts.offset;
893 parts.offset = NULL_TREE;
894 /* Add offset to invariant part. */
895 if (!var_in_base)
897 if (parts.base)
899 tmp = fold_build_pointer_plus (parts.base, tmp);
900 tmp = force_gimple_operand_gsi_1 (gsi, tmp,
901 is_gimple_mem_ref_addr,
902 NULL_TREE, true,
903 GSI_SAME_STMT);
905 parts.base = tmp;
907 else
909 if (parts.index)
911 tmp = fold_build_pointer_plus (parts.index, tmp);
912 tmp = force_gimple_operand_gsi_1 (gsi, tmp,
913 is_gimple_mem_ref_addr,
914 NULL_TREE, true,
915 GSI_SAME_STMT);
917 parts.index = tmp;
920 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
921 if (mem_ref)
922 return mem_ref;
924 /* Restore parts.base, index and offset so that we can check if
925 [base + offset] addressing mode is supported in next step.
926 This is necessary for targets only support [base + offset],
927 but not [base + index] addressing mode. */
928 parts.base = old_base;
929 parts.index = old_index;
930 parts.offset = old_offset;
933 /* Transform [base + index + ...] into:
934 base' = base + index;
935 [base' + ...]. */
936 if (parts.index)
938 tmp = parts.index;
939 parts.index = NULL_TREE;
940 /* Add index to base. */
941 if (parts.base)
943 tmp = fold_build_pointer_plus (parts.base, tmp);
944 tmp = force_gimple_operand_gsi_1 (gsi, tmp,
945 is_gimple_mem_ref_addr,
946 NULL_TREE, true, GSI_SAME_STMT);
948 parts.base = tmp;
950 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
951 if (mem_ref)
952 return mem_ref;
955 /* Transform [base + offset] into:
956 base' = base + offset;
957 [base']. */
958 if (parts.offset && !integer_zerop (parts.offset))
960 add_offset_to_base (gsi, &parts);
961 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
962 if (mem_ref)
963 return mem_ref;
966 /* Verify that the address is in the simplest possible shape
967 (only a register). If we cannot create such a memory reference,
968 something is really wrong. */
969 gcc_assert (parts.symbol == NULL_TREE);
970 gcc_assert (parts.index == NULL_TREE);
971 gcc_assert (!parts.step || integer_onep (parts.step));
972 gcc_assert (!parts.offset || integer_zerop (parts.offset));
973 gcc_unreachable ();
976 /* Copies components of the address from OP to ADDR. */
978 void
979 get_address_description (tree op, struct mem_address *addr)
981 if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
983 addr->symbol = TMR_BASE (op);
984 addr->base = TMR_INDEX2 (op);
986 else
988 addr->symbol = NULL_TREE;
989 if (TMR_INDEX2 (op))
991 gcc_assert (integer_zerop (TMR_BASE (op)));
992 addr->base = TMR_INDEX2 (op);
994 else
995 addr->base = TMR_BASE (op);
997 addr->index = TMR_INDEX (op);
998 addr->step = TMR_STEP (op);
999 addr->offset = TMR_OFFSET (op);
1002 /* Copies the reference information from OLD_REF to NEW_REF, where
1003 NEW_REF should be either a MEM_REF or a TARGET_MEM_REF. */
1005 void
1006 copy_ref_info (tree new_ref, tree old_ref)
1008 tree new_ptr_base = NULL_TREE;
1010 gcc_assert (TREE_CODE (new_ref) == MEM_REF
1011 || TREE_CODE (new_ref) == TARGET_MEM_REF);
1013 TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
1014 TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);
1016 new_ptr_base = TREE_OPERAND (new_ref, 0);
1018 /* We can transfer points-to information from an old pointer
1019 or decl base to the new one. */
1020 if (new_ptr_base
1021 && TREE_CODE (new_ptr_base) == SSA_NAME
1022 && !SSA_NAME_PTR_INFO (new_ptr_base))
1024 tree base = get_base_address (old_ref);
1025 if (!base)
1027 else if ((TREE_CODE (base) == MEM_REF
1028 || TREE_CODE (base) == TARGET_MEM_REF)
1029 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1030 && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
1032 struct ptr_info_def *new_pi;
1033 unsigned int align, misalign;
1035 duplicate_ssa_name_ptr_info
1036 (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
1037 new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
1038 /* We have to be careful about transferring alignment information. */
1039 if (get_ptr_info_alignment (new_pi, &align, &misalign)
1040 && TREE_CODE (old_ref) == MEM_REF
1041 && !(TREE_CODE (new_ref) == TARGET_MEM_REF
1042 && (TMR_INDEX2 (new_ref)
1043 /* TODO: Below conditions can be relaxed if TMR_INDEX
1044 is an indcution variable and its initial value and
1045 step are aligned. */
1046 || (TMR_INDEX (new_ref) && !TMR_STEP (new_ref))
1047 || (TMR_STEP (new_ref)
1048 && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
1049 < align)))))
1051 poly_uint64 inc = (mem_ref_offset (old_ref)
1052 - mem_ref_offset (new_ref)).force_uhwi ();
1053 adjust_ptr_info_misalignment (new_pi, inc);
1055 else
1056 mark_ptr_info_alignment_unknown (new_pi);
1058 else if (VAR_P (base)
1059 || TREE_CODE (base) == PARM_DECL
1060 || TREE_CODE (base) == RESULT_DECL)
1062 struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
1063 pt_solution_set_var (&pi->pt, base);
1068 /* Move constants in target_mem_ref REF to offset. Returns the new target
1069 mem ref if anything changes, NULL_TREE otherwise. */
1071 tree
1072 maybe_fold_tmr (tree ref)
1074 struct mem_address addr;
1075 bool changed = false;
1076 tree new_ref, off;
1078 get_address_description (ref, &addr);
1080 if (addr.base
1081 && TREE_CODE (addr.base) == INTEGER_CST
1082 && !integer_zerop (addr.base))
1084 addr.offset = fold_binary_to_constant (PLUS_EXPR,
1085 TREE_TYPE (addr.offset),
1086 addr.offset, addr.base);
1087 addr.base = NULL_TREE;
1088 changed = true;
1091 if (addr.symbol
1092 && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
1094 addr.offset = fold_binary_to_constant
1095 (PLUS_EXPR, TREE_TYPE (addr.offset),
1096 addr.offset,
1097 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
1098 addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
1099 changed = true;
1101 else if (addr.symbol
1102 && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
1104 poly_int64 offset;
1105 addr.symbol = build_fold_addr_expr
1106 (get_addr_base_and_unit_offset
1107 (TREE_OPERAND (addr.symbol, 0), &offset));
1108 addr.offset = int_const_binop (PLUS_EXPR,
1109 addr.offset, size_int (offset));
1110 changed = true;
1113 if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
1115 off = addr.index;
1116 if (addr.step)
1118 off = fold_binary_to_constant (MULT_EXPR, sizetype,
1119 off, addr.step);
1120 addr.step = NULL_TREE;
1123 addr.offset = fold_binary_to_constant (PLUS_EXPR,
1124 TREE_TYPE (addr.offset),
1125 addr.offset, off);
1126 addr.index = NULL_TREE;
1127 changed = true;
1130 if (!changed)
1131 return NULL_TREE;
1133 /* If we have propagated something into this TARGET_MEM_REF and thus
1134 ended up folding it, always create a new TARGET_MEM_REF regardless
1135 if it is valid in this for on the target - the propagation result
1136 wouldn't be anyway. */
1137 new_ref = create_mem_ref_raw (TREE_TYPE (ref),
1138 TREE_TYPE (addr.offset), &addr, false);
1139 TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
1140 TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
1141 return new_ref;
1144 /* Return the preferred index scale factor for accessing memory of mode
1145 MEM_MODE in the address space of pointer BASE. Assume that we're
1146 optimizing for speed if SPEED is true and for size otherwise. */
1147 unsigned int
1148 preferred_mem_scale_factor (tree base, machine_mode mem_mode,
1149 bool speed)
1151 /* For BLKmode, we can't do anything so return 1. */
1152 if (mem_mode == BLKmode)
1153 return 1;
1155 struct mem_address parts = {};
1156 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1157 unsigned int fact = GET_MODE_UNIT_SIZE (mem_mode);
1159 /* Addressing mode "base + index". */
1160 parts.index = integer_one_node;
1161 parts.base = integer_one_node;
1162 rtx addr = addr_for_mem_ref (&parts, as, false);
1163 unsigned cost = address_cost (addr, mem_mode, as, speed);
1165 /* Addressing mode "base + index << scale". */
1166 parts.step = wide_int_to_tree (sizetype, fact);
1167 addr = addr_for_mem_ref (&parts, as, false);
1168 unsigned new_cost = address_cost (addr, mem_mode, as, speed);
1170 /* Compare the cost of an address with an unscaled index with
1171 a scaled index and return factor if useful. */
1172 if (new_cost < cost)
1173 return GET_MODE_UNIT_SIZE (mem_mode);
1174 return 1;
1177 /* Dump PARTS to FILE. */
1179 extern void dump_mem_address (FILE *, struct mem_address *);
1180 void
1181 dump_mem_address (FILE *file, struct mem_address *parts)
1183 if (parts->symbol)
1185 fprintf (file, "symbol: ");
1186 print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
1187 fprintf (file, "\n");
1189 if (parts->base)
1191 fprintf (file, "base: ");
1192 print_generic_expr (file, parts->base, TDF_SLIM);
1193 fprintf (file, "\n");
1195 if (parts->index)
1197 fprintf (file, "index: ");
1198 print_generic_expr (file, parts->index, TDF_SLIM);
1199 fprintf (file, "\n");
1201 if (parts->step)
1203 fprintf (file, "step: ");
1204 print_generic_expr (file, parts->step, TDF_SLIM);
1205 fprintf (file, "\n");
1207 if (parts->offset)
1209 fprintf (file, "offset: ");
1210 print_generic_expr (file, parts->offset, TDF_SLIM);
1211 fprintf (file, "\n");
1215 #include "gt-tree-ssa-address.h"