/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
   that directly map to addressing modes of the target.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "insn-config.h"
#include "recog.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-ssa-loop-ivopts.h"
#include "expr.h"
#include "tree-dfa.h"
#include "dumpfile.h"
#include "tree-affine.h"

/* FIXME: We compute address costs using RTL.  */
#include "tree-ssa-address.h"
/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, e.g. local statics
     (3) !binds_local_p, e.g. global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3).  And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause incorrect results out the
   back end, because the expander in expr.c validizes the address.  However
   it would be nice to improve the handling here in order to produce more
   precise results.  */
75 /* A "template" for memory address, used to determine whether the address is
76 valid for mode. */
78 struct GTY (()) mem_addr_template {
79 rtx ref; /* The template. */
80 rtx * GTY ((skip)) step_p; /* The point in template where the step should be
81 filled in. */
82 rtx * GTY ((skip)) off_p; /* The point in template where the offset should
83 be filled in. */
87 /* The templates. Each of the low five bits of the index corresponds to one
88 component of TARGET_MEM_REF being present, while the high bits identify
89 the address space. See TEMPL_IDX. */
91 static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;
#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
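
/* As a worked illustration of the encoding (not part of the upstream
   comments): an address in address space 0 that has a base, an index and a
   nonzero offset, but no symbol and no step, gets

     TEMPL_IDX (0, NULL, base, index, NULL, offset)
       == (0 << 5) | (0 << 4) | (1 << 3) | (1 << 2) | (0 << 1) | 1 == 13.  */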
/* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
   STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores pointers
   to where step is placed to *STEP_P and offset to *OFFSET_P.  */

static void
gen_addr_rtx (machine_mode address_mode,
              rtx symbol, rtx base, rtx index, rtx step, rtx offset,
              rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  if (index)
    {
      act_elem = index;
      if (step)
        {
          act_elem = gen_rtx_MULT (address_mode, act_elem, step);

          if (step_p)
            *step_p = &XEXP (act_elem, 1);
        }

      *addr = act_elem;
    }

  if (base && base != const0_rtx)
    {
      if (*addr)
        *addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
        *addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
        {
          act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

          if (offset_p)
            *offset_p = &XEXP (act_elem, 1);

          if (GET_CODE (symbol) == SYMBOL_REF
              || GET_CODE (symbol) == LABEL_REF
              || GET_CODE (symbol) == CONST)
            act_elem = gen_rtx_CONST (address_mode, act_elem);
        }

      if (*addr)
        *addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
        *addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
        {
          *addr = gen_rtx_PLUS (address_mode, *addr, offset);
          if (offset_p)
            *offset_p = &XEXP (*addr, 1);
        }
      else
        {
          *addr = offset;
          if (offset_p)
            *offset_p = addr;
        }
    }

  if (!*addr)
    *addr = const0_rtx;
}
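
/* For illustration (assuming SYMBOL is a SYMBOL_REF): with all of SYMBOL,
   BASE, INDEX, STEP and OFFSET present, the address built above has the shape

     (plus (plus BASE (mult INDEX STEP)) (const (plus SYMBOL OFFSET)))

   while with only BASE and OFFSET present it degenerates to
   (plus BASE OFFSET).  */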
/* Description of a memory address.  */

struct mem_address
{
  tree symbol, base, index, step, offset;
};
/* Returns address for TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
                  bool really_expand)
{
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  if (addr->step && !integer_onep (addr->step))
    st = immed_wide_int_const (addr->step, pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    {
      offset_int dc = offset_int::from (addr->offset, SIGNED);
      off = immed_wide_int_const (dc, pointer_mode);
    }
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
        = TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
        vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
        {
          sym = (addr->symbol ?
                 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
                 : NULL_RTX);
          bse = (addr->base ?
                 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
                 : NULL_RTX);
          idx = (addr->index ?
                 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
                 : NULL_RTX);

          gen_addr_rtx (pointer_mode, sym, bse, idx,
                        st ? const0_rtx : NULL_RTX,
                        off ? const0_rtx : NULL_RTX,
                        &templ->ref,
                        &templ->step_p,
                        &templ->off_p);
        }

      if (st)
        *templ->step_p = st;
      if (off)
        *templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
         ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
         : NULL_RTX);
  bse = (addr->base
         ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
         : NULL_RTX);
  idx = (addr->index
         ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
         : NULL_RTX);

  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}
/* Implement addr_for_mem_ref() directly from a tree, which avoids exporting
   the mem_address structure.  */

rtx
addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
{
  struct mem_address addr;
  get_address_description (exp, &addr);
  return addr_for_mem_ref (&addr, as, really_expand);
}
/* Returns address of MEM_REF in TYPE.  */

tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
        act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
                                act_elem, step);
      addr_off = act_elem;
    }

  act_elem = TMR_INDEX2 (mem_ref);
  if (act_elem)
    {
      if (addr_off)
        addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
                                addr_off, act_elem);
      else
        addr_off = act_elem;
    }

  if (offset && !integer_zerop (offset))
    {
      if (addr_off)
        addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
                                fold_convert (TREE_TYPE (addr_off), offset));
      else
        addr_off = offset;
    }

  if (addr_off)
    addr = fold_build_pointer_plus (addr_base, addr_off);
  else
    addr = addr_base;

  return addr;
}
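
/* Conceptually, for a TARGET_MEM_REF with base B, index I, step S, second
   index I2 and offset O, the address built above is

     (type) B p+ (I * S + I2 + O)

   where "p+" stands for POINTER_PLUS_EXPR and absent or zero components are
   simply skipped.  */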
/* Returns true if a memory reference in MODE and with parameters given by
   ADDR is valid on the current target.  */

static bool
valid_mem_ref_p (machine_mode mode, addr_space_t as,
                 struct mem_address *addr)
{
  rtx address;

  address = addr_for_mem_ref (addr, as, false);
  if (!address)
    return false;

  return memory_address_addr_space_p (mode, address, as);
}
/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false omit the verification step.  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
                    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
           && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (build_pointer_type (type), 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ???  As IVOPTs does not follow restrictions to where the base
     pointer may point to create a MEM_REF only if we know that
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
                 base, addr->offset, addr->index, addr->step, index2);
}
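
/* For instance, when ADDR consists only of a constant base such as &a plus a
   constant offset, the code above produces the plain MEM_REF <&a, offset>;
   as soon as a nonzero index or second index is needed, a TARGET_MEM_REF is
   built instead.  */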
/* Returns true if OBJ is an object whose address is a link time constant.  */

static bool
fixed_address_object_p (tree obj)
{
  return (VAR_P (obj)
          && (TREE_STATIC (obj) || DECL_EXTERNAL (obj))
          && ! DECL_DLLIMPORT_P (obj));
}
/* If ADDR contains an address of an object that is a link time constant,
   move it to PARTS->symbol.  */

static void
move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
        continue;

      val = addr->elts[i].val;
      if (TREE_CODE (val) == ADDR_EXPR
          && fixed_address_object_p (TREE_OPERAND (val, 0)))
        break;
    }

  if (i == addr->n)
    return;

  parts->symbol = val;
  aff_combination_remove_elt (addr, i);
}
/* If ADDR contains an instance of BASE_HINT, move it to PARTS->base.  */

static void
move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
                   aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;
  int qual;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
        continue;

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))
        break;
    }

  if (i == addr->n)
    return;

  /* Cast value to appropriate pointer type.  We cannot use a pointer
     to TYPE directly, as the back-end will assume registers of pointer
     type are aligned, and just the base itself may not actually be.
     We use void pointer to the type's address space instead.  */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
}
/* If ADDR contains an address of a dereferenced pointer, move it to
   PARTS->base.  */

static void
move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
        continue;

      val = addr->elts[i].val;
      if (POINTER_TYPE_P (TREE_TYPE (val)))
        break;
    }

  if (i == addr->n)
    return;

  parts->base = val;
  aff_combination_remove_elt (addr, i);
}
/* Moves the loop variant part V in linear address ADDR to be the index
   of PARTS.  */

static void
move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
{
  unsigned i;
  tree val = NULL_TREE;

  gcc_assert (!parts->index);
  for (i = 0; i < addr->n; i++)
    {
      val = addr->elts[i].val;
      if (operand_equal_p (val, v, 0))
        break;
    }

  if (i == addr->n)
    return;

  parts->index = fold_convert (sizetype, val);
  parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
  aff_combination_remove_elt (addr, i);
}
/* Adds ELT to PARTS.  */

static void
add_to_parts (struct mem_address *parts, tree elt)
{
  tree type;

  if (!parts->index)
    {
      parts->index = fold_convert (sizetype, elt);
      return;
    }

  if (!parts->base)
    {
      parts->base = elt;
      return;
    }

  /* Add ELT to base.  */
  type = TREE_TYPE (parts->base);
  if (POINTER_TYPE_P (type))
    parts->base = fold_build_pointer_plus (parts->base, elt);
  else
    parts->base = fold_build2 (PLUS_EXPR, type,
                               parts->base, elt);
}
/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and moves the corresponding
   element(s) to PARTS.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
                              aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  offset_int best_mult = 0;
  for (i = 0; i < addr->n; i++)
    {
      if (!wi::fits_shwi_p (addr->elts[i].coef))
        continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
          || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
        continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
        {
          best_mult_cost = acost;
          best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
        }
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  */
  for (i = j = 0; i < addr->n; i++)
    {
      offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
      offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));

      if (amult == best_mult)
        op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
        op_code = MINUS_EXPR;
      else
        {
          addr->elts[j] = addr->elts[i];
          j++;
          continue;
        }

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
        mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
        mult_elt = elt;
      else
        mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  parts->index = mult_elt;
  parts->step = wide_int_to_tree (sizetype, best_mult);
}
/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but a single
   register in an address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
               tree base_hint, struct mem_address *parts,
               bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  if (addr->offset != 0)
    parts->offset = wide_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* No need to do address parts reassociation if the number of parts
     is <= 2 -- in that case, no loop invariant code motion can be
     exposed.  */

  if (!base_hint && (addr->n > 2))
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication
     to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Try to find a base of the reference.  Since at the moment
     there is no reliable way to distinguish between a pointer and its
     offset, this is just a guess.  */
  if (!parts->symbol && base_hint)
    move_hint_to_base (type, parts, base_hint, addr);
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (addr->elts[i].coef != 1)
        part = fold_build2 (MULT_EXPR, sizetype, part,
                            wide_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
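
/* A hypothetical example: for an access a[i + 3] into a global array a of
   4-byte elements, ADDR is roughly &a + 4 * i + 12 and, on a target whose
   addressing modes allow a multiplier of 4, is split into symbol == &a,
   index == (sizetype) i, step == 4 and offset == 12, with base left empty;
   create_mem_ref_raw later decides whether the target really supports this
   combination.  */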
/* Force the PARTS to register.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
                                              is_gimple_mem_ref_addr, NULL_TREE,
                                              true, GSI_SAME_STMT);
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
                                             true, NULL_TREE,
                                             true, GSI_SAME_STMT);
}
/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary,
   computations are emitted in front of GSI.  TYPE is the type of the created
   memory reference.  IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non-NULL if IV_CAND comes from a base address
   object.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
                tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  if (parts.step && !integer_onep (parts.step))
    {
      /* Move the multiplication to index.  */
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
                                fold_build2 (MULT_EXPR, sizetype,
                                             parts.index, parts.step),
                                true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  if (parts.symbol)
    {
      tmp = parts.symbol;
      gcc_assert (is_gimple_val (tmp));

      /* Add the symbol to base, eventually forcing it to register.  */
      if (parts.base)
        {
          gcc_assert (useless_type_conversion_p
                      (sizetype, TREE_TYPE (parts.base)));

          if (parts.index)
            {
              parts.base = force_gimple_operand_gsi_1 (gsi,
                        fold_build_pointer_plus (tmp, parts.base),
                        is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
            }
          else
            {
              parts.index = parts.base;
              parts.base = tmp;
            }
        }
      else
        parts.base = tmp;
      parts.symbol = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  if (parts.index)
    {
      /* Add index to base.  */
      if (parts.base)
        {
          parts.base = force_gimple_operand_gsi_1 (gsi,
                        fold_build_pointer_plus (parts.base, parts.index),
                        is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
        }
      else
        parts.base = parts.index;
      parts.index = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Try adding offset to base.  */
      if (parts.base)
        {
          parts.base = force_gimple_operand_gsi_1 (gsi,
                        fold_build_pointer_plus (parts.base, parts.offset),
                        is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
        }
      else
        parts.base = parts.offset;

      parts.offset = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
/* Copies components of the address from OP to ADDR.  */

void
get_address_description (tree op, struct mem_address *addr)
{
  if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
    {
      addr->symbol = TMR_BASE (op);
      addr->base = TMR_INDEX2 (op);
    }
  else
    {
      addr->symbol = NULL_TREE;
      if (TMR_INDEX2 (op))
        {
          gcc_assert (integer_zerop (TMR_BASE (op)));
          addr->base = TMR_INDEX2 (op);
        }
      else
        addr->base = TMR_BASE (op);
    }
  addr->index = TMR_INDEX (op);
  addr->step = TMR_STEP (op);
  addr->offset = TMR_OFFSET (op);
}
/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
              || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
        ;
      else if ((TREE_CODE (base) == MEM_REF
                || TREE_CODE (base) == TARGET_MEM_REF)
               && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
               && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
        {
          struct ptr_info_def *new_pi;
          unsigned int align, misalign;

          duplicate_ssa_name_ptr_info
            (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
          new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
          /* We have to be careful about transferring alignment information.  */
          if (get_ptr_info_alignment (new_pi, &align, &misalign)
              && TREE_CODE (old_ref) == MEM_REF
              && !(TREE_CODE (new_ref) == TARGET_MEM_REF
                   && (TMR_INDEX2 (new_ref)
                       /* TODO: Below conditions can be relaxed if TMR_INDEX
                          is an induction variable and its initial value and
                          step are aligned.  */
                       || (TMR_INDEX (new_ref) && !TMR_STEP (new_ref))
                       || (TMR_STEP (new_ref)
                           && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
                               < align)))))
            {
              unsigned int inc = (mem_ref_offset (old_ref).to_short_addr ()
                                  - mem_ref_offset (new_ref).to_short_addr ());
              adjust_ptr_info_misalignment (new_pi, inc);
            }
          else
            mark_ptr_info_alignment_unknown (new_pi);
        }
      else if (VAR_P (base)
               || TREE_CODE (base) == PARM_DECL
               || TREE_CODE (base) == RESULT_DECL)
        {
          struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
          pt_solution_set_var (&pi->pt, base);
        }
    }
}
/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
                                             TREE_TYPE (addr.offset),
                                             addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
                      (PLUS_EXPR, TREE_TYPE (addr.offset),
                       addr.offset,
                       TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
           && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      HOST_WIDE_INT offset;
      addr.symbol = build_fold_addr_expr
                      (get_addr_base_and_unit_offset
                         (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
                                     addr.offset, size_int (offset));
      changed = true;
    }

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
        {
          off = fold_binary_to_constant (MULT_EXPR, sizetype,
                                         off, addr.step);
          addr.step = NULL_TREE;
        }

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
                                             TREE_TYPE (addr.offset),
                                             addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target - the propagation
     result wouldn't be valid anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
                                TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
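
/* For example, a TARGET_MEM_REF whose index is the constant 4 and whose step
   is 8 is rewritten by the code above into an equivalent reference with no
   index and with 4 * 8 == 32 added to its offset.  */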
/* Dump PARTS to FILE.  */

extern void dump_mem_address (FILE *, struct mem_address *);
void
dump_mem_address (FILE *file, struct mem_address *parts)
{
  if (parts->symbol)
    {
      fprintf (file, "symbol: ");
      print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->base)
    {
      fprintf (file, "base: ");
      print_generic_expr (file, parts->base, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->index)
    {
      fprintf (file, "index: ");
      print_generic_expr (file, parts->index, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->step)
    {
      fprintf (file, "step: ");
      print_generic_expr (file, parts->step, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->offset)
    {
      fprintf (file, "offset: ");
      print_generic_expr (file, parts->offset, TDF_SLIM);
      fprintf (file, "\n");
    }
}
#include "gt-tree-ssa-address.h"