Remove assert in get_def_bb_for_const
[official-gcc.git] / gcc / tree-ssa-address.c
blobb04545c2ed55a6908d2ef61b244cfdd7dcf89d02
1 /* Memory address lowering and addressing mode selection.
2 Copyright (C) 2004-2016 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any
9 later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
21 that directly map to addressing modes of the target. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "stringpool.h"
32 #include "tree-ssanames.h"
33 #include "expmed.h"
34 #include "insn-config.h"
35 #include "recog.h"
36 #include "tree-pretty-print.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "gimple-iterator.h"
40 #include "gimplify-me.h"
41 #include "tree-ssa-loop-ivopts.h"
42 #include "expr.h"
43 #include "tree-dfa.h"
44 #include "dumpfile.h"
45 #include "tree-affine.h"
47 /* FIXME: We compute address costs using RTL. */
48 #include "tree-ssa-address.h"
50 /* TODO -- handling of symbols (according to Richard Hendersons
51 comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):
53 There are at least 5 different kinds of symbols that we can run up against:
55 (1) binds_local_p, small data area.
56 (2) binds_local_p, eg local statics
57 (3) !binds_local_p, eg global variables
58 (4) thread local, local_exec
59 (5) thread local, !local_exec
61 Now, (1) won't appear often in an array context, but it certainly can.
62 All you have to do is set -GN high enough, or explicitly mark any
63 random object __attribute__((section (".sdata"))).
65 All of these affect whether or not a symbol is in fact a valid address.
66 The only one tested here is (3). And that result may very well
67 be incorrect for (4) or (5).
69 An incorrect result here does not cause incorrect results out the
70 back end, because the expander in expr.c validizes the address. However
71 it would be nice to improve the handling here in order to produce more
72 precise results. */
/* A "template" for memory address, used to determine whether the address is
   valid for mode.  The template is a skeleton RTX with placeholder slots for
   the step and offset constants, so that one shared RTX per address shape can
   be patched and re-validated cheaply.  */

struct GTY (()) mem_addr_template {
  rtx ref;			/* The template.  */
  rtx * GTY ((skip)) step_p;	/* The point in template where the step should be
				   filled in.  */
  rtx * GTY ((skip)) off_p;	/* The point in template where the offset should
				   be filled in.  */
};

/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  */

static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;
/* Index into mem_addr_template_list: the address space in the high bits,
   and one presence bit for each of the five TARGET_MEM_REF components.  */
#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
/* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
   STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores pointers
   to where step is placed to *STEP_P and offset to *OFFSET_P, so callers can
   later patch those constants in place (see mem_addr_template).  Any of
   SYMBOL, BASE, INDEX, STEP and OFFSET may be NULL; a fully empty address
   becomes const0_rtx.  */

static void
gen_addr_rtx (machine_mode address_mode,
	      rtx symbol, rtx base, rtx index, rtx step, rtx offset,
	      rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  if (index)
    {
      act_elem = index;
      if (step)
	{
	  act_elem = gen_rtx_MULT (address_mode, act_elem, step);

	  if (step_p)
	    *step_p = &XEXP (act_elem, 1);
	}

      *addr = act_elem;
    }

  if (base && base != const0_rtx)
    {
      if (*addr)
	*addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
	*addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
	{
	  act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

	  if (offset_p)
	    *offset_p = &XEXP (act_elem, 1);

	  /* Wrap (symbol + offset) in CONST so the combination is
	     recognized as a link-time constant.  */
	  if (GET_CODE (symbol) == SYMBOL_REF
	      || GET_CODE (symbol) == LABEL_REF
	      || GET_CODE (symbol) == CONST)
	    act_elem = gen_rtx_CONST (address_mode, act_elem);
	}

      if (*addr)
	*addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
	*addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
	{
	  *addr = gen_rtx_PLUS (address_mode, *addr, offset);
	  if (offset_p)
	    *offset_p = &XEXP (*addr, 1);
	}
      else
	{
	  /* The offset is the whole address; the patch point is *ADDR
	     itself in this case.  */
	  *addr = offset;
	  if (offset_p)
	    *offset_p = addr;
	}
    }

  if (!*addr)
    *addr = const0_rtx;
}
/* Description of a memory address broken into the five TARGET_MEM_REF
   components: symbol + base + index * step + offset.  Any field may be
   NULL_TREE when absent.  */

struct mem_address
{
  tree symbol, base, index, step, offset;
};
/* Returns address for TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)
{
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  /* A step of 1 adds nothing to the address shape; drop it.  */
  if (addr->step && !integer_onep (addr->step))
    st = immed_wide_int_const (addr->step, pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    {
      offset_int dc = offset_int::from (addr->offset, SIGNED);
      off = immed_wide_int_const (dc, pointer_mode);
    }
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
	vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
	{
	  /* Build the template with a dummy symbol and fake (raw) registers;
	     only the shape matters for validity checking.  */
	  sym = (addr->symbol ?
		 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
		 : NULL_RTX);
	  bse = (addr->base ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
		 : NULL_RTX);
	  idx = (addr->index ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
		 : NULL_RTX);

	  gen_addr_rtx (pointer_mode, sym, bse, idx,
			st? const0_rtx : NULL_RTX,
			off? const0_rtx : NULL_RTX,
			&templ->ref,
			&templ->step_p,
			&templ->off_p);
	}

      /* Patch the actual constants into the shared template.  */
      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}
/* Implement addr_for_mem_ref () directly from a TARGET_MEM_REF tree EXP,
   which avoids exporting the mem_address structure.  AS and REALLY_EXPAND
   are as for the struct overload above.  */

rtx
addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
{
  struct mem_address addr;
  get_address_description (exp, &addr);
  return addr_for_mem_ref (&addr, as, really_expand);
}
/* Returns address of MEM_REF in TYPE.  Reassembles the components of the
   TARGET_MEM_REF into a plain address expression:
   base + index * step + index2 + offset.  */

tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
	act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
				act_elem, step);
      addr_off = act_elem;
    }

  act_elem = TMR_INDEX2 (mem_ref);
  if (act_elem)
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
				addr_off, act_elem);
      else
	addr_off = act_elem;
    }

  if (offset && !integer_zerop (offset))
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
				fold_convert (TREE_TYPE (addr_off), offset));
      else
	addr_off = offset;
    }

  if (addr_off)
    addr = fold_build_pointer_plus (addr_base, addr_off);
  else
    addr = addr_base;

  return addr;
}
/* Returns true if a memory reference in MODE and with parameters given by
   ADDR is valid on the current target.  Uses the template machinery
   (really_expand == false) so no real expansion happens.  */

static bool
valid_mem_ref_p (machine_mode mode, addr_space_t as,
		 struct mem_address *addr)
{
  rtx address;

  address = addr_for_mem_ref (addr, as, false);
  if (!address)
    return false;

  return memory_address_addr_space_p (mode, address, as);
}
/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false omit the verification step.
   Returns NULL_TREE when verification fails.  Note that ADDR is normalized
   in place (unit step dropped, offset canonicalized to ALIAS_PTR_TYPE).  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  /* Decide which operand carries the base: a symbol goes in TMR_BASE with
     the pointer base demoted to TMR_INDEX2; a pointer-typed base stays in
     TMR_BASE; anything else gets a zero pointer base.  */
  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (build_pointer_type (type), 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ???  As IVOPTs does not follow restrictions to where the base
     pointer may point to create a MEM_REF only if we know that
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}
/* Returns true if OBJ is an object whose address is a link time constant:
   a static or external variable that is not DLL-imported (a DLL import's
   address is only known at load time).  */

static bool
fixed_address_object_p (tree obj)
{
  return (TREE_CODE (obj) == VAR_DECL
	  && (TREE_STATIC (obj)
	      || DECL_EXTERNAL (obj))
	  && ! DECL_DLLIMPORT_P (obj));
}
/* If ADDR contains an address of object that is a link time constant,
   move it to PARTS->symbol.  Only elements with coefficient 1 qualify,
   since the symbol component of a TARGET_MEM_REF is not scaled.  */

static void
move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (TREE_CODE (val) == ADDR_EXPR
	  && fixed_address_object_p (TREE_OPERAND (val, 0)))
	break;
    }

  if (i == addr->n)
    return;

  parts->symbol = val;
  aff_combination_remove_elt (addr, i);
}
/* If ADDR contains an instance of BASE_HINT, move it to PARTS->base.
   Only unscaled (coefficient 1) elements are considered.  */

static void
move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
		   aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;
  int qual;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))
	break;
    }

  if (i == addr->n)
    return;

  /* Cast value to appropriate pointer type.  We cannot use a pointer
     to TYPE directly, as the back-end will assume registers of pointer
     type are aligned, and just the base itself may not actually be.
     We use void pointer to the type's address space instead.  */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
}
/* If ADDR contains an address of a dereferenced pointer, move it to
   PARTS->base.  The first unscaled pointer-typed element found wins.  */

static void
move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (POINTER_TYPE_P (TREE_TYPE (val)))
	break;
    }

  if (i == addr->n)
    return;

  parts->base = val;
  aff_combination_remove_elt (addr, i);
}
/* Moves the loop variant part V in linear address ADDR to be the index
   of PARTS.  The element's coefficient becomes the step.  Does nothing
   if V does not occur in ADDR.  */

static void
move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
{
  unsigned i;
  tree val = NULL_TREE;

  gcc_assert (!parts->index);
  for (i = 0; i < addr->n; i++)
    {
      val = addr->elts[i].val;
      if (operand_equal_p (val, v, 0))
	break;
    }

  if (i == addr->n)
    return;

  parts->index = fold_convert (sizetype, val);
  parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
  aff_combination_remove_elt (addr, i);
}
/* Adds ELT to PARTS: into the empty index slot first, then the empty base
   slot, and otherwise folded into the base with the appropriate addition
   (pointer-plus for pointer bases, PLUS_EXPR otherwise).  */

static void
add_to_parts (struct mem_address *parts, tree elt)
{
  tree type;

  if (!parts->index)
    {
      parts->index = fold_convert (sizetype, elt);
      return;
    }

  if (!parts->base)
    {
      parts->base = elt;
      return;
    }

  /* Add ELT to base.  */
  type = TREE_TYPE (parts->base);
  if (POINTER_TYPE_P (type))
    parts->base = fold_build_pointer_plus (parts->base, elt);
  else
    parts->base = fold_build2 (PLUS_EXPR, type,
			       parts->base, elt);
}
/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and move the corresponding
   element(s) to PARTS.  Elements multiplied by the chosen coefficient
   (or its negation) are combined into a single index expression; the
   coefficient becomes the step.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
			      aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  offset_int best_mult = 0;
  for (i = 0; i < addr->n; i++)
    {
      if (!wi::fits_shwi_p (addr->elts[i].coef))
	continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
	  || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
	continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
	{
	  best_mult_cost = acost;
	  best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
	}
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  Elements with the negated
     coefficient are subtracted instead of added.  */
  for (i = j = 0; i < addr->n; i++)
    {
      offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
      offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));

      if (amult == best_mult)
	op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
	op_code = MINUS_EXPR;
      else
	{
	  /* Keep unmatched elements, compacting the array in place.  */
	  addr->elts[j] = addr->elts[i];
	  j++;
	  continue;
	}

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
	mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
	mult_elt = elt;
      else
	mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  parts->index = mult_elt;
  parts->step = wide_int_to_tree (sizetype, best_mult);
}
/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but single
   register in address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
	       tree base_hint, struct mem_address *parts,
	       bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  if (addr->offset != 0)
    parts->offset = wide_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* No need to do address parts reassociation if the number of parts
     is <= 2 -- in that case, no loop invariant code motion can be
     exposed.  */

  if (!base_hint && (addr->n > 2))
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication
     to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Try to find a base of the reference.  Since at the moment
     there is no reliable way how to distinguish between pointer and its
     offset, this is just a guess.  */
  if (!parts->symbol && base_hint)
    move_hint_to_base (type, parts, base_hint, addr);
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (addr->elts[i].coef != 1)
	part = fold_build2 (MULT_EXPR, sizetype, part,
			    wide_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
/* Force the PARTS to register: gimplify the base and index so each is a
   valid MEM_REF address operand / gimple value, emitting any needed
   statements before GSI.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
					      is_gimple_mem_ref_addr, NULL_TREE,
					      true, GSI_SAME_STMT);
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
					     true, NULL_TREE,
					     true, GSI_SAME_STMT);
}
/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary
   computations are emitted in front of GSI.  TYPE is the mode
   of created memory reference.  IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non NULL if IV_CAND comes from a base address
   object.

   Starts from the richest decomposition and, whenever the target rejects
   it, progressively folds components (step into index, symbol into base,
   index into base, offset into base) until a valid reference is formed.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  if (parts.step && !integer_onep (parts.step))
    {
      /* Move the multiplication to index.  */
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
				fold_build2 (MULT_EXPR, sizetype,
					     parts.index, parts.step),
				true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.symbol)
    {
      tmp = parts.symbol;
      gcc_assert (is_gimple_val (tmp));

      /* Add the symbol to base, eventually forcing it to register.  */
      if (parts.base)
	{
	  gcc_assert (useless_type_conversion_p
			(sizetype, TREE_TYPE (parts.base)));

	  if (parts.index)
	    {
	      parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (tmp, parts.base),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	    }
	  else
	    {
	      /* The index slot is free; demote the old base there and
		 let the symbol become the base.  */
	      parts.index = parts.base;
	      parts.base = tmp;
	    }
	}
      else
	parts.base = tmp;
      parts.symbol = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.index)
    {
      /* Add index to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.index),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.index;
      parts.index = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Try adding offset to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.offset),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.offset;

      parts.offset = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
/* Copies components of the address from OP to ADDR.  Inverts the encoding
   used by create_mem_ref_raw: an ADDR_EXPR in TMR_BASE means a symbol is
   present (with the real base in TMR_INDEX2); otherwise a non-NULL
   TMR_INDEX2 implies TMR_BASE is the zero-pointer placeholder.  */

void
get_address_description (tree op, struct mem_address *addr)
{
  if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
    {
      addr->symbol = TMR_BASE (op);
      addr->base = TMR_INDEX2 (op);
    }
  else
    {
      addr->symbol = NULL_TREE;
      if (TMR_INDEX2 (op))
	{
	  gcc_assert (integer_zerop (TMR_BASE (op)));
	  addr->base = TMR_INDEX2 (op);
	}
      else
	addr->base = TMR_BASE (op);
    }
  addr->index = TMR_INDEX (op);
  addr->step = TMR_STEP (op);
  addr->offset = TMR_OFFSET (op);
}
/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF: side effects,
   volatility, and (when safely possible) points-to and alignment info
   on the new pointer base.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
	      || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
	;
      else if ((TREE_CODE (base) == MEM_REF
		|| TREE_CODE (base) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
	       && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
	{
	  struct ptr_info_def *new_pi;
	  unsigned int align, misalign;

	  duplicate_ssa_name_ptr_info
	    (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
	  new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
	  /* We have to be careful about transferring alignment information.  */
	  if (get_ptr_info_alignment (new_pi, &align, &misalign)
	      && TREE_CODE (old_ref) == MEM_REF
	      && !(TREE_CODE (new_ref) == TARGET_MEM_REF
		   && (TMR_INDEX2 (new_ref)
		       /* TODO: Below conditions can be relaxed if TMR_INDEX
			  is an indcution variable and its initial value and
			  step are aligned.  */
		       || (TMR_INDEX (new_ref) && !TMR_STEP (new_ref))
		       || (TMR_STEP (new_ref)
			   && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
			       < align)))))
	    {
	      /* Only a constant-offset delta between the two refs; shift
		 the recorded misalignment by that delta.  */
	      unsigned int inc = (mem_ref_offset (old_ref).to_short_addr ()
				  - mem_ref_offset (new_ref).to_short_addr ());
	      adjust_ptr_info_misalignment (new_pi, inc);
	    }
	  else
	    mark_ptr_info_alignment_unknown (new_pi);
	}
      else if (TREE_CODE (base) == VAR_DECL
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	{
	  struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
	  pt_solution_set_var (&pi->pt, base);
	}
    }
}
/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  Folds a constant base,
   a MEM_REF or handled-component symbol, and a constant index (scaled by
   the step) into the offset operand.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      /* &MEM[p + c]: fold c into the offset and keep p as the symbol.  */
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      /* &obj.component: strip the component ref and fold its byte offset
	 into the TMR offset.  */
      HOST_WIDE_INT offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
			 (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     if it is valid in this for on the target - the propagation result
     wouldn't be anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
			        TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
/* Dump PARTS to FILE, one labeled line per non-NULL component.  Declared
   extern (rather than in a header) so it is callable from a debugger.  */

extern void dump_mem_address (FILE *, struct mem_address *);
void
dump_mem_address (FILE *file, struct mem_address *parts)
{
  if (parts->symbol)
    {
      fprintf (file, "symbol: ");
      /* The symbol is stored as an ADDR_EXPR; print the underlying decl.  */
      print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->base)
    {
      fprintf (file, "base: ");
      print_generic_expr (file, parts->base, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->index)
    {
      fprintf (file, "index: ");
      print_generic_expr (file, parts->index, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->step)
    {
      fprintf (file, "step: ");
      print_generic_expr (file, parts->step, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->offset)
    {
      fprintf (file, "offset: ");
      print_generic_expr (file, parts->offset, TDF_SLIM);
      fprintf (file, "\n");
    }
}
1019 #include "gt-tree-ssa-address.h"