/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
   that directly map to addressing modes of the target.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tm_p.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-ivopts.h"
#include "hashtab.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "dumpfile.h"
#include "tree-inline.h"
#include "tree-affine.h"

/* FIXME: We compute address costs using RTL.  */
#include "recog.h"
#include "target.h"
#include "tree-ssa-address.h"
/* TODO -- handling of symbols (according to Richard Henderson's comments,
   http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, eg local statics
     (3) !binds_local_p, eg global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3).  And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause incorrect results out the
   back end, because the expander in expr.c validizes the address.  However
   it would be nice to improve the handling here in order to produce more
   precise results.  */
/* A "template" for memory address, used to determine whether the address is
   valid for mode.  */

typedef struct GTY (()) mem_addr_template {
  rtx ref;                      /* The template.  */
  rtx * GTY ((skip)) step_p;    /* The point in template where the step should be
                                   filled in.  */
  rtx * GTY ((skip)) off_p;     /* The point in template where the offset should
                                   be filled in.  */
} mem_addr_template;


/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  */

static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;

#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
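
/* A worked example of the encoding above; the operand names are hypothetical
   and only serve as an illustration.  A reference in the generic address
   space 0 that has a base and an index plus a nonzero offset, but no symbol
   and a unit step, selects

     TEMPL_IDX (0, NULL_RTX, base_rtx, index_rtx, NULL_RTX, offset_rtx)
       == (0 << 5) | (0 << 4) | (1 << 3) | (1 << 2) | (0 << 1) | 1
       == 13

   so at most 32 templates are needed per address space, one per subset of
   the five optional components.  */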

/* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
   STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores pointers
   to where step is placed to *STEP_P and offset to *OFFSET_P.  */

static void
gen_addr_rtx (machine_mode address_mode,
	      rtx symbol, rtx base, rtx index, rtx step, rtx offset,
	      rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  if (index)
    {
      act_elem = index;
      if (step)
	{
	  act_elem = gen_rtx_MULT (address_mode, act_elem, step);

	  if (step_p)
	    *step_p = &XEXP (act_elem, 1);
	}

      *addr = act_elem;
    }

  if (base && base != const0_rtx)
    {
      if (*addr)
	*addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
	*addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
	{
	  act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

	  if (offset_p)
	    *offset_p = &XEXP (act_elem, 1);

	  if (GET_CODE (symbol) == SYMBOL_REF
	      || GET_CODE (symbol) == LABEL_REF
	      || GET_CODE (symbol) == CONST)
	    act_elem = gen_rtx_CONST (address_mode, act_elem);
	}

      if (*addr)
	*addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
	*addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
	{
	  *addr = gen_rtx_PLUS (address_mode, *addr, offset);
	  if (offset_p)
	    *offset_p = &XEXP (*addr, 1);
	}
      else
	{
	  *addr = offset;
	  if (offset_p)
	    *offset_p = addr;
	}
    }

  if (!*addr)
    *addr = const0_rtx;
}
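
/* A sketch of the RTL shape produced above, with hypothetical operands: when
   index I, step S, base B, symbol SYM and offset O are all present, the
   result is roughly

     (plus (plus (mult I S) B) (const (plus SYM O)))

   modulo the canonicalization done by simplify_gen_binary, and *STEP_P and
   *OFFSET_P are left pointing at the S and O slots so that a cached template
   can later have the real step and offset patched in.  */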

/* Description of a memory address.  */

struct mem_address
{
  tree symbol, base, index, step, offset;
};

/* Returns address for TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)
{
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  if (addr->step && !integer_onep (addr->step))
    st = immed_wide_int_const (addr->step, pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    {
      offset_int dc = offset_int::from (addr->offset, SIGNED);
      off = immed_wide_int_const (dc, pointer_mode);
    }
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
	vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
	{
	  sym = (addr->symbol ?
		 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
		 : NULL_RTX);
	  bse = (addr->base ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
		 : NULL_RTX);
	  idx = (addr->index ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
		 : NULL_RTX);

	  gen_addr_rtx (pointer_mode, sym, bse, idx,
			st? const0_rtx : NULL_RTX,
			off? const0_rtx : NULL_RTX,
			&templ->ref,
			&templ->step_p,
			&templ->off_p);
	}

      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}

/* implement addr_for_mem_ref() directly from a tree, which avoids exporting
   the mem_address structure.  */

rtx
addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
{
  struct mem_address addr;
  get_address_description (exp, &addr);
  return addr_for_mem_ref (&addr, as, really_expand);
}

/* Returns address of MEM_REF in TYPE.  */

tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
	act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
				act_elem, step);
      addr_off = act_elem;
    }

  act_elem = TMR_INDEX2 (mem_ref);
  if (act_elem)
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
				addr_off, act_elem);
      else
	addr_off = act_elem;
    }

  if (offset && !integer_zerop (offset))
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
				fold_convert (TREE_TYPE (addr_off), offset));
      else
	addr_off = offset;
    }

  if (addr_off)
    addr = fold_build_pointer_plus (addr_base, addr_off);
  else
    addr = addr_base;

  return addr;
}
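
/* A worked example with hypothetical operands: a TARGET_MEM_REF with base B,
   index I, step 4, index2 I2 and offset 16 denotes the location
   B + I * 4 + I2 + 16, so the code above builds

     addr_off = I * 4 + I2 + 16
     addr     = (type) B p+ addr_off

   where "p+" stands for POINTER_PLUS_EXPR.  */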

/* Returns true if a memory reference in MODE and with parameters given by
   ADDR is valid on the current target.  */

static bool
valid_mem_ref_p (machine_mode mode, addr_space_t as,
		 struct mem_address *addr)
{
  rtx address;

  address = addr_for_mem_ref (addr, as, false);
  if (!address)
    return false;

  return memory_address_addr_space_p (mode, address, as);
}

/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false omit the verification step.  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (ptr_type_node, 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ??? As IVOPTs does not follow restrictions to where the base
     pointer may point to create a MEM_REF only if we know that
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}
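
/* For illustration (hypothetical parts): with symbol = &a, no base or index,
   and offset 4, the test above succeeds and a plain MEM_REF of &a with
   offset 4 is returned; if instead a non-constant index such as an SSA name
   remains, the five-operand TARGET_MEM_REF is built.  */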

/* Returns true if OBJ is an object whose address is a link time constant.  */

static bool
fixed_address_object_p (tree obj)
{
  return (TREE_CODE (obj) == VAR_DECL
	  && (TREE_STATIC (obj)
	      || DECL_EXTERNAL (obj))
	  && ! DECL_DLLIMPORT_P (obj));
}

/* If ADDR contains an address of object that is a link time constant,
   move it to PARTS->symbol.  */

static void
move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (TREE_CODE (val) == ADDR_EXPR
	  && fixed_address_object_p (TREE_OPERAND (val, 0)))
	break;
    }

  if (i == addr->n)
    return;

  parts->symbol = val;
  aff_combination_remove_elt (addr, i);
}

/* If ADDR contains an instance of BASE_HINT, move it to PARTS->base.  */

static void
move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
		   aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;
  int qual;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))
	break;
    }

  if (i == addr->n)
    return;

  /* Cast value to appropriate pointer type.  We cannot use a pointer
     to TYPE directly, as the back-end will assume registers of pointer
     type are aligned, and just the base itself may not actually be.
     We use void pointer to the type's address space instead.  */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
}

/* If ADDR contains an address of a dereferenced pointer, move it to
   PARTS->base.  */

static void
move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (POINTER_TYPE_P (TREE_TYPE (val)))
	break;
    }

  if (i == addr->n)
    return;

  parts->base = val;
  aff_combination_remove_elt (addr, i);
}

/* Moves the loop variant part V in linear address ADDR to be the index
   of PARTS.  */

static void
move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
{
  unsigned i;
  tree val = NULL_TREE;

  gcc_assert (!parts->index);
  for (i = 0; i < addr->n; i++)
    {
      val = addr->elts[i].val;
      if (operand_equal_p (val, v, 0))
	break;
    }

  if (i == addr->n)
    return;

  parts->index = fold_convert (sizetype, val);
  parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
  aff_combination_remove_elt (addr, i);
}

/* Adds ELT to PARTS.  */

static void
add_to_parts (struct mem_address *parts, tree elt)
{
  tree type;

  if (!parts->index)
    {
      parts->index = fold_convert (sizetype, elt);
      return;
    }

  if (!parts->base)
    {
      parts->base = elt;
      return;
    }

  /* Add ELT to base.  */
  type = TREE_TYPE (parts->base);
  if (POINTER_TYPE_P (type))
    parts->base = fold_build_pointer_plus (parts->base, elt);
  else
    parts->base = fold_build2 (PLUS_EXPR, type,
			       parts->base, elt);
}

/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and moves the corresponding
   element(s) to PARTS.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
			      aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  offset_int best_mult = 0;
  for (i = 0; i < addr->n; i++)
    {
      if (!wi::fits_shwi_p (addr->elts[i].coef))
	continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
	  || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
	continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
	{
	  best_mult_cost = acost;
	  best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
	}
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  */
  for (i = j = 0; i < addr->n; i++)
    {
      offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
      offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));

      if (amult == best_mult)
	op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
	op_code = MINUS_EXPR;
      else
	{
	  addr->elts[j] = addr->elts[i];
	  j++;
	  continue;
	}

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
	mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
	mult_elt = elt;
      else
	mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  parts->index = mult_elt;
  parts->step = wide_int_to_tree (sizetype, best_mult);
}
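
/* A worked example on a hypothetical affine combination: given
   ADDR = p + 4 * i + 4 * j + 2 * k on a target whose addressing modes allow
   a multiplier of 4, and where multiplying by 4 costs more than multiplying
   by 2, the first loop picks best_mult = 4, the second loop collects i and j
   into mult_elt, and the result is

     parts->index = (sizetype) i + (sizetype) j
     parts->step  = 4

   leaving ADDR = p + 2 * k for the remaining distribution steps.  */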

/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but single
   register in address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
	       tree base_hint, struct mem_address *parts,
	       bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  if (addr->offset != 0)
    parts->offset = wide_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* No need to do address parts reassociation if the number of parts
     is <= 2 -- in that case, no loop invariant code motion can be
     exposed.  */

  if (!base_hint && (addr->n > 2))
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication
     to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Try to find a base of the reference.  Since at the moment
     there is no reliable way to distinguish between a pointer and its
     offset, this is just a guess.  */
  if (!parts->symbol && base_hint)
    move_hint_to_base (type, parts, base_hint, addr);
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (addr->elts[i].coef != 1)
	part = fold_build2 (MULT_EXPR, sizetype, part,
			    wide_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
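
/* Continuing with hypothetical operands: for ADDR = &a + 4 * i + b, where a
   is a file-scope array and i and b are SSA names, and assuming the target
   allows a multiplier of 4 in its addressing modes, &a is moved to
   parts->symbol, 4 * i becomes parts->index with parts->step == 4, and the
   leftover b (converted to sizetype) is handed to add_to_parts, which stores
   it as parts->base; a zero constant offset leaves parts->offset NULL.  */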

/* Force the PARTS to register.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
					      is_gimple_mem_ref_addr, NULL_TREE,
					      true, GSI_SAME_STMT);
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
					     true, NULL_TREE,
					     true, GSI_SAME_STMT);
}

/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary
   computations are emitted in front of GSI.  TYPE is the mode
   of created memory reference.  IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non NULL if IV_CAND comes from a base address
   object.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  if (parts.step && !integer_onep (parts.step))
    {
      /* Move the multiplication to index.  */
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
				fold_build2 (MULT_EXPR, sizetype,
					     parts.index, parts.step),
				true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.symbol)
    {
      tmp = parts.symbol;
      gcc_assert (is_gimple_val (tmp));

      /* Add the symbol to base, eventually forcing it to register.  */
      if (parts.base)
	{
	  gcc_assert (useless_type_conversion_p
		      (sizetype, TREE_TYPE (parts.base)));

	  if (parts.index)
	    {
	      parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (tmp, parts.base),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	    }
	  else
	    {
	      parts.index = parts.base;
	      parts.base = tmp;
	    }
	}
      else
	parts.base = tmp;
      parts.symbol = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.index)
    {
      /* Add index to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.index),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.index;
      parts.index = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Try adding offset to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.offset),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.offset;

      parts.offset = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
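
/* A hypothetical walk through the fallback chain above: if the target rejects
   symbol + base + index * step + offset as a single addressing mode, the code
   first folds the step into the index, then adds the symbol into the base,
   then the index, and finally the offset, re-checking with create_mem_ref_raw
   after each step; in the worst case everything collapses into one gimplified
   base register, which every target must accept.  */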

/* Copies components of the address from OP to ADDR.  */

void
get_address_description (tree op, struct mem_address *addr)
{
  if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
    {
      addr->symbol = TMR_BASE (op);
      addr->base = TMR_INDEX2 (op);
    }
  else
    {
      addr->symbol = NULL_TREE;
      if (TMR_INDEX2 (op))
	{
	  gcc_assert (integer_zerop (TMR_BASE (op)));
	  addr->base = TMR_INDEX2 (op);
	}
      else
	addr->base = TMR_BASE (op);
    }
  addr->index = TMR_INDEX (op);
  addr->step = TMR_STEP (op);
  addr->offset = TMR_OFFSET (op);
}

/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
	      || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
	;
      else if ((TREE_CODE (base) == MEM_REF
		|| TREE_CODE (base) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
	       && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
	{
	  struct ptr_info_def *new_pi;
	  unsigned int align, misalign;

	  duplicate_ssa_name_ptr_info
	    (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
	  new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
	  /* We have to be careful about transferring alignment information.  */
	  if (get_ptr_info_alignment (new_pi, &align, &misalign)
	      && TREE_CODE (old_ref) == MEM_REF
	      && !(TREE_CODE (new_ref) == TARGET_MEM_REF
		   && (TMR_INDEX2 (new_ref)
		       || (TMR_STEP (new_ref)
			   && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
			       < align)))))
	    {
	      unsigned int inc = (mem_ref_offset (old_ref).to_short_addr ()
				  - mem_ref_offset (new_ref).to_short_addr ());
	      adjust_ptr_info_misalignment (new_pi, inc);
	    }
	  else
	    mark_ptr_info_alignment_unknown (new_pi);
	}
      else if (TREE_CODE (base) == VAR_DECL
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	{
	  struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
	  pt_solution_set_var (&pi->pt, base);
	}
    }
}

/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      HOST_WIDE_INT offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
			 (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target - the propagation
     result wouldn't be anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
				TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
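
/* A worked example with a hypothetical TARGET_MEM_REF: if constant
   propagation turns the index into the constant 2 while the step is 8, the
   block above folds 2 * 8 == 16 into the offset, clears the index and step,
   and rebuilds the reference with create_mem_ref_raw (..., false), i.e.
   without re-checking target validity, since the folded form need not be a
   valid address on the target anyway.  */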

/* Dump PARTS to FILE.  */

extern void dump_mem_address (FILE *, struct mem_address *);
void
dump_mem_address (FILE *file, struct mem_address *parts)
{
  if (parts->symbol)
    {
      fprintf (file, "symbol: ");
      print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->base)
    {
      fprintf (file, "base: ");
      print_generic_expr (file, parts->base, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->index)
    {
      fprintf (file, "index: ");
      print_generic_expr (file, parts->index, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->step)
    {
      fprintf (file, "step: ");
      print_generic_expr (file, parts->step, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->offset)
    {
      fprintf (file, "offset: ");
      print_generic_expr (file, parts->offset, TDF_SLIM);
      fprintf (file, "\n");
    }
}

#include "gt-tree-ssa-address.h"