/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Utility functions for manipulating TARGET_MEM_REFs -- tree expressions
   that map directly to addressing modes of the target.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stor-layout.h"
#include "tm_p.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-ivopts.h"
#include "expr.h"
#include "tree-dfa.h"
#include "dumpfile.h"
#include "flags.h"
#include "tree-inline.h"
#include "tree-affine.h"

/* FIXME: We compute address costs using RTL.  */
#include "insn-config.h"
#include "rtl.h"
#include "recog.h"
#include "expr.h"
#include "target.h"
#include "expmed.h"
#include "tree-ssa-address.h"
/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, e.g. local statics
     (3) !binds_local_p, e.g. global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3).  And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause incorrect results in the back
   end, because the expander in expr.c validizes the address.  However,
   it would be nice to improve the handling here in order to produce more
   precise results.  */
/* A "template" for a memory address, used to determine whether the address
   is valid for a given mode.  */

typedef struct GTY (()) mem_addr_template {
  rtx ref;                      /* The template.  */
  rtx * GTY ((skip)) step_p;    /* The point in the template where the step
                                   should be filled in.  */
  rtx * GTY ((skip)) off_p;     /* The point in the template where the offset
                                   should be filled in.  */
} mem_addr_template;
/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  */

static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;

#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
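
/* As a worked illustration of the encoding (hypothetical values): in the
   generic address space (AS == 0), an address that has a symbol, a base and
   an offset but no index and no step gets the index

     (0 << 5) | (1 << 4) | (1 << 3) | (0 << 2) | (0 << 1) | 1 == 25,

   so all such addresses share entry 25 of mem_addr_template_list.  */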
/* Stores the address for a memory reference with parameters SYMBOL, BASE,
   INDEX, STEP and OFFSET to *ADDR, using address mode ADDRESS_MODE.  Stores
   a pointer to the place of the step in the result to *STEP_P, and a pointer
   to the place of the offset to *OFFSET_P.  */

static void
gen_addr_rtx (enum machine_mode address_mode,
              rtx symbol, rtx base, rtx index, rtx step, rtx offset,
              rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  if (index)
    {
      act_elem = index;
      if (step)
        {
          act_elem = gen_rtx_MULT (address_mode, act_elem, step);

          if (step_p)
            *step_p = &XEXP (act_elem, 1);
        }

      *addr = act_elem;
    }

  if (base && base != const0_rtx)
    {
      if (*addr)
        *addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
        *addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
        {
          act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

          if (offset_p)
            *offset_p = &XEXP (act_elem, 1);

          if (GET_CODE (symbol) == SYMBOL_REF
              || GET_CODE (symbol) == LABEL_REF
              || GET_CODE (symbol) == CONST)
            act_elem = gen_rtx_CONST (address_mode, act_elem);
        }

      if (*addr)
        *addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
        *addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
        {
          *addr = gen_rtx_PLUS (address_mode, *addr, offset);
          if (offset_p)
            *offset_p = &XEXP (*addr, 1);
        }
      else
        {
          *addr = offset;
          if (offset_p)
            *offset_p = addr;
        }
    }

  if (!*addr)
    *addr = const0_rtx;
}
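
/* For illustration, assuming a hypothetical address in which every component
   is present, the function above builds RTL of roughly this shape (modulo
   canonicalizations done by simplify_gen_binary):

     (plus (plus base (mult index step))
           (const (plus symbol offset)))

   with *STEP_P pointing at the step operand of the MULT and *OFFSET_P at the
   offset operand next to the symbol, so a cached template can later be
   reused by patching only those two slots.  */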
/* Description of a memory address.  */

struct mem_address
{
  tree symbol, base, index, step, offset;
};
/* Returns the address for a TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
                  bool really_expand)
{
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  if (addr->step && !integer_onep (addr->step))
    st = immed_wide_int_const (addr->step, pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    {
      offset_int dc = offset_int::from (addr->offset, SIGNED);
      off = immed_wide_int_const (dc, pointer_mode);
    }
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
        = TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
        vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
        {
          sym = (addr->symbol
                 ? gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
                 : NULL_RTX);
          bse = (addr->base
                 ? gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
                 : NULL_RTX);
          idx = (addr->index
                 ? gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
                 : NULL_RTX);

          gen_addr_rtx (pointer_mode, sym, bse, idx,
                        st ? const0_rtx : NULL_RTX,
                        off ? const0_rtx : NULL_RTX,
                        &templ->ref,
                        &templ->step_p,
                        &templ->off_p);
        }

      if (st)
        *templ->step_p = st;
      if (off)
        *templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
         ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
         : NULL_RTX);
  bse = (addr->base
         ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
         : NULL_RTX);
  idx = (addr->index
         ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
         : NULL_RTX);

  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}
/* Implements addr_for_mem_ref () directly from a tree, which avoids exporting
   the mem_address structure.  */

rtx
addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
{
  struct mem_address addr;
  get_address_description (exp, &addr);
  return addr_for_mem_ref (&addr, as, really_expand);
}
/* Returns address of MEM_REF in TYPE.  */

tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
        act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
                                act_elem, step);
      addr_off = act_elem;
    }

  act_elem = TMR_INDEX2 (mem_ref);
  if (act_elem)
    {
      if (addr_off)
        addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
                                addr_off, act_elem);
      else
        addr_off = act_elem;
    }

  if (offset && !integer_zerop (offset))
    {
      if (addr_off)
        addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
                                fold_convert (TREE_TYPE (addr_off), offset));
      else
        addr_off = offset;
    }

  if (addr_off)
    addr = fold_build_pointer_plus (addr_base, addr_off);
  else
    addr = addr_base;

  return addr;
}
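
/* An illustrative example with hypothetical operands: for a TARGET_MEM_REF
   with base p, index i, step 4, offset 8 and no second index, the function
   above rebuilds the address as the tree

     p + (i * 4 + 8)

   i.e. a POINTER_PLUS_EXPR of the converted base and the folded sum of the
   scaled index and the offset.  */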
/* Returns true if a memory reference in MODE and with parameters given by
   ADDR is valid on the current target.  */

static bool
valid_mem_ref_p (enum machine_mode mode, addr_space_t as,
                 struct mem_address *addr)
{
  rtx address;

  address = addr_for_mem_ref (addr, as, false);
  if (!address)
    return false;

  return memory_address_addr_space_p (mode, address, as);
}
/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false, omit the verification step.  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
                    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
           && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (ptr_type_node, 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ??? As IVOPTs does not follow restrictions to where the base
     pointer may point to create a MEM_REF only if we know that
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
                 base, addr->offset, addr->index, addr->step, index2);
}
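
/* Illustrative example (hypothetical operands): if ADDR consists of the
   symbol &a and a constant offset of 16, with no index and no second index,
   the function above returns a plain MEM_REF with base &a and offset 16,
   because the ADDR_EXPR base is known to be valid.  As soon as an index is
   involved, it builds the five-operand
   TARGET_MEM_REF <base, offset, index, step, index2> instead.  */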
/* Returns true if OBJ is an object whose address is a link time constant.  */

static bool
fixed_address_object_p (tree obj)
{
  return (TREE_CODE (obj) == VAR_DECL
          && (TREE_STATIC (obj)
              || DECL_EXTERNAL (obj))
          && ! DECL_DLLIMPORT_P (obj));
}
/* If ADDR contains the address of an object that is a link-time constant,
   move it to PARTS->symbol.  */

static void
move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
        continue;

      val = addr->elts[i].val;
      if (TREE_CODE (val) == ADDR_EXPR
          && fixed_address_object_p (TREE_OPERAND (val, 0)))
        break;
    }

  if (i == addr->n)
    return;

  parts->symbol = val;
  aff_combination_remove_elt (addr, i);
}
/* If ADDR contains an instance of BASE_HINT, move it to PARTS->base.  */

static void
move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
                   aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;
  int qual;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
        continue;

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))
        break;
    }

  if (i == addr->n)
    return;

  /* Cast value to appropriate pointer type.  We cannot use a pointer
     to TYPE directly, as the back-end will assume registers of pointer
     type are aligned, and just the base itself may not actually be.
     We use void pointer to the type's address space instead.  */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
}
/* If ADDR contains an address of a dereferenced pointer, move it to
   PARTS->base.  */

static void
move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
        continue;

      val = addr->elts[i].val;
      if (POINTER_TYPE_P (TREE_TYPE (val)))
        break;
    }

  if (i == addr->n)
    return;

  parts->base = val;
  aff_combination_remove_elt (addr, i);
}
/* Moves the loop variant part V in linear address ADDR to be the index
   of PARTS.  */

static void
move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
{
  unsigned i;
  tree val = NULL_TREE;

  gcc_assert (!parts->index);
  for (i = 0; i < addr->n; i++)
    {
      val = addr->elts[i].val;
      if (operand_equal_p (val, v, 0))
        break;
    }

  if (i == addr->n)
    return;

  parts->index = fold_convert (sizetype, val);
  parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
  aff_combination_remove_elt (addr, i);
}
/* Adds ELT to PARTS.  */

static void
add_to_parts (struct mem_address *parts, tree elt)
{
  tree type;

  if (!parts->index)
    {
      parts->index = fold_convert (sizetype, elt);
      return;
    }

  if (!parts->base)
    {
      parts->base = elt;
      return;
    }

  /* Add ELT to base.  */
  type = TREE_TYPE (parts->base);
  if (POINTER_TYPE_P (type))
    parts->base = fold_build_pointer_plus (parts->base, elt);
  else
    parts->base = fold_build2 (PLUS_EXPR, type,
                               parts->base, elt);
}
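
/* Illustration with hypothetical elements: starting from empty PARTS, the
   first element added becomes the index, the second becomes the base, and
   any further element is folded into the base (as a POINTER_PLUS_EXPR when
   the base has pointer type, as a PLUS_EXPR otherwise).  */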
/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and moves the corresponding
   element(s) to PARTS.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
                              aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  offset_int best_mult = 0;
  for (i = 0; i < addr->n; i++)
    {
      if (!wi::fits_shwi_p (addr->elts[i].coef))
        continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
          || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
        continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
        {
          best_mult_cost = acost;
          best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
        }
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  */
  for (i = j = 0; i < addr->n; i++)
    {
      offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
      offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));

      if (amult == best_mult)
        op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
        op_code = MINUS_EXPR;
      else
        {
          addr->elts[j] = addr->elts[i];
          j++;
          continue;
        }

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
        mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
        mult_elt = elt;
      else
        mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  parts->index = mult_elt;
  parts->step = wide_int_to_tree (sizetype, best_mult);
}
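
/* A hypothetical worked example: if ADDR is the combination
   4*i + 4*j - 4*k + n and the target allows a multiplier of 4 in its
   addressing modes, the code above picks best_mult == 4, sets PARTS->index
   to i + j - k and PARTS->step to 4, and leaves only the element n
   in ADDR.  */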
/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as the base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but a single
   register in an address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
               tree base_hint, struct mem_address *parts,
               bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  if (addr->offset != 0)
    parts->offset = wide_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* No need to do address parts reassociation if the number of parts
     is <= 2 -- in that case, no loop invariant code motion can be
     exposed.  */

  if (!base_hint && (addr->n > 2))
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication
     to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Try to find a base of the reference.  Since at the moment
     there is no reliable way to distinguish between a pointer and its
     offset, this is just a guess.  */
  if (!parts->symbol && base_hint)
    move_hint_to_base (type, parts, base_hint, addr);
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (addr->elts[i].coef != 1)
        part = fold_build2 (MULT_EXPR, sizetype, part,
                            wide_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
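
/* For illustration with hypothetical inputs: splitting the affine address
   &a + 4*i + 16, on a target that allows scaling by 4, typically yields
   PARTS with symbol == &a, index == i, step == 4, offset == 16 and no base;
   create_mem_ref then tries to encode these parts directly and falls back
   to simpler shapes if the target rejects them.  */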
/* Force the PARTS to registers.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
                                              is_gimple_mem_ref_addr, NULL_TREE,
                                              true, GSI_SAME_STMT);
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
                                             true, NULL_TREE,
                                             true, GSI_SAME_STMT);
}
/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary,
   computations are emitted in front of GSI.  TYPE is the type of the created
   memory reference.  IV_CAND is the selected iv candidate in ADDR, and
   BASE_HINT is non-NULL if IV_CAND comes from a base address object.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
                tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  if (parts.step && !integer_onep (parts.step))
    {
      /* Move the multiplication to index.  */
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
                                fold_build2 (MULT_EXPR, sizetype,
                                             parts.index, parts.step),
                                true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  if (parts.symbol)
    {
      tmp = parts.symbol;
      gcc_assert (is_gimple_val (tmp));

      /* Add the symbol to base, eventually forcing it to a register.  */
      if (parts.base)
        {
          gcc_assert (useless_type_conversion_p
                        (sizetype, TREE_TYPE (parts.base)));

          if (parts.index)
            {
              parts.base = force_gimple_operand_gsi_1 (gsi,
                        fold_build_pointer_plus (tmp, parts.base),
                        is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
            }
          else
            {
              parts.index = parts.base;
              parts.base = tmp;
            }
        }
      else
        parts.base = tmp;
      parts.symbol = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  if (parts.index)
    {
      /* Add index to base.  */
      if (parts.base)
        {
          parts.base = force_gimple_operand_gsi_1 (gsi,
                        fold_build_pointer_plus (parts.base, parts.index),
                        is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
        }
      else
        parts.base = parts.index;
      parts.index = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Try adding offset to base.  */
      if (parts.base)
        {
          parts.base = force_gimple_operand_gsi_1 (gsi,
                        fold_build_pointer_plus (parts.base, parts.offset),
                        is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
        }
      else
        parts.base = parts.offset;
      parts.offset = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
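
/* An illustrative sketch of the fallback order above, with hypothetical
   operands: if the target rejects the full form base + index * 4 + 8 with a
   symbol, the code first emits index = index * 4 and drops the step, then
   folds the symbol into the base, then the index into the base, and finally
   the offset, retrying create_mem_ref_raw after each step until only a bare
   base address remains, which every target must accept.  */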
/* Copies components of the address from OP to ADDR.  */

void
get_address_description (tree op, struct mem_address *addr)
{
  if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
    {
      addr->symbol = TMR_BASE (op);
      addr->base = TMR_INDEX2 (op);
    }
  else
    {
      addr->symbol = NULL_TREE;
      if (TMR_INDEX2 (op))
        {
          gcc_assert (integer_zerop (TMR_BASE (op)));
          addr->base = TMR_INDEX2 (op);
        }
      else
        addr->base = TMR_BASE (op);
    }
  addr->index = TMR_INDEX (op);
  addr->step = TMR_STEP (op);
  addr->offset = TMR_OFFSET (op);
}
/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
              || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
        ;
      else if ((TREE_CODE (base) == MEM_REF
                || TREE_CODE (base) == TARGET_MEM_REF)
               && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
               && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
        {
          struct ptr_info_def *new_pi;
          unsigned int align, misalign;

          duplicate_ssa_name_ptr_info
            (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
          new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
          /* We have to be careful about transferring alignment information.  */
          if (get_ptr_info_alignment (new_pi, &align, &misalign)
              && TREE_CODE (old_ref) == MEM_REF
              && !(TREE_CODE (new_ref) == TARGET_MEM_REF
                   && (TMR_INDEX2 (new_ref)
                       || (TMR_STEP (new_ref)
                           && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
                               < align)))))
            {
              unsigned int inc = (mem_ref_offset (old_ref).to_short_addr ()
                                  - mem_ref_offset (new_ref).to_short_addr ());
              adjust_ptr_info_misalignment (new_pi, inc);
            }
          else
            mark_ptr_info_alignment_unknown (new_pi);
        }
      else if (TREE_CODE (base) == VAR_DECL
               || TREE_CODE (base) == PARM_DECL
               || TREE_CODE (base) == RESULT_DECL)
        {
          struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
          pt_solution_set_var (&pi->pt, base);
        }
    }
}
/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
                                             TREE_TYPE (addr.offset),
                                             addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
                        (PLUS_EXPR, TREE_TYPE (addr.offset),
                         addr.offset,
                         TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
           && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      HOST_WIDE_INT offset;
      addr.symbol = build_fold_addr_expr
                      (get_addr_base_and_unit_offset
                         (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
                                     addr.offset, size_int (offset));
      changed = true;
    }

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
        {
          off = fold_binary_to_constant (MULT_EXPR, sizetype,
                                         off, addr.step);
          addr.step = NULL_TREE;
        }

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
                                             TREE_TYPE (addr.offset),
                                             addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target; the propagation
     result would not be valid anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
                                TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
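
/* A hypothetical example of the folding above: a TARGET_MEM_REF whose index
   has been propagated to the constant 2, with step 4 and offset 8, is
   rewritten to have offset 8 + 2 * 4 == 16 and no index or step.  The
   rebuilt reference is returned even if it is not a valid addressing mode
   for the target, since the reference it replaces would not be either.  */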
/* Dump PARTS to FILE.  */

extern void dump_mem_address (FILE *, struct mem_address *);
void
dump_mem_address (FILE *file, struct mem_address *parts)
{
  if (parts->symbol)
    {
      fprintf (file, "symbol: ");
      print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->base)
    {
      fprintf (file, "base: ");
      print_generic_expr (file, parts->base, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->index)
    {
      fprintf (file, "index: ");
      print_generic_expr (file, parts->index, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->step)
    {
      fprintf (file, "step: ");
      print_generic_expr (file, parts->step, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->offset)
    {
      fprintf (file, "offset: ");
      print_generic_expr (file, parts->offset, TDF_SLIM);
      fprintf (file, "\n");
    }
}
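
/* For a PARTS with symbol a, base p_1, index i_2, step 4 and offset 8, the
   output of the dump above would look roughly like this (illustrative
   values only):

     symbol: a
     base: p_1
     index: i_2
     step: 4
     offset: 8  */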
#include "gt-tree-ssa-address.h"