/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
   that directly map to addressing modes of the target.  */
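
/* Informally, a TARGET_MEM_REF with parts <base, offset, index, step, index2>
   denotes the memory location at address

     base + index * step + index2 + offset

   where the base may instead be the address of a fixed symbol.  See
   tree_mem_ref_addr below for the authoritative reconstruction of the
   address from the operands.  */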
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stor-layout.h"
#include "tm_p.h"
#include "predict.h"
#include "vec.h"
#include "hashtab.h"
#include "hash-set.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "input.h"
#include "function.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-ssa-loop-ivopts.h"
#include "expr.h"
#include "tree-dfa.h"
#include "dumpfile.h"
#include "flags.h"
#include "tree-inline.h"
#include "tree-affine.h"

/* FIXME: We compute address costs using RTL.  */
#include "insn-config.h"
#include "rtl.h"
#include "recog.h"
#include "expr.h"
#include "target.h"
#include "expmed.h"
#include "tree-ssa-address.h"
/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, eg local statics
     (3) !binds_local_p, eg global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3).  And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause incorrect results out the
   back end, because the expander in expr.c validates the address.  However,
   it would be nice to improve the handling here in order to produce more
   precise results.  */
90 /* A "template" for memory address, used to determine whether the address is
91 valid for mode. */
93 typedef struct GTY (()) mem_addr_template {
94 rtx ref; /* The template. */
95 rtx * GTY ((skip)) step_p; /* The point in template where the step should be
96 filled in. */
97 rtx * GTY ((skip)) off_p; /* The point in template where the offset should
98 be filled in. */
99 } mem_addr_template;
102 /* The templates. Each of the low five bits of the index corresponds to one
103 component of TARGET_MEM_REF being present, while the high bits identify
104 the address space. See TEMPL_IDX. */
106 static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;
108 #define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
109 (((int) (AS) << 5) \
110 | ((SYMBOL != 0) << 4) \
111 | ((BASE != 0) << 3) \
112 | ((INDEX != 0) << 2) \
113 | ((STEP != 0) << 1) \
114 | (OFFSET != 0))
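
/* For instance, in the generic address space (AS == 0), an address that has
   only a base and an offset maps to
   TEMPL_IDX (0, 0, base, 0, 0, offset) == (1 << 3) | 1 == 9.  */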
/* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
   STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores pointers
   to where step is placed to *STEP_P and offset to *OFFSET_P.  */
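
/* With all parts present the result has roughly the shape
   (plus (plus base (mult index step)) (const (plus symbol offset))),
   though simplify_gen_binary may canonicalize the operand order of the
   inner PLUS.  */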
static void
gen_addr_rtx (machine_mode address_mode,
	      rtx symbol, rtx base, rtx index, rtx step, rtx offset,
	      rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  if (index)
    {
      act_elem = index;
      if (step)
	{
	  act_elem = gen_rtx_MULT (address_mode, act_elem, step);

	  if (step_p)
	    *step_p = &XEXP (act_elem, 1);
	}

      *addr = act_elem;
    }

  if (base && base != const0_rtx)
    {
      if (*addr)
	*addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
	*addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
	{
	  act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

	  if (offset_p)
	    *offset_p = &XEXP (act_elem, 1);

	  if (GET_CODE (symbol) == SYMBOL_REF
	      || GET_CODE (symbol) == LABEL_REF
	      || GET_CODE (symbol) == CONST)
	    act_elem = gen_rtx_CONST (address_mode, act_elem);
	}

      if (*addr)
	*addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
	*addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
	{
	  *addr = gen_rtx_PLUS (address_mode, *addr, offset);
	  if (offset_p)
	    *offset_p = &XEXP (*addr, 1);
	}
      else
	{
	  *addr = offset;
	  if (offset_p)
	    *offset_p = addr;
	}
    }

  if (!*addr)
    *addr = const0_rtx;
}
/* Description of a memory address.  */

struct mem_address
{
  tree symbol, base, index, step, offset;
};
/* Returns address for TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  */
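
/* Note: with !REALLY_EXPAND the returned RTX is one of the shared templates
   above, with its step and offset slots overwritten on each call; as far as
   this file is concerned it is only fed to an immediate validity check via
   memory_address_addr_space_p (see valid_mem_ref_p below).  */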
rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)
{
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  if (addr->step && !integer_onep (addr->step))
    st = immed_wide_int_const (addr->step, pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    {
      offset_int dc = offset_int::from (addr->offset, SIGNED);
      off = immed_wide_int_const (dc, pointer_mode);
    }
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
	vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
	{
	  sym = (addr->symbol ?
		 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
		 : NULL_RTX);
	  bse = (addr->base ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
		 : NULL_RTX);
	  idx = (addr->index ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
		 : NULL_RTX);

	  gen_addr_rtx (pointer_mode, sym, bse, idx,
			st? const0_rtx : NULL_RTX,
			off? const0_rtx : NULL_RTX,
			&templ->ref,
			&templ->step_p,
			&templ->off_p);
	}

      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}
/* Implement addr_for_mem_ref() directly from a tree, which avoids exporting
   the mem_address structure.  */

rtx
addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
{
  struct mem_address addr;
  get_address_description (exp, &addr);
  return addr_for_mem_ref (&addr, as, really_expand);
}
/* Returns address of MEM_REF in TYPE.  */

tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
	act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
				act_elem, step);
      addr_off = act_elem;
    }

  act_elem = TMR_INDEX2 (mem_ref);
  if (act_elem)
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
				addr_off, act_elem);
      else
	addr_off = act_elem;
    }

  if (offset && !integer_zerop (offset))
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
				fold_convert (TREE_TYPE (addr_off), offset));
      else
	addr_off = offset;
    }

  if (addr_off)
    addr = fold_build_pointer_plus (addr_base, addr_off);
  else
    addr = addr_base;

  return addr;
}
/* Returns true if a memory reference in MODE and with parameters given by
   ADDR is valid on the current target.  */

static bool
valid_mem_ref_p (machine_mode mode, addr_space_t as,
		 struct mem_address *addr)
{
  rtx address;

  address = addr_for_mem_ref (addr, as, false);
  if (!address)
    return false;

  return memory_address_addr_space_p (mode, address, as);
}
/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false omit the verification step.  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (ptr_type_node, 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ??? As IVOPTs does not follow restrictions on where the base
     pointer may point, create a MEM_REF only if we know that the
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}
/* Returns true if OBJ is an object whose address is a link time constant.  */

static bool
fixed_address_object_p (tree obj)
{
  return (TREE_CODE (obj) == VAR_DECL
	  && (TREE_STATIC (obj)
	      || DECL_EXTERNAL (obj))
	  && ! DECL_DLLIMPORT_P (obj));
}
/* If ADDR contains an address of object that is a link time constant,
   move it to PARTS->symbol.  */

static void
move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (TREE_CODE (val) == ADDR_EXPR
	  && fixed_address_object_p (TREE_OPERAND (val, 0)))
	break;
    }

  if (i == addr->n)
    return;

  parts->symbol = val;
  aff_combination_remove_elt (addr, i);
}
/* If ADDR contains an instance of BASE_HINT, move it to PARTS->base.  */

static void
move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
		   aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;
  int qual;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))
	break;
    }

  if (i == addr->n)
    return;

  /* Cast value to appropriate pointer type.  We cannot use a pointer
     to TYPE directly, as the back-end will assume registers of pointer
     type are aligned, and just the base itself may not actually be.
     We use void pointer to the type's address space instead.  */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
}
/* If ADDR contains an address of a dereferenced pointer, move it to
   PARTS->base.  */

static void
move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (POINTER_TYPE_P (TREE_TYPE (val)))
	break;
    }

  if (i == addr->n)
    return;

  parts->base = val;
  aff_combination_remove_elt (addr, i);
}
/* Moves the loop variant part V in linear address ADDR to be the index
   of PARTS.  */

static void
move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
{
  unsigned i;
  tree val = NULL_TREE;

  gcc_assert (!parts->index);
  for (i = 0; i < addr->n; i++)
    {
      val = addr->elts[i].val;
      if (operand_equal_p (val, v, 0))
	break;
    }

  if (i == addr->n)
    return;

  parts->index = fold_convert (sizetype, val);
  parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
  aff_combination_remove_elt (addr, i);
}
/* Adds ELT to PARTS.  */

static void
add_to_parts (struct mem_address *parts, tree elt)
{
  tree type;

  if (!parts->index)
    {
      parts->index = fold_convert (sizetype, elt);
      return;
    }

  if (!parts->base)
    {
      parts->base = elt;
      return;
    }

  /* Add ELT to base.  */
  type = TREE_TYPE (parts->base);
  if (POINTER_TYPE_P (type))
    parts->base = fold_build_pointer_plus (parts->base, elt);
  else
    parts->base = fold_build2 (PLUS_EXPR, type,
			       parts->base, elt);
}
/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and moves the corresponding
   element(s) to PARTS.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
			      aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  offset_int best_mult = 0;
  for (i = 0; i < addr->n; i++)
    {
      if (!wi::fits_shwi_p (addr->elts[i].coef))
	continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
	  || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
	continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
	{
	  best_mult_cost = acost;
	  best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
	}
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  */
  for (i = j = 0; i < addr->n; i++)
    {
      offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
      offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));

      if (amult == best_mult)
	op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
	op_code = MINUS_EXPR;
      else
	{
	  addr->elts[j] = addr->elts[i];
	  j++;
	  continue;
	}

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
	mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
	mult_elt = elt;
      else
	mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  parts->index = mult_elt;
  parts->step = wide_int_to_tree (sizetype, best_mult);
}
/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but single
   register in address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */
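
/* As a schematic example, an access a[i] into a static array of 4-byte
   elements arrives here roughly as the affine combination &a + 4 * (sizetype) i
   and is split into symbol = &a, index = (sizetype) i, step = 4 and no offset;
   the exact shape depends on the iv candidate ivopts selected.  */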
static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
	       tree base_hint, struct mem_address *parts,
	       bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  if (addr->offset != 0)
    parts->offset = wide_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* No need to do address parts reassociation if the number of parts
     is <= 2 -- in that case, no loop invariant code motion can be
     exposed.  */

  if (!base_hint && (addr->n > 2))
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication
     to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Try to find a base of the reference.  Since at the moment
     there is no reliable way to distinguish between a pointer and its
     offset, this is just a guess.  */
  if (!parts->symbol && base_hint)
    move_hint_to_base (type, parts, base_hint, addr);
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (addr->elts[i].coef != 1)
	part = fold_build2 (MULT_EXPR, sizetype, part,
			    wide_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
/* Force the PARTS to register.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
					      is_gimple_mem_ref_addr, NULL_TREE,
					      true, GSI_SAME_STMT);
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
					     true, NULL_TREE,
					     true, GSI_SAME_STMT);
}
/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary
   computations are emitted in front of GSI.  TYPE is the type of the created
   memory reference.  IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non-NULL if IV_CAND comes from a base address
   object.  */
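
/* If the initial TARGET_MEM_REF is not valid, the code below retries with
   progressively simpler shapes: the step is folded into the index, then the
   symbol, the index and finally the offset are folded into the base, until
   create_mem_ref_raw succeeds (a summary of the fallback cascade in this
   function; if nothing works, an assertion fails).  */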
tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  if (parts.step && !integer_onep (parts.step))
    {
      /* Move the multiplication to index.  */
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
				fold_build2 (MULT_EXPR, sizetype,
					     parts.index, parts.step),
				true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.symbol)
    {
      tmp = parts.symbol;
      gcc_assert (is_gimple_val (tmp));

      /* Add the symbol to base, possibly forcing it into a register.  */
      if (parts.base)
	{
	  gcc_assert (useless_type_conversion_p
		      (sizetype, TREE_TYPE (parts.base)));

	  if (parts.index)
	    {
	      parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (tmp, parts.base),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	    }
	  else
	    {
	      parts.index = parts.base;
	      parts.base = tmp;
	    }
	}
      else
	parts.base = tmp;
      parts.symbol = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.index)
    {
      /* Add index to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.index),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.index;
      parts.index = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Try adding offset to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.offset),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.offset;

      parts.offset = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
/* Copies components of the address from OP to ADDR.  */

void
get_address_description (tree op, struct mem_address *addr)
{
  if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
    {
      addr->symbol = TMR_BASE (op);
      addr->base = TMR_INDEX2 (op);
    }
  else
    {
      addr->symbol = NULL_TREE;
      if (TMR_INDEX2 (op))
	{
	  gcc_assert (integer_zerop (TMR_BASE (op)));
	  addr->base = TMR_INDEX2 (op);
	}
      else
	addr->base = TMR_BASE (op);
    }
  addr->index = TMR_INDEX (op);
  addr->step = TMR_STEP (op);
  addr->offset = TMR_OFFSET (op);
}
/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
	      || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
	;
      else if ((TREE_CODE (base) == MEM_REF
		|| TREE_CODE (base) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
	       && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
	{
	  struct ptr_info_def *new_pi;
	  unsigned int align, misalign;

	  duplicate_ssa_name_ptr_info
	    (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
	  new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
	  /* We have to be careful about transferring alignment information.  */
	  if (get_ptr_info_alignment (new_pi, &align, &misalign)
	      && TREE_CODE (old_ref) == MEM_REF
	      && !(TREE_CODE (new_ref) == TARGET_MEM_REF
		   && (TMR_INDEX2 (new_ref)
		       || (TMR_STEP (new_ref)
			   && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
			       < align)))))
	    {
	      unsigned int inc = (mem_ref_offset (old_ref).to_short_addr ()
				  - mem_ref_offset (new_ref).to_short_addr ());
	      adjust_ptr_info_misalignment (new_pi, inc);
	    }
	  else
	    mark_ptr_info_alignment_unknown (new_pi);
	}
      else if (TREE_CODE (base) == VAR_DECL
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	{
	  struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
	  pt_solution_set_var (&pi->pt, base);
	}
    }
}
/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */
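
/* For instance (schematically), a TARGET_MEM_REF with constant index 3,
   step 4 and offset 8 is rewritten below so that 3 * 4 is folded into the
   offset, yielding offset 20 and no index or step.  */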
tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      HOST_WIDE_INT offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
			 (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target - the propagation
     result wouldn't be anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
				TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
/* Dump PARTS to FILE.  */

extern void dump_mem_address (FILE *, struct mem_address *);
void
dump_mem_address (FILE *file, struct mem_address *parts)
{
  if (parts->symbol)
    {
      fprintf (file, "symbol: ");
      print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->base)
    {
      fprintf (file, "base: ");
      print_generic_expr (file, parts->base, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->index)
    {
      fprintf (file, "index: ");
      print_generic_expr (file, parts->index, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->step)
    {
      fprintf (file, "step: ");
      print_generic_expr (file, parts->step, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->offset)
    {
      fprintf (file, "offset: ");
      print_generic_expr (file, parts->offset, TDF_SLIM);
      fprintf (file, "\n");
    }
}

#include "gt-tree-ssa-address.h"