/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
   that directly map to addressing modes of the target.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "recog.h"
#include "tree-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-ssa-loop-ivopts.h"
#include "flags.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "dumpfile.h"
#include "tree-inline.h"
#include "tree-affine.h"

/* FIXME: We compute address costs using RTL.  */
#include "tree-ssa-address.h"
/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, e.g. local statics
     (3) !binds_local_p, e.g. global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3).  And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause incorrect results out of the
   back end, because the expander in expr.c validizes the address.  However,
   it would be nice to improve the handling here in order to produce more
   precise results.  */
/* A "template" for a memory address, used to determine whether the address
   is valid for a given mode.  */

struct GTY (()) mem_addr_template {
  rtx ref;                      /* The template.  */
  rtx * GTY ((skip)) step_p;    /* The point in the template where the step
                                   should be filled in.  */
  rtx * GTY ((skip)) off_p;     /* The point in the template where the offset
                                   should be filled in.  */
};

/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  */

static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;

#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
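
/* For illustration: in the generic address space (AS == 0), an address
   consisting of just a symbol and an offset maps to
   TEMPL_IDX (0, sym, NULL, NULL, NULL, off) == (1 << 4) | 1 == 17,
   so its template is stored at index 17 of mem_addr_template_list.  */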
/* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
   STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores pointers
   to where step is placed to *STEP_P and offset to *OFFSET_P.  */

static void
gen_addr_rtx (machine_mode address_mode,
	      rtx symbol, rtx base, rtx index, rtx step, rtx offset,
	      rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  if (index)
    {
      act_elem = index;
      if (step)
	{
	  act_elem = gen_rtx_MULT (address_mode, act_elem, step);

	  if (step_p)
	    *step_p = &XEXP (act_elem, 1);
	}

      *addr = act_elem;
    }

  if (base && base != const0_rtx)
    {
      if (*addr)
	*addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
	*addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
	{
	  act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

	  if (offset_p)
	    *offset_p = &XEXP (act_elem, 1);

	  if (GET_CODE (symbol) == SYMBOL_REF
	      || GET_CODE (symbol) == LABEL_REF
	      || GET_CODE (symbol) == CONST)
	    act_elem = gen_rtx_CONST (address_mode, act_elem);
	}

      if (*addr)
	*addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
	*addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
	{
	  *addr = gen_rtx_PLUS (address_mode, *addr, offset);
	  if (offset_p)
	    *offset_p = &XEXP (*addr, 1);
	}
      else
	{
	  *addr = offset;
	  if (offset_p)
	    *offset_p = addr;
	}
    }

  if (!*addr)
    *addr = const0_rtx;
}
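
/* For example, with BASE, INDEX, STEP and OFFSET all present and no SYMBOL,
   the code above yields a PLUS chain combining BASE, (mult INDEX STEP) and
   OFFSET, roughly (plus (plus BASE (mult INDEX STEP)) OFFSET); the exact
   operand order of the inner sum may be canonicalized by
   simplify_gen_binary.  */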
/* Description of a memory address.  */

struct mem_address
{
  tree symbol, base, index, step, offset;
};
/* Returns address for TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)
{
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  if (addr->step && !integer_onep (addr->step))
    st = immed_wide_int_const (addr->step, pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    {
      offset_int dc = offset_int::from (addr->offset, SIGNED);
      off = immed_wide_int_const (dc, pointer_mode);
    }
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
	vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
	{
	  sym = (addr->symbol
		 ? gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
		 : NULL_RTX);
	  bse = (addr->base
		 ? gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
		 : NULL_RTX);
	  idx = (addr->index
		 ? gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
		 : NULL_RTX);

	  gen_addr_rtx (pointer_mode, sym, bse, idx,
			st ? const0_rtx : NULL_RTX,
			off ? const0_rtx : NULL_RTX,
			&templ->ref,
			&templ->step_p,
			&templ->off_p);
	}

      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}
/* Implement addr_for_mem_ref () directly from a tree, which avoids exporting
   the mem_address structure.  */

rtx
addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
{
  struct mem_address addr;
  get_address_description (exp, &addr);
  return addr_for_mem_ref (&addr, as, really_expand);
}
/* Returns address of MEM_REF in TYPE.  */

tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
	act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
				act_elem, step);
      addr_off = act_elem;
    }

  act_elem = TMR_INDEX2 (mem_ref);
  if (act_elem)
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
				addr_off, act_elem);
      else
	addr_off = act_elem;
    }

  if (offset && !integer_zerop (offset))
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
				fold_convert (TREE_TYPE (addr_off), offset));
      else
	addr_off = offset;
    }

  if (addr_off)
    addr = fold_build_pointer_plus (addr_base, addr_off);
  else
    addr = addr_base;

  return addr;
}
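
/* In other words, the address computed above is
   BASE + INDEX * STEP + INDEX2 + OFFSET, with absent or zero components
   simply skipped.  */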
/* Returns true if a memory reference in MODE and with parameters given by
   ADDR is valid on the current target.  */

static bool
valid_mem_ref_p (machine_mode mode, addr_space_t as,
		 struct mem_address *addr)
{
  rtx address;

  address = addr_for_mem_ref (addr, as, false);
  if (!address)
    return false;

  return memory_address_addr_space_p (mode, address, as);
}
/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false omit the verification step.  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (ptr_type_node, 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ??? As IVOPTs does not follow restrictions on where the base
     pointer may point to, create a MEM_REF only if we know that the
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}
/* Returns true if OBJ is an object whose address is a link time constant.  */

static bool
fixed_address_object_p (tree obj)
{
  return (TREE_CODE (obj) == VAR_DECL
	  && (TREE_STATIC (obj)
	      || DECL_EXTERNAL (obj))
	  && ! DECL_DLLIMPORT_P (obj));
}
/* If ADDR contains an address of an object that is a link-time constant,
   move it to PARTS->symbol.  */

static void
move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (TREE_CODE (val) == ADDR_EXPR
	  && fixed_address_object_p (TREE_OPERAND (val, 0)))
	break;
    }

  if (i == addr->n)
    return;

  parts->symbol = val;
  aff_combination_remove_elt (addr, i);
}
/* If ADDR contains an instance of BASE_HINT, move it to PARTS->base.  */

static void
move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
		   aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;
  int qual;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))
	break;
    }

  if (i == addr->n)
    return;

  /* Cast value to appropriate pointer type.  We cannot use a pointer
     to TYPE directly, as the back-end will assume registers of pointer
     type are aligned, and just the base itself may not actually be.
     We use void pointer to the type's address space instead.  */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
}
/* If ADDR contains an address of a dereferenced pointer, move it to
   PARTS->base.  */

static void
move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (POINTER_TYPE_P (TREE_TYPE (val)))
	break;
    }

  if (i == addr->n)
    return;

  parts->base = val;
  aff_combination_remove_elt (addr, i);
}
/* Moves the loop variant part V in linear address ADDR to be the index
   of PARTS.  */

static void
move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
{
  unsigned i;
  tree val = NULL_TREE;

  gcc_assert (!parts->index);
  for (i = 0; i < addr->n; i++)
    {
      val = addr->elts[i].val;
      if (operand_equal_p (val, v, 0))
	break;
    }

  if (i == addr->n)
    return;

  parts->index = fold_convert (sizetype, val);
  parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
  aff_combination_remove_elt (addr, i);
}
/* Adds ELT to PARTS.  */

static void
add_to_parts (struct mem_address *parts, tree elt)
{
  tree type;

  if (!parts->index)
    {
      parts->index = fold_convert (sizetype, elt);
      return;
    }

  if (!parts->base)
    {
      parts->base = elt;
      return;
    }

  /* Add ELT to base.  */
  type = TREE_TYPE (parts->base);
  if (POINTER_TYPE_P (type))
    parts->base = fold_build_pointer_plus (parts->base, elt);
  else
    parts->base = fold_build2 (PLUS_EXPR, type,
			       parts->base, elt);
}
/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and moves the corresponding
   element(s) to PARTS.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
			      aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  offset_int best_mult = 0;
  for (i = 0; i < addr->n; i++)
    {
      if (!wi::fits_shwi_p (addr->elts[i].coef))
	continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
	  || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
	continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
	{
	  best_mult_cost = acost;
	  best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
	}
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  */
  for (i = j = 0; i < addr->n; i++)
    {
      offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
      offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));

      if (amult == best_mult)
	op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
	op_code = MINUS_EXPR;
      else
	{
	  addr->elts[j] = addr->elts[i];
	  j++;
	  continue;
	}

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
	mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
	mult_elt = elt;
      else
	mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  parts->index = mult_elt;
  parts->step = wide_int_to_tree (sizetype, best_mult);
}
/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but a single
   register in an address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
	       tree base_hint, struct mem_address *parts,
	       bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  if (addr->offset != 0)
    parts->offset = wide_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* No need to do address parts reassociation if the number of parts
     is <= 2 -- in that case, no loop invariant code motion can be
     exposed.  */

  if (!base_hint && (addr->n > 2))
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication
     to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Try to find a base of the reference.  Since at the moment
     there is no reliable way to distinguish between a pointer and its
     offset, this is just a guess.  */
  if (!parts->symbol && base_hint)
    move_hint_to_base (type, parts, base_hint, addr);
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (addr->elts[i].coef != 1)
	part = fold_build2 (MULT_EXPR, sizetype, part,
			    wide_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
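
/* A hypothetical example of the splitting above: for an affine address such
   as &arr + 4 * i_1 + 32, where arr is a file-scope static array and i_1 an
   SSA name, one would typically end up with symbol = &arr, index = i_1,
   step = 4 (provided the target allows a multiplier of 4 in its addressing
   modes) and offset = 32.  */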
/* Force the PARTS into registers.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
					      is_gimple_mem_ref_addr, NULL_TREE,
					      true, GSI_SAME_STMT);
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
					     true, NULL_TREE,
					     true, GSI_SAME_STMT);
}
/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary,
   computations are emitted in front of GSI.  TYPE is the type of the created
   memory reference.  IV_CAND is the selected iv candidate in ADDR, and
   BASE_HINT is non-NULL if IV_CAND comes from a base address object.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  if (parts.step && !integer_onep (parts.step))
    {
      /* Move the multiplication to index.  */
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
				fold_build2 (MULT_EXPR, sizetype,
					     parts.index, parts.step),
				true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.symbol)
    {
      tmp = parts.symbol;
      gcc_assert (is_gimple_val (tmp));

      /* Add the symbol to base, forcing it into a register if necessary.  */
      if (parts.base)
	{
	  gcc_assert (useless_type_conversion_p
				(sizetype, TREE_TYPE (parts.base)));

	  if (parts.index)
	    {
	      parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (tmp, parts.base),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	    }
	  else
	    {
	      parts.index = parts.base;
	      parts.base = tmp;
	    }
	}
      else
	parts.base = tmp;
      parts.symbol = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.index)
    {
      /* Add index to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.index),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.index;
      parts.index = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Try adding offset to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.offset),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.offset;

      parts.offset = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
/* Copies components of the address from OP to ADDR.  */

void
get_address_description (tree op, struct mem_address *addr)
{
  if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
    {
      addr->symbol = TMR_BASE (op);
      addr->base = TMR_INDEX2 (op);
    }
  else
    {
      addr->symbol = NULL_TREE;
      if (TMR_INDEX2 (op))
	{
	  gcc_assert (integer_zerop (TMR_BASE (op)));
	  addr->base = TMR_INDEX2 (op);
	}
      else
	addr->base = TMR_BASE (op);
    }
  addr->index = TMR_INDEX (op);
  addr->step = TMR_STEP (op);
  addr->offset = TMR_OFFSET (op);
}
/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
	      || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
	;
      else if ((TREE_CODE (base) == MEM_REF
		|| TREE_CODE (base) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
	       && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
	{
	  struct ptr_info_def *new_pi;
	  unsigned int align, misalign;

	  duplicate_ssa_name_ptr_info
	    (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
	  new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
	  /* We have to be careful about transferring alignment information.  */
	  if (get_ptr_info_alignment (new_pi, &align, &misalign)
	      && TREE_CODE (old_ref) == MEM_REF
	      && !(TREE_CODE (new_ref) == TARGET_MEM_REF
		   && (TMR_INDEX2 (new_ref)
		       || (TMR_STEP (new_ref)
			   && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
			       < align)))))
	    {
	      unsigned int inc = (mem_ref_offset (old_ref).to_short_addr ()
				  - mem_ref_offset (new_ref).to_short_addr ());
	      adjust_ptr_info_misalignment (new_pi, inc);
	    }
	  else
	    mark_ptr_info_alignment_unknown (new_pi);
	}
      else if (TREE_CODE (base) == VAR_DECL
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	{
	  struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
	  pt_solution_set_var (&pi->pt, base);
	}
    }
}
/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      HOST_WIDE_INT offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
			 (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target -- the propagation
     result would not be valid anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
				TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
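
/* For example, a TARGET_MEM_REF with a constant INDEX of 4 and a STEP of 8
   has 4 * 8 == 32 folded into OFFSET by the code above, after which INDEX
   and STEP are cleared and a new reference is rebuilt.  */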
/* Dump PARTS to FILE.  */

extern void dump_mem_address (FILE *, struct mem_address *);
void
dump_mem_address (FILE *file, struct mem_address *parts)
{
  if (parts->symbol)
    {
      fprintf (file, "symbol: ");
      print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->base)
    {
      fprintf (file, "base: ");
      print_generic_expr (file, parts->base, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->index)
    {
      fprintf (file, "index: ");
      print_generic_expr (file, parts->index, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->step)
    {
      fprintf (file, "step: ");
      print_generic_expr (file, parts->step, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->offset)
    {
      fprintf (file, "offset: ");
      print_generic_expr (file, parts->offset, TDF_SLIM);
      fprintf (file, "\n");
    }
}

#include "gt-tree-ssa-address.h"