/* Gimple decl, type, and expression support functions.

   Copyright (C) 2007-2015 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "stringpool.h"
#include "gimple-ssa.h"
#include "fold-const.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "stor-layout.h"
#include "demangle.h"
#include "hash-set.h"
#include "rtl.h"

/* ----- Type related -----  */

/* Return true if the conversion from INNER_TYPE to OUTER_TYPE is a
   useless type conversion, otherwise return false.

   This function implicitly defines the middle-end type system.  With
   the notion of 'a < b' meaning that useless_type_conversion_p (a, b)
   holds and 'a > b' meaning that useless_type_conversion_p (b, a) holds,
   the following invariants shall be fulfilled:

     1) useless_type_conversion_p is transitive.
        If a < b and b < c then a < c.

     2) useless_type_conversion_p is not symmetric.
        From a < b does not follow a > b.

     3) Types define the available set of operations applicable to values.
        A type conversion is useless if the operations for the target type
        are a subset of the operations for the source type.  For example
        casts to void* are useless, casts from void* are not (void* can't
        be dereferenced or offsetted, but copied, hence its set of operations
        is a strict subset of that of all other data pointer types).  Casts
        to const T* are useless (can't be written to), casts from const T*
        to T* are not.  */

bool
useless_type_conversion_p (tree outer_type, tree inner_type)
{
  /* Do the following before stripping toplevel qualifiers.  */
  if (POINTER_TYPE_P (inner_type)
      && POINTER_TYPE_P (outer_type))
    {
      /* Do not lose casts between pointers to different address spaces.  */
      if (TYPE_ADDR_SPACE (TREE_TYPE (outer_type))
          != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
        return false;
      /* Do not lose casts to function pointer types.  */
      if ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
           || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
          && !(TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE
               || TREE_CODE (TREE_TYPE (inner_type)) == METHOD_TYPE))
        return false;
    }

  /* From now on qualifiers on value types do not matter.  */
  inner_type = TYPE_MAIN_VARIANT (inner_type);
  outer_type = TYPE_MAIN_VARIANT (outer_type);

  if (inner_type == outer_type)
    return true;

  /* Changes in machine mode are never useless conversions because the RTL
     middle-end expects explicit conversions between modes.  */
  if (TYPE_MODE (inner_type) != TYPE_MODE (outer_type))
    return false;

  /* If both the inner and outer types are integral types, then the
     conversion is not necessary if they have the same mode and
     signedness and precision, and both or neither are boolean.  */
  if (INTEGRAL_TYPE_P (inner_type)
      && INTEGRAL_TYPE_P (outer_type))
    {
      /* Preserve changes in signedness or precision.  */
      if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
          || TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
        return false;

      /* Preserve conversions to/from BOOLEAN_TYPE if types are not
         of precision one.  */
      if (((TREE_CODE (inner_type) == BOOLEAN_TYPE)
           != (TREE_CODE (outer_type) == BOOLEAN_TYPE))
          && TYPE_PRECISION (outer_type) != 1)
        return false;

      /* We don't need to preserve changes in the types' minimum or
         maximum values in general as these do not generate code
         unless the types' precisions are different.  */
      return true;
    }

  /* Scalar floating point types with the same mode are compatible.  */
  else if (SCALAR_FLOAT_TYPE_P (inner_type)
           && SCALAR_FLOAT_TYPE_P (outer_type))
    return true;

  /* Fixed point types with the same mode are compatible.  */
  else if (FIXED_POINT_TYPE_P (inner_type)
           && FIXED_POINT_TYPE_P (outer_type))
    return true;

  /* We need to take special care recursing to pointed-to types.  */
  else if (POINTER_TYPE_P (inner_type)
           && POINTER_TYPE_P (outer_type))
    {
      /* We do not care for const qualification of the pointed-to types
         as const qualification has no semantic value to the middle-end.  */

      /* Otherwise pointers/references are equivalent.  */
      return true;
    }

  /* Recurse for complex types.  */
  else if (TREE_CODE (inner_type) == COMPLEX_TYPE
           && TREE_CODE (outer_type) == COMPLEX_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
                                      TREE_TYPE (inner_type));

  /* Recurse for vector types with the same number of subparts.  */
  else if (TREE_CODE (inner_type) == VECTOR_TYPE
           && TREE_CODE (outer_type) == VECTOR_TYPE
           && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
    return useless_type_conversion_p (TREE_TYPE (outer_type),
                                      TREE_TYPE (inner_type));

  else if (TREE_CODE (inner_type) == ARRAY_TYPE
           && TREE_CODE (outer_type) == ARRAY_TYPE)
    {
      /* Preserve various attributes.  */
      if (TYPE_REVERSE_STORAGE_ORDER (inner_type)
          != TYPE_REVERSE_STORAGE_ORDER (outer_type))
        return false;
      if (TYPE_STRING_FLAG (inner_type) != TYPE_STRING_FLAG (outer_type))
        return false;

      /* Conversions from array types with unknown extent to
         array types with known extent are not useless.  */
      if (!TYPE_DOMAIN (inner_type) && TYPE_DOMAIN (outer_type))
        return false;

      /* Nor are conversions from array types with non-constant size to
         array types with constant size or to different size.  */
      if (TYPE_SIZE (outer_type)
          && TREE_CODE (TYPE_SIZE (outer_type)) == INTEGER_CST
          && (!TYPE_SIZE (inner_type)
              || TREE_CODE (TYPE_SIZE (inner_type)) != INTEGER_CST
              || !tree_int_cst_equal (TYPE_SIZE (outer_type),
                                      TYPE_SIZE (inner_type))))
        return false;

      /* Check conversions between arrays with partially known extents.
         If the array min/max values are constant they have to match.
         Otherwise allow conversions to unknown and variable extents.
         In particular this declares conversions that may change the
         mode to BLKmode as useless.  */
      if (TYPE_DOMAIN (inner_type)
          && TYPE_DOMAIN (outer_type)
          && TYPE_DOMAIN (inner_type) != TYPE_DOMAIN (outer_type))
        {
          tree inner_min = TYPE_MIN_VALUE (TYPE_DOMAIN (inner_type));
          tree outer_min = TYPE_MIN_VALUE (TYPE_DOMAIN (outer_type));
          tree inner_max = TYPE_MAX_VALUE (TYPE_DOMAIN (inner_type));
          tree outer_max = TYPE_MAX_VALUE (TYPE_DOMAIN (outer_type));

          /* After gimplification a variable min/max value carries no
             additional information compared to a NULL value.  All that
             matters has been lowered to be part of the IL.  */
          if (inner_min && TREE_CODE (inner_min) != INTEGER_CST)
            inner_min = NULL_TREE;
          if (outer_min && TREE_CODE (outer_min) != INTEGER_CST)
            outer_min = NULL_TREE;
          if (inner_max && TREE_CODE (inner_max) != INTEGER_CST)
            inner_max = NULL_TREE;
          if (outer_max && TREE_CODE (outer_max) != INTEGER_CST)
            outer_max = NULL_TREE;

          /* Conversions NULL / variable <- cst are useless, but not
             the other way around.  */
          if (outer_min
              && (!inner_min
                  || !tree_int_cst_equal (inner_min, outer_min)))
            return false;
          if (outer_max
              && (!inner_max
                  || !tree_int_cst_equal (inner_max, outer_max)))
            return false;
        }

      /* Recurse on the element check.  */
      return useless_type_conversion_p (TREE_TYPE (outer_type),
                                        TREE_TYPE (inner_type));
    }

  else if ((TREE_CODE (inner_type) == FUNCTION_TYPE
            || TREE_CODE (inner_type) == METHOD_TYPE)
           && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    {
      tree outer_parm, inner_parm;

      /* If the return types are not compatible bail out.  */
      if (!useless_type_conversion_p (TREE_TYPE (outer_type),
                                      TREE_TYPE (inner_type)))
        return false;

      /* Method types should belong to a compatible base class.  */
      if (TREE_CODE (inner_type) == METHOD_TYPE
          && !useless_type_conversion_p (TYPE_METHOD_BASETYPE (outer_type),
                                         TYPE_METHOD_BASETYPE (inner_type)))
        return false;

      /* A conversion to an unprototyped argument list is ok.  */
      if (!prototype_p (outer_type))
        return true;

      /* If the unqualified argument types are compatible the conversion
         is useless.  */
      if (TYPE_ARG_TYPES (outer_type) == TYPE_ARG_TYPES (inner_type))
        return true;

      for (outer_parm = TYPE_ARG_TYPES (outer_type),
           inner_parm = TYPE_ARG_TYPES (inner_type);
           outer_parm && inner_parm;
           outer_parm = TREE_CHAIN (outer_parm),
           inner_parm = TREE_CHAIN (inner_parm))
        if (!useless_type_conversion_p
               (TYPE_MAIN_VARIANT (TREE_VALUE (outer_parm)),
                TYPE_MAIN_VARIANT (TREE_VALUE (inner_parm))))
          return false;

      /* If there is a mismatch in the number of arguments the functions
         are not compatible.  */
      if (outer_parm || inner_parm)
        return false;

      /* Defer to the target if necessary.  */
      if (TYPE_ATTRIBUTES (inner_type) || TYPE_ATTRIBUTES (outer_type))
        return comp_type_attributes (outer_type, inner_type) != 0;

      return true;
    }

  /* For aggregates we rely on TYPE_CANONICAL exclusively and require
     explicit conversions for types that need to be structurally
     compared.  */
  else if (AGGREGATE_TYPE_P (inner_type)
           && TREE_CODE (inner_type) == TREE_CODE (outer_type))
    return TYPE_CANONICAL (inner_type)
           && TYPE_CANONICAL (inner_type) == TYPE_CANONICAL (outer_type);

  else if (TREE_CODE (inner_type) == OFFSET_TYPE
           && TREE_CODE (outer_type) == OFFSET_TYPE)
    return useless_type_conversion_p (TREE_TYPE (outer_type),
                                      TREE_TYPE (inner_type))
           && useless_type_conversion_p
                (TYPE_OFFSET_BASETYPE (outer_type),
                 TYPE_OFFSET_BASETYPE (inner_type));

  return false;
}
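
/* Illustrative sketch, kept under '#if 0' so it is not compiled: how the
   predicate above behaves on the global integral type nodes from tree.h.
   A conversion between two integral types is useless only when mode,
   signedness and precision all agree.  */
#if 0
static void
useless_type_conversion_example (void)
{
  /* Identical types: trivially useless to convert.  */
  gcc_assert (useless_type_conversion_p (integer_type_node,
                                         integer_type_node));

  /* 'int' and 'unsigned int' share a mode and precision, but the change
     in signedness must be preserved, so the conversion is not useless.  */
  gcc_assert (!useless_type_conversion_p (unsigned_type_node,
                                          integer_type_node));
}
#endif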

/* ----- Decl related -----  */

/* Set sequence SEQ to be the GIMPLE body for function FN.  */

void
gimple_set_body (tree fndecl, gimple_seq seq)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn == NULL)
    {
      /* If FNDECL still does not have a function structure associated
         with it, then it does not make sense for it to receive a
         GIMPLE body.  */
      gcc_assert (seq == NULL);
    }
  else
    fn->gimple_body = seq;
}


/* Return the body of GIMPLE statements for function FN.  After the
   CFG pass, the function body doesn't exist anymore because it has
   been split up into basic blocks.  In this case, it returns
   NULL.  */

gimple_seq
gimple_body (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return fn ? fn->gimple_body : NULL;
}

/* Return true when FNDECL has Gimple body either in unlowered
   or CFG form.  */
bool
gimple_has_body_p (tree fndecl)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  return (gimple_body (fndecl) || (fn && fn->cfg));
}

/* Return a printable name for symbol DECL.  */

const char *
gimple_decl_printable_name (tree decl, int verbosity)
{
  if (!DECL_NAME (decl))
    return NULL;

  if (DECL_ASSEMBLER_NAME_SET_P (decl))
    {
      const char *str, *mangled_str;
      int dmgl_opts = DMGL_NO_OPTS;

      if (verbosity >= 2)
        {
          dmgl_opts = DMGL_VERBOSE
                      | DMGL_ANSI
                      | DMGL_GNU_V3
                      | DMGL_RET_POSTFIX;
          if (TREE_CODE (decl) == FUNCTION_DECL)
            dmgl_opts |= DMGL_PARAMS;
        }

      mangled_str = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
      str = cplus_demangle_v3 (mangled_str, dmgl_opts);
      return (str) ? str : mangled_str;
    }

  return IDENTIFIER_POINTER (DECL_NAME (decl));
}


/* Create a new VAR_DECL and copy information from VAR to it.  */

tree
copy_var_decl (tree var, tree name, tree type)
{
  tree copy = build_decl (DECL_SOURCE_LOCATION (var), VAR_DECL, name, type);

  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (var);
  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (var);
  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (var);
  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (var);
  DECL_IGNORED_P (copy) = DECL_IGNORED_P (var);
  DECL_CONTEXT (copy) = DECL_CONTEXT (var);
  TREE_NO_WARNING (copy) = TREE_NO_WARNING (var);
  TREE_USED (copy) = 1;
  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
  DECL_ATTRIBUTES (copy) = DECL_ATTRIBUTES (var);

  return copy;
}

/* Strip off a legitimate source ending from the input string NAME of
   length LEN.  Rather than having to know the names used by all of
   our front ends, we strip off an ending of a period followed by
   up to five characters.  (Java uses ".class".)  */

static inline void
remove_suffix (char *name, int len)
{
  int i;

  for (i = 2;  i < 8 && len > i;  i++)
    {
      if (name[len - i] == '.')
        {
          name[len - i] = '\0';
          break;
        }
    }
}
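
/* Illustrative sketch, kept under '#if 0' so it is not compiled: the
   stripping above turns a front-end name such as "foo.class" into "foo"
   before it is reused as a temporary-name prefix.  */
#if 0
static void
remove_suffix_example (void)
{
  char name[] = "foo.class";
  remove_suffix (name, strlen (name));
  gcc_assert (strcmp (name, "foo") == 0);
}
#endif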

/* Create a new temporary name with PREFIX.  Return an identifier.  */

static GTY(()) unsigned int tmp_var_id_num;

tree
create_tmp_var_name (const char *prefix)
{
  char *tmp_name;

  if (prefix)
    {
      char *preftmp = ASTRDUP (prefix);

      remove_suffix (preftmp, strlen (preftmp));
      clean_symbol_name (preftmp);

      prefix = preftmp;
    }

  ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
  return get_identifier (tmp_name);
}

/* Create a new temporary variable declaration of type TYPE.
   Do NOT push it into the current binding.  */

tree
create_tmp_var_raw (tree type, const char *prefix)
{
  tree tmp_var;

  tmp_var = build_decl (input_location,
                        VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
                        type);

  /* The variable was declared by the compiler.  */
  DECL_ARTIFICIAL (tmp_var) = 1;
  /* And we don't want debug info for it.  */
  DECL_IGNORED_P (tmp_var) = 1;

  /* Make the variable writable.  */
  TREE_READONLY (tmp_var) = 0;

  DECL_EXTERNAL (tmp_var) = 0;
  TREE_STATIC (tmp_var) = 0;
  TREE_USED (tmp_var) = 1;

  return tmp_var;
}

/* Create a new temporary variable declaration of type TYPE.  DO push the
   variable into the current binding.  Further, assume that this is called
   only from gimplification or optimization, at which point the creation of
   certain types is a bug.  */

tree
create_tmp_var (tree type, const char *prefix)
{
  tree tmp_var;

  /* We don't allow types that are addressable (meaning we can't make copies),
     or incomplete.  We also used to reject all variable-size objects here,
     but now support those for which a constant upper bound can be obtained.
     The processing for variable sizes is performed in gimple_add_tmp_var,
     the point at which it really matters and which is possibly reached via
     paths not going through this function, e.g. after direct calls to
     create_tmp_var_raw.  */
  gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));

  tmp_var = create_tmp_var_raw (type, prefix);
  gimple_add_tmp_var (tmp_var);
  return tmp_var;
}

/* Create a new temporary variable declaration of type TYPE by calling
   create_tmp_var and if TYPE is a vector or a complex number, mark the new
   temporary as gimple register.  */

tree
create_tmp_reg (tree type, const char *prefix)
{
  tree tmp;

  tmp = create_tmp_var (type, prefix);
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp) = 1;

  return tmp;
}

/* Create a new temporary variable declaration of type TYPE in function FN
   by calling create_tmp_var_raw and gimple_add_tmp_var_fn, and if TYPE is
   a vector or a complex number, mark the new temporary as gimple
   register.  */

tree
create_tmp_reg_fn (struct function *fn, tree type, const char *prefix)
{
  tree tmp;

  tmp = create_tmp_var_raw (type, prefix);
  gimple_add_tmp_var_fn (fn, tmp);
  if (TREE_CODE (type) == COMPLEX_TYPE
      || TREE_CODE (type) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (tmp) = 1;

  return tmp;
}
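
/* Illustrative sketch, kept under '#if 0' so it is not compiled: typical
   use of the helpers above from gimplification.  create_tmp_var is
   equivalent to the two raw steps spelled out here; create_tmp_reg would
   additionally set DECL_GIMPLE_REG_P for complex or vector types.  */
#if 0
static tree
create_tmp_example (void)
{
  tree tmp = create_tmp_var_raw (integer_type_node, "iftmp");
  gimple_add_tmp_var (tmp);  /* Push it into the current binding.  */
  return tmp;
}
#endif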

/* ----- Expression related -----  */

/* Extract the operands and code for expression EXPR into *SUBCODE_P,
   *OP1_P, *OP2_P and *OP3_P respectively.  */

void
extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
                         tree *op2_p, tree *op3_p)
{
  enum gimple_rhs_class grhs_class;

  *subcode_p = TREE_CODE (expr);
  grhs_class = get_gimple_rhs_class (*subcode_p);

  if (grhs_class == GIMPLE_TERNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = TREE_OPERAND (expr, 2);
    }
  else if (grhs_class == GIMPLE_BINARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = TREE_OPERAND (expr, 1);
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_UNARY_RHS)
    {
      *op1_p = TREE_OPERAND (expr, 0);
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      *op1_p = expr;
      *op2_p = NULL_TREE;
      *op3_p = NULL_TREE;
    }
  else
    gcc_unreachable ();
}
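
/* Illustrative sketch, kept under '#if 0' so it is not compiled:
   decomposing a GENERIC 'a + b' into the code and operand slots used to
   build a GIMPLE assignment right-hand side.  A and B are assumed to be
   gimple values of type 'int'.  */
#if 0
static void
extract_ops_example (tree a, tree b)
{
  tree sum = build2 (PLUS_EXPR, integer_type_node, a, b);
  enum tree_code code;
  tree op1, op2, op3;

  extract_ops_from_tree_1 (sum, &code, &op1, &op2, &op3);
  /* PLUS_EXPR is a GIMPLE_BINARY_RHS, so the third slot stays empty.  */
  gcc_assert (code == PLUS_EXPR && op1 == a && op2 == b && op3 == NULL_TREE);
}
#endif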

/* Extract operands for a GIMPLE_COND statement out of COND_EXPR tree COND.  */

void
gimple_cond_get_ops_from_tree (tree cond, enum tree_code *code_p,
                               tree *lhs_p, tree *rhs_p)
{
  gcc_assert (COMPARISON_CLASS_P (cond)
              || TREE_CODE (cond) == TRUTH_NOT_EXPR
              || is_gimple_min_invariant (cond)
              || SSA_VAR_P (cond));

  extract_ops_from_tree (cond, code_p, lhs_p, rhs_p);

  /* Canonicalize conditionals of the form 'if (!VAL)'.  */
  if (*code_p == TRUTH_NOT_EXPR)
    {
      *code_p = EQ_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
  /* Canonicalize conditionals of the form 'if (VAL)'.  */
  else if (TREE_CODE_CLASS (*code_p) != tcc_comparison)
    {
      *code_p = NE_EXPR;
      gcc_assert (*lhs_p && *rhs_p == NULL_TREE);
      *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
    }
}
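
/* Illustrative sketch, kept under '#if 0' so it is not compiled: the
   canonicalization above in action.  X is assumed to be a gimple value of
   boolean type; the GENERIC condition '!x' comes back as 'x == 0'.  */
#if 0
static void
gimple_cond_ops_example (tree x)
{
  tree not_x = build1 (TRUTH_NOT_EXPR, boolean_type_node, x);
  enum tree_code code;
  tree lhs, rhs;

  gimple_cond_get_ops_from_tree (not_x, &code, &lhs, &rhs);
  gcc_assert (code == EQ_EXPR && lhs == x && integer_zerop (rhs));
}
#endif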

/* Return true if T is a valid LHS for a GIMPLE assignment expression.  */

bool
is_gimple_lvalue (tree t)
{
  return (is_gimple_addressable (t)
          || TREE_CODE (t) == WITH_SIZE_EXPR
          /* These are complex lvalues, but don't have addresses, so they
             go here.  */
          || TREE_CODE (t) == BIT_FIELD_REF);
}

/* Return true if T is a GIMPLE condition.  */

bool
is_gimple_condexpr (tree t)
{
  return (is_gimple_val (t) || (COMPARISON_CLASS_P (t)
                                && !tree_could_throw_p (t)
                                && is_gimple_val (TREE_OPERAND (t, 0))
                                && is_gimple_val (TREE_OPERAND (t, 1))));
}

/* Return true if T is a gimple address.  */

bool
is_gimple_address (const_tree t)
{
  tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = TREE_OPERAND (t, 0);
  while (handled_component_p (op))
    {
      if ((TREE_CODE (op) == ARRAY_REF
           || TREE_CODE (op) == ARRAY_RANGE_REF)
          && !is_gimple_val (TREE_OPERAND (op, 1)))
        return false;

      op = TREE_OPERAND (op, 0);
    }

  if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
    return true;

  switch (TREE_CODE (op))
    {
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
    case VAR_DECL:
    case CONST_DECL:
      return true;

    default:
      return false;
    }
}

/* Return true if T is a gimple invariant address.  */

bool
is_gimple_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));
  if (!op)
    return false;

  if (TREE_CODE (op) == MEM_REF)
    {
      const_tree op0 = TREE_OPERAND (op, 0);
      return (TREE_CODE (op0) == ADDR_EXPR
              && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
                  || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
    }

  return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
}

/* Return true if T is a gimple invariant address at IPA level
   (so addresses of variables on stack are not allowed).  */

bool
is_gimple_ip_invariant_address (const_tree t)
{
  const_tree op;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;

  op = strip_invariant_refs (TREE_OPERAND (t, 0));
  if (!op)
    return false;

  if (TREE_CODE (op) == MEM_REF)
    {
      const_tree op0 = TREE_OPERAND (op, 0);
      return (TREE_CODE (op0) == ADDR_EXPR
              && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
                  || decl_address_ip_invariant_p (TREE_OPERAND (op0, 0))));
    }

  return CONSTANT_CLASS_P (op) || decl_address_ip_invariant_p (op);
}

/* Return true if T is a GIMPLE minimal invariant.  It's a restricted
   form of function invariant.  */

bool
is_gimple_min_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_invariant_address (t);

  return is_gimple_constant (t);
}

/* Return true if T is a GIMPLE interprocedural invariant.  It's a restricted
   form of gimple minimal invariant.  */

bool
is_gimple_ip_invariant (const_tree t)
{
  if (TREE_CODE (t) == ADDR_EXPR)
    return is_gimple_ip_invariant_address (t);

  return is_gimple_constant (t);
}

/* Return true if T is a non-aggregate register variable.  */

bool
is_gimple_reg (tree t)
{
  if (virtual_operand_p (t))
    return false;

  if (TREE_CODE (t) == SSA_NAME)
    return true;

  if (!is_gimple_variable (t))
    return false;

  if (!is_gimple_reg_type (TREE_TYPE (t)))
    return false;

  /* A volatile decl is not acceptable because we can't reuse it as
     needed.  We need to copy it into a temp first.  */
  if (TREE_THIS_VOLATILE (t))
    return false;

  /* We define "registers" as things that can be renamed as needed,
     which with our infrastructure does not apply to memory.  */
  if (needs_to_live_in_memory (t))
    return false;

  /* Hard register variables are an interesting case.  For those that
     are call-clobbered, we don't know where all the calls are, since
     we don't (want to) take into account which operations will turn
     into libcalls at the rtl level.  For those that are call-saved,
     we don't currently model the fact that calls may in fact change
     global hard registers, nor do we examine ASM_CLOBBERS at the tree
     level, and so miss variable changes that these might imply.  All
     around, it seems safest to not do too much optimization with these
     at the tree level at all.  We'll have to rely on the rtl optimizers
     to clean this up, as there we've got all the appropriate bits
     exposed.  */
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return false;

  /* Complex and vector values must have been put into SSA-like form.
     That is, no assignments to the individual components.  */
  if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
    return DECL_GIMPLE_REG_P (t);

  return true;
}


/* Return true if T is a GIMPLE rvalue, i.e. an identifier or a constant.  */

bool
is_gimple_val (tree t)
{
  /* Make loads from volatiles and memory vars explicit.  */
  if (is_gimple_variable (t)
      && is_gimple_reg_type (TREE_TYPE (t))
      && !is_gimple_reg (t))
    return false;

  return (is_gimple_variable (t) || is_gimple_min_invariant (t));
}

/* Similarly, but accept hard registers as inputs to asm statements.  */

bool
is_gimple_asm_val (tree t)
{
  if (TREE_CODE (t) == VAR_DECL && DECL_HARD_REGISTER (t))
    return true;

  return is_gimple_val (t);
}

/* Return true if T is a GIMPLE minimal lvalue.  */

bool
is_gimple_min_lval (tree t)
{
  if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
    return false;
  return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
}

/* Return true if T is a valid function operand of a CALL_EXPR.  */

bool
is_gimple_call_addr (tree t)
{
  return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
}

/* Return true if T is a valid address operand of a MEM_REF.  */

bool
is_gimple_mem_ref_addr (tree t)
{
  return (is_gimple_reg (t)
          || TREE_CODE (t) == INTEGER_CST
          || (TREE_CODE (t) == ADDR_EXPR
              && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
                  || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
}

/* Hold trees marked addressable during expand.  */

static hash_set<tree> *mark_addressable_queue;

/* Mark X as addressable or queue it up if called during expand.  We
   don't want to apply it immediately during expand because decls are
   made addressable at that point due to RTL-only concerns, such as
   uses of memcpy for block moves, and TREE_ADDRESSABLE changes
   is_gimple_reg, which might make it seem like a variable that used
   to be a gimple_reg shouldn't have been an SSA name.  So we queue up
   this flag setting and only apply it when we're done with GIMPLE and
   only RTL issues matter.  */

static void
mark_addressable_1 (tree x)
{
  if (!currently_expanding_to_rtl)
    {
      TREE_ADDRESSABLE (x) = 1;
      return;
    }

  if (!mark_addressable_queue)
    mark_addressable_queue = new hash_set<tree>();
  mark_addressable_queue->add (x);
}

/* Adaptor for mark_addressable_1 for use in hash_set traversal.  */

bool
mark_addressable_2 (tree const &x, void * ATTRIBUTE_UNUSED = NULL)
{
  mark_addressable_1 (x);
  return false;
}

/* Mark all queued trees as addressable, and empty the queue.  To be
   called right after clearing CURRENTLY_EXPANDING_TO_RTL.  */

void
flush_mark_addressable_queue ()
{
  gcc_assert (!currently_expanding_to_rtl);
  if (mark_addressable_queue)
    {
      mark_addressable_queue->traverse<void *, mark_addressable_2> (NULL);
      delete mark_addressable_queue;
      mark_addressable_queue = NULL;
    }
}
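
/* Illustrative sketch, kept under '#if 0' so it is not compiled: the
   intended protocol around the queue above.  While
   currently_expanding_to_rtl is set, mark_addressable only records DECL;
   the TREE_ADDRESSABLE flag is applied once expansion is done and the
   queue is flushed.  DECL is assumed to be a local VAR_DECL.  */
#if 0
static void
mark_addressable_protocol_example (tree decl)
{
  currently_expanding_to_rtl = 1;
  mark_addressable (decl);              /* Queued, not applied yet.  */
  currently_expanding_to_rtl = 0;
  flush_mark_addressable_queue ();      /* TREE_ADDRESSABLE (decl) now set.  */
}
#endif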

/* Mark X addressable.  Unlike the langhook we expect X to be in gimple
   form and we don't do any syntax checking.  */

void
mark_addressable (tree x)
{
  while (handled_component_p (x))
    x = TREE_OPERAND (x, 0);
  if (TREE_CODE (x) == MEM_REF
      && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
    x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
  if (TREE_CODE (x) != VAR_DECL
      && TREE_CODE (x) != PARM_DECL
      && TREE_CODE (x) != RESULT_DECL)
    return;
  mark_addressable_1 (x);

  /* Also mark the artificial SSA_NAME that points to the partition of X.  */
  if (TREE_CODE (x) == VAR_DECL
      && !DECL_EXTERNAL (x)
      && !TREE_STATIC (x)
      && cfun->gimple_df != NULL
      && cfun->gimple_df->decls_to_pointers != NULL)
    {
      tree *namep = cfun->gimple_df->decls_to_pointers->get (x);
      if (namep)
        mark_addressable_1 (*namep);
    }
}

/* Returns true iff T is a valid RHS for an assignment to a renamed
   user -- or front-end generated artificial -- variable.  */

bool
is_gimple_reg_rhs (tree t)
{
  return get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS;
}

#include "gt-gimple-expr.h"