hppa: Fix bug in atomic_storedi_1 pattern
[official-gcc.git] / gcc / ipa-strub.cc
blob0ee063c9eddc442c0db53ca10934669068ed32d8
1 /* strub (stack scrubbing) support.
2 Copyright (C) 2021-2024 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <oliva@adacore.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "gimplify.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "gimple-iterator.h"
31 #include "gimplify-me.h"
32 #include "tree-into-ssa.h"
33 #include "tree-ssa.h"
34 #include "tree-cfg.h"
35 #include "cfghooks.h"
36 #include "cfgloop.h"
37 #include "cfgcleanup.h"
38 #include "tree-eh.h"
39 #include "except.h"
40 #include "builtins.h"
41 #include "attribs.h"
42 #include "tree-inline.h"
43 #include "cgraph.h"
44 #include "alloc-pool.h"
45 #include "symbol-summary.h"
46 #include "ipa-prop.h"
47 #include "ipa-fnsummary.h"
48 #include "gimple-fold.h"
49 #include "fold-const.h"
50 #include "gimple-walk.h"
51 #include "tree-dfa.h"
52 #include "langhooks.h"
53 #include "calls.h"
54 #include "vec.h"
55 #include "stor-layout.h"
56 #include "varasm.h"
57 #include "alias.h"
58 #include "diagnostic.h"
59 #include "intl.h"
60 #include "ipa-strub.h"
61 #include "symtab-thunks.h"
62 #include "attr-fnspec.h"
63 #include "target.h"
65 /* This file introduces two passes that, together, implement
66 machine-independent stack scrubbing, strub for short. It arranges
67 for stack frames that have strub enabled to be zeroed-out after
68 relinquishing control to a caller, whether by returning or by
69 propagating an exception. This admittedly unusual design decision
70 was driven by exception support (one needs a stack frame to be
71 active to propagate exceptions out of it), and it enabled an
72 implementation that is entirely machine-independent (no custom
73 epilogue code is required).
75 Strub modes can be selected for stack frames by attaching attribute
76 strub to functions or to variables (to their types, actually).
77 Different strub modes, with different implementation details, are
78 available, and they can be selected by an argument to the strub
79 attribute. When enabled by strub-enabled variables, whether by
80 accessing (as in reading from) statically-allocated ones, or by
81 introducing (as in declaring) automatically-allocated ones, a
82 suitable mode is selected automatically.
84 At-calls mode modifies the interface of a function, adding a stack
85 watermark argument, that callers use to clean up the stack frame of
86 the called function. Because of the interface change, it can only
87 be used when explicitly selected, or when a function is internal to
88 a translation unit. Strub-at-calls function types are distinct
89 from their original types (they're not modified in-place), and they
90 are not interchangeable with other function types.
92 Internal mode, in turn, does not modify the type or the interface
93 of a function. It is currently implemented by turning the function
94 into a wrapper, moving the function body to a separate wrapped
95 function, and scrubbing the wrapped body's stack in the wrapper.
96 Internal-strub function types are mostly interface-compatible with
97 other strub modes, namely callable (from strub functions, though
98 not strub-enabled) and disabled (not callable from strub
99 functions).
101 Always_inline functions can be strub functions, but they can only
102 be called from other strub functions, because strub functions must
103 never be inlined into non-strub functions. Internal and at-calls
104 modes are indistinguishable when it comes to always_inline
105 functions: they will necessarily be inlined into another strub
106 function, and will thus be integrated into the caller's stack
107 frame, whatever the mode. (Contrast with non-always_inline strub
108 functions: an at-calls function can be called from other strub
109 functions, ensuring no discontinuity in stack erasing, whereas an
110 internal-strub function can only be called from other strub
111 functions if it happens to be inlined, or if -fstrub=relaxed mode
112 is in effect (that's the default). In -fstrub=strict mode,
113 internal-strub functions are not callable from strub functions,
114 because the wrapper itself is not strubbed.
116 The implementation involves two simple-IPA passes. The earliest
117 one, strub-mode, assigns strub modes to functions. It needs to run
118 before any inlining, so that we can prevent inlining of strub
119 functions into non-strub functions. It notes explicit strub mode
120 requests, enables strub in response to strub variables and testing
121 options, and flags unsatisfiable requests.
123 Three possibilities of unsatisfiable requests come to mind: (a)
124 when a strub mode is explicitly selected, but the function uses
125 features that make it ineligible for that mode (e.g. at-calls rules
126 out calling __builtin_apply_args, because of the interface changes,
127 and internal mode rules out noclone or otherwise non-versionable
128 functions, non-default varargs, non-local or forced labels, and
129 functions with far too many arguments); (b) when some strub mode
130 must be enabled because of a strub variable, but the function is
131 not eligible or not viable for any mode; and (c) when
132 -fstrub=strict is enabled, and calls are found in strub functions
133 to functions that are not callable from strub contexts.
134 compute_strub_mode implements (a) and (b), and verify_strub
135 implements (c).
137 The second IPA pass modifies interfaces of at-calls-strub functions
138 and types, introduces strub calls in and around them, and splits
139 internal-strub functions. It is placed after early inlining, so
140 that even internal-strub functions get a chance of being inlined
141 into other strub functions, but before non-early inlining, so that
142 internal-strub wrapper functions still get a chance of inlining
143 after splitting.
145 Wrappers avoid duplicating the copying of large arguments again by
146 passing them by reference to the wrapped bodies. This involves
147 occasional SSA rewriting of address computations, because of the
148 additional indirection. Besides these changes, and the
149 introduction of the stack watermark parameter, wrappers and wrapped
150 functions cooperate to handle variable argument lists (performing
151 va_start in the wrapper, passing the list as an argument, and
152 replacing va_start calls in the wrapped body with va_copy), and
153 __builtin_apply_args (also called in the wrapper and passed to the
154 wrapped body as an argument).
156 Strub bodies (both internal-mode wrapped bodies, and at-calls
157 functions) always start by adjusting the watermark parameter, by
158 calling __builtin___strub_update. The compiler inserts them in the
159 main strub pass. Allocations of additional stack space for the
160 frame (__builtin_alloca) are also followed by watermark updates.
161 Stack space temporarily allocated to pass arguments to other
162 functions, released right after the call, is not regarded as part
163 of the frame. Around calls to them, i.e., in internal-mode
164 wrappers and at-calls callers (even calls through pointers), calls
165 to __builtin___strub_enter and __builtin___strub_leave are
166 inserted, the latter as a __finally block, so that it runs at
167 regular and exceptional exit paths. strub_enter only initializes
168 the stack watermark, and strub_leave is where the scrubbing takes
169 place, overwriting with zeros the stack space from the top of the
170 stack to the watermark.
172 These calls can be optimized in various cases. In
173 pass_ipa_strub::adjust_at_calls_call, for example, we enable
174 tail-calling and other optimized calls from one strub body to
175 another by passing on the watermark parameter. The builtins
176 themselves may undergo inline substitution during expansion,
177 depending on optimization levels.  This involves dealing with stack
178 red zones (when the builtins are called out-of-line, the red zone
179 cannot be used) and other ugly details related with inlining strub
180 bodies into other strub bodies (see expand_builtin_strub_update).
181 expand_builtin_strub_leave may even perform partial inline
182 substitution. */
184 /* Const and pure functions that gain a watermark parameter for strub purposes
185 are still regarded as such, which may cause the inline expansions of the
186 __strub builtins to malfunction. Ideally, attribute "fn spec" would enable
187 us to inform the backend about requirements and side effects of the call, but
188 call_fusage building in calls.c:expand_call does not even look at
189 attr_fnspec, so we resort to asm loads and updates to attain an equivalent
190 effect. Once expand_call gains the ability to issue extra memory uses and
191 clobbers based on pure/const function's fnspec, we can define this to 1. */
192 #define ATTR_FNSPEC_DECONST_WATERMARK 0
194 enum strub_mode {
195 /* This mode denotes a regular function, that does not require stack
196 scrubbing (strubbing). It may call any other functions, but if
197 it calls AT_CALLS (or WRAPPED) ones, strubbing logic is
198 automatically introduced around those calls (the latter, by
199 inlining INTERNAL wrappers). */
200 STRUB_DISABLED = 0,
202 /* This denotes a function whose signature is (to be) modified to
203 take an extra parameter, for stack use annotation, and its
204 callers must initialize and pass that argument, and perform the
205 strubbing. Functions that are explicitly marked with attribute
206 strub must have the mark visible wherever the function is,
207 including aliases, and overriders and overriding methods.
208 Functions that are implicitly marked for strubbing, for accessing
209 variables explicitly marked as such, will only select this
210 strubbing method if they are internal to a translation unit. It
211 can only be inlined into other strubbing functions, i.e.,
212 STRUB_AT_CALLS or STRUB_WRAPPED. */
213 STRUB_AT_CALLS = 1,
215 /* This denotes a function that is to perform strubbing internally,
216 without any changes to its interface (the function is turned into
217 a strubbing wrapper, and its original body is moved to a separate
218 STRUB_WRAPPED function, with a modified interface). Functions
219 may be explicitly marked with attribute strub(2), and the
220 attribute must be visible at the point of definition. Functions
221 that are explicitly marked for strubbing, for accessing variables
222 explicitly marked as such, may select this strubbing mode if
223 their interface cannot change, e.g. because its interface is
224 visible to other translation units, directly, by indirection
225 (having its address taken), inheritance, etc. Functions that use
226 this method must not have the noclone attribute, nor the noipa
227 one. Functions marked as always_inline may select this mode, but
228 they are NOT wrapped, they remain unchanged, and are only inlined
229 into strubbed contexts. Once non-always_inline functions are
230 wrapped, the wrapper becomes STRUB_WRAPPER, and the wrapped becomes
231 STRUB_WRAPPED. */
232 STRUB_INTERNAL = 2,
234 /* This denotes a function whose stack is not strubbed, but that is
235 nevertheless explicitly or implicitly marked as callable from strubbing
236 functions. Normally, only STRUB_AT_CALLS (and STRUB_INTERNAL ->
237 STRUB_WRAPPED) functions can be called from strubbing contexts (bodies of
238 STRUB_AT_CALLS, STRUB_INTERNAL and STRUB_WRAPPED functions), but attribute
239 strub(3) enables other functions to be (indirectly) called from these
240 contexts. Some builtins and internal functions may be implicitly marked as
241 STRUB_CALLABLE. */
242 STRUB_CALLABLE = 3,
244 /* This denotes the function that took over the body of a
245 STRUB_INTERNAL function. At first, it's only called by its
246 wrapper, but the wrapper may be inlined. The wrapped function,
247 in turn, can only be inlined into other functions whose stack
248 frames are strubbed, i.e., that are STRUB_WRAPPED or
249 STRUB_AT_CALLS. */
250 STRUB_WRAPPED = -1,
252 /* This denotes the wrapper function that replaced the STRUB_INTERNAL
253 function. This mode overrides the STRUB_INTERNAL mode at the time the
254 internal to-be-wrapped function becomes a wrapper, so that inlining logic
255 can tell one from the other. */
256 STRUB_WRAPPER = -2,
258 /* This denotes an always_inline function that requires strubbing. It can
259 only be called from, and inlined into, other strubbing contexts. */
260 STRUB_INLINABLE = -3,
262 /* This denotes a function that accesses strub variables, so it would call for
263 internal strubbing (whether or not it's eligible for that), but since
264 at-calls strubbing is viable, that's selected as an optimization. This
265 mode addresses the inconvenience that such functions may have different
266 modes selected depending on optimization flags, and get a different
267 callable status depending on that choice: if we assigned them
268 STRUB_AT_CALLS mode, they would be callable when optimizing, whereas
269 STRUB_INTERNAL would not be callable. */
270 STRUB_AT_CALLS_OPT = -4,
274 /* Look up a strub attribute in TYPE, and return it. */
276 static tree
277 get_strub_attr_from_type (tree type)
279 return lookup_attribute ("strub", TYPE_ATTRIBUTES (type));
282 /* Look up a strub attribute in DECL or in its type, and return it. */
284 static tree
285 get_strub_attr_from_decl (tree decl)
287 tree ret = lookup_attribute ("strub", DECL_ATTRIBUTES (decl));
288 if (ret)
289 return ret;
290 return get_strub_attr_from_type (TREE_TYPE (decl));
293 #define STRUB_ID_COUNT 8
294 #define STRUB_IDENT_COUNT 3
295 #define STRUB_TYPE_COUNT 5
297 #define STRUB_ID_BASE 0
298 #define STRUB_IDENT_BASE (STRUB_ID_BASE + STRUB_ID_COUNT)
299 #define STRUB_TYPE_BASE (STRUB_IDENT_BASE + STRUB_IDENT_COUNT)
300 #define STRUB_CACHE_SIZE (STRUB_TYPE_BASE + STRUB_TYPE_COUNT)
302 /* Keep the strub mode and temp identifiers and types from being GC'd. */
303 static GTY((deletable)) tree strub_cache[STRUB_CACHE_SIZE];
305 /* Define a function to cache identifier ID, to be used as a strub attribute
306 parameter for a strub mode named after NAME. */
307 #define DEF_STRUB_IDS(IDX, NAME, ID) \
308 static inline tree get_strub_mode_id_ ## NAME () { \
309 int idx = STRUB_ID_BASE + IDX; \
310 tree identifier = strub_cache[idx]; \
311 if (!identifier) \
312 strub_cache[idx] = identifier = get_identifier (ID); \
313 return identifier; \
315 /* Same as DEF_STRUB_IDS, but use the string expansion of NAME as ID. */
316 #define DEF_STRUB_ID(IDX, NAME) \
317 DEF_STRUB_IDS (IDX, NAME, #NAME)
319 /* Define functions for each of the strub mode identifiers.
320 Expose dashes rather than underscores. */
321 DEF_STRUB_ID (0, disabled)
322 DEF_STRUB_IDS (1, at_calls, "at-calls")
323 DEF_STRUB_ID (2, internal)
324 DEF_STRUB_ID (3, callable)
325 DEF_STRUB_ID (4, wrapped)
326 DEF_STRUB_ID (5, wrapper)
327 DEF_STRUB_ID (6, inlinable)
328 DEF_STRUB_IDS (7, at_calls_opt, "at-calls-opt")
330 /* Release the temporary macro names. */
331 #undef DEF_STRUB_IDS
332 #undef DEF_STRUB_ID
334 /* Return the identifier corresponding to strub MODE. */
336 static tree
337 get_strub_mode_attr_parm (enum strub_mode mode)
339 switch (mode)
341 case STRUB_DISABLED:
342 return get_strub_mode_id_disabled ();
344 case STRUB_AT_CALLS:
345 return get_strub_mode_id_at_calls ();
347 case STRUB_INTERNAL:
348 return get_strub_mode_id_internal ();
350 case STRUB_CALLABLE:
351 return get_strub_mode_id_callable ();
353 case STRUB_WRAPPED:
354 return get_strub_mode_id_wrapped ();
356 case STRUB_WRAPPER:
357 return get_strub_mode_id_wrapper ();
359 case STRUB_INLINABLE:
360 return get_strub_mode_id_inlinable ();
362 case STRUB_AT_CALLS_OPT:
363 return get_strub_mode_id_at_calls_opt ();
365 default:
366 gcc_unreachable ();
370 /* Return the parmeters (TREE_VALUE) for a strub attribute of MODE.
371 We know we use a single parameter, so we bypass the creation of a
372 tree list. */
374 static tree
375 get_strub_mode_attr_value (enum strub_mode mode)
377 return get_strub_mode_attr_parm (mode);
380 /* Determine whether ID is a well-formed strub mode-specifying attribute
381 parameter for a function (type). Only user-visible modes are accepted, and
382 ID must be non-NULL.
384 For unacceptable parms, return 0, otherwise a nonzero value as below.
386 If the parm enables strub, return positive, otherwise negative.
388 If the affected type must be a distinct, incompatible type,return an integer
389 of absolute value 2, otherwise 1. */
392 strub_validate_fn_attr_parm (tree id)
394 int ret;
395 const char *s = NULL;
396 size_t len = 0;
398 /* do NOT test for NULL. This is only to be called with non-NULL arguments.
399 We assume that the strub parameter applies to a function, because only
400 functions accept an explicit argument. If we accepted NULL, and we
401 happened to be called to verify the argument for a variable, our return
402 values would be wrong. */
403 if (TREE_CODE (id) == STRING_CST)
405 s = TREE_STRING_POINTER (id);
406 len = TREE_STRING_LENGTH (id) - 1;
408 else if (TREE_CODE (id) == IDENTIFIER_NODE)
410 s = IDENTIFIER_POINTER (id);
411 len = IDENTIFIER_LENGTH (id);
413 else
414 return 0;
416 enum strub_mode mode;
418 if (len != 8)
419 return 0;
421 switch (s[0])
423 case 'd':
424 mode = STRUB_DISABLED;
425 ret = -1;
426 break;
428 case 'a':
429 mode = STRUB_AT_CALLS;
430 ret = 2;
431 break;
433 case 'i':
434 mode = STRUB_INTERNAL;
435 ret = 1;
436 break;
438 case 'c':
439 mode = STRUB_CALLABLE;
440 ret = -2;
441 break;
443 default:
444 /* Other parms are for internal use only. */
445 return 0;
448 tree mode_id = get_strub_mode_attr_parm (mode);
450 if (TREE_CODE (id) == IDENTIFIER_NODE
451 ? id != mode_id
452 : strncmp (s, IDENTIFIER_POINTER (mode_id), len) != 0)
453 return 0;
455 return ret;
458 /* Return the strub mode from STRUB_ATTR. VAR_P should be TRUE if the attribute
459 is taken from a variable, rather than from a function, or a type thereof. */
461 static enum strub_mode
462 get_strub_mode_from_attr (tree strub_attr, bool var_p = false)
464 enum strub_mode mode = STRUB_DISABLED;
466 if (strub_attr)
468 if (!TREE_VALUE (strub_attr))
469 mode = !var_p ? STRUB_AT_CALLS : STRUB_INTERNAL;
470 else
472 gcc_checking_assert (!var_p);
473 tree id = TREE_VALUE (strub_attr);
474 if (TREE_CODE (id) == TREE_LIST)
475 id = TREE_VALUE (id);
476 const char *s = (TREE_CODE (id) == STRING_CST
477 ? TREE_STRING_POINTER (id)
478 : IDENTIFIER_POINTER (id));
479 size_t len = (TREE_CODE (id) == STRING_CST
480 ? TREE_STRING_LENGTH (id) - 1
481 : IDENTIFIER_LENGTH (id));
483 switch (len)
485 case 7:
486 switch (s[6])
488 case 'r':
489 mode = STRUB_WRAPPER;
490 break;
492 case 'd':
493 mode = STRUB_WRAPPED;
494 break;
496 default:
497 gcc_unreachable ();
499 break;
501 case 8:
502 switch (s[0])
504 case 'd':
505 mode = STRUB_DISABLED;
506 break;
508 case 'a':
509 mode = STRUB_AT_CALLS;
510 break;
512 case 'i':
513 mode = STRUB_INTERNAL;
514 break;
516 case 'c':
517 mode = STRUB_CALLABLE;
518 break;
520 default:
521 gcc_unreachable ();
523 break;
525 case 9:
526 mode = STRUB_INLINABLE;
527 break;
529 case 12:
530 mode = STRUB_AT_CALLS_OPT;
531 break;
533 default:
534 gcc_unreachable ();
537 gcc_checking_assert (TREE_CODE (id) == IDENTIFIER_NODE
538 ? id == get_strub_mode_attr_parm (mode)
539 : strncmp (IDENTIFIER_POINTER
540 (get_strub_mode_attr_parm (mode)),
541 s, len) == 0);
545 return mode;
548 /* Look up, decode and return the strub mode associated with FNDECL. */
550 static enum strub_mode
551 get_strub_mode_from_fndecl (tree fndecl)
553 return get_strub_mode_from_attr (get_strub_attr_from_decl (fndecl));
556 /* Look up, decode and return the strub mode associated with NODE. */
558 static enum strub_mode
559 get_strub_mode (cgraph_node *node)
561 return get_strub_mode_from_fndecl (node->decl);
564 /* Look up, decode and return the strub mode associated with TYPE. */
566 static enum strub_mode
567 get_strub_mode_from_type (tree type)
569 bool var_p = !FUNC_OR_METHOD_TYPE_P (type);
570 tree attr = get_strub_attr_from_type (type);
572 if (attr)
573 return get_strub_mode_from_attr (attr, var_p);
575 if (flag_strub >= -1 && !var_p)
576 return STRUB_CALLABLE;
578 return STRUB_DISABLED;
582 /* Return TRUE iff NODE calls builtin va_start. */
584 static bool
585 calls_builtin_va_start_p (cgraph_node *node)
587 bool result = false;
589 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
591 tree cdecl = e->callee->decl;
592 if (fndecl_built_in_p (cdecl, BUILT_IN_VA_START))
593 return true;
596 return result;
599 /* Return TRUE iff NODE calls builtin apply_args, and optionally REPORT it. */
601 static bool
602 calls_builtin_apply_args_p (cgraph_node *node, bool report = false)
604 bool result = false;
606 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
608 tree cdecl = e->callee->decl;
609 if (!fndecl_built_in_p (cdecl, BUILT_IN_APPLY_ARGS))
610 continue;
612 result = true;
614 if (!report)
615 break;
617 sorry_at (e->call_stmt
618 ? gimple_location (e->call_stmt)
619 : DECL_SOURCE_LOCATION (node->decl),
620 "at-calls %<strub%> does not support call to %qD",
621 cdecl);
624 return result;
627 /* Return TRUE iff NODE carries the always_inline attribute. */
629 static inline bool
630 strub_always_inline_p (cgraph_node *node)
632 return lookup_attribute ("always_inline", DECL_ATTRIBUTES (node->decl));
635 /* Return TRUE iff the target has strub support for T, a function
636 decl, or a type used in an indirect call, and optionally REPORT the
637 reasons for ineligibility. If T is a type and error REPORTing is
638 enabled, the LOCation (of the indirect call) should be provided. */
639 static inline bool
640 strub_target_support_p (tree t, bool report = false,
641 location_t loc = UNKNOWN_LOCATION)
643 bool result = true;
645 if (!targetm.have_strub_support_for (t))
647 result = false;
649 if (!report)
650 return result;
652 if (DECL_P (t))
653 sorry_at (DECL_SOURCE_LOCATION (t),
654 "%qD is not eligible for %<strub%>"
655 " on the target system", t);
656 else
657 sorry_at (loc,
658 "unsupported %<strub%> call"
659 " on the target system");
662 return result;
665 /* Return TRUE iff NODE is potentially eligible for any strub-enabled mode, and
666 optionally REPORT the reasons for ineligibility. */
668 static inline bool
669 can_strub_p (cgraph_node *node, bool report = false)
671 bool result = strub_target_support_p (node->decl, report);
673 if (!report && (!result || strub_always_inline_p (node)))
674 return result;
676 if (flag_split_stack)
678 result = false;
680 if (!report)
681 return result;
683 sorry_at (DECL_SOURCE_LOCATION (node->decl),
684 "%qD is not eligible for %<strub%>"
685 " because %<-fsplit-stack%> is enabled",
686 node->decl);
689 if (lookup_attribute ("noipa", DECL_ATTRIBUTES (node->decl)))
691 result = false;
693 if (!report)
694 return result;
696 sorry_at (DECL_SOURCE_LOCATION (node->decl),
697 "%qD is not eligible for %<strub%>"
698 " because of attribute %<noipa%>",
699 node->decl);
702 /* We can't, and don't want to vectorize the watermark and other
703 strub-introduced parms. */
704 if (lookup_attribute ("simd", DECL_ATTRIBUTES (node->decl)))
706 result = false;
708 if (!report)
709 return result;
711 sorry_at (DECL_SOURCE_LOCATION (node->decl),
712 "%qD is not eligible for %<strub%>"
713 " because of attribute %<simd%>",
714 node->decl);
717 return result;
720 /* Return TRUE iff NODE is eligible for at-calls strub, and optionally REPORT
721 the reasons for ineligibility. Besides general non-eligibility for
722 strub-enabled modes, at-calls rules out calling builtin apply_args. */
724 static bool
725 can_strub_at_calls_p (cgraph_node *node, bool report = false)
727 bool result = !report || can_strub_p (node, report);
729 if (!result && !report)
730 return result;
732 return !calls_builtin_apply_args_p (node, report);
735 /* Return TRUE iff the called function (pointer or, if available,
736 decl) undergoes a significant type conversion for the call. Strub
737 mode changes between function types, and other non-useless type
738 conversions, are regarded as significant. When the function type
739 is overridden, the effective strub mode for the call is that of the
740 call fntype, rather than that of the pointer or of the decl.
741 Functions called with type overrides cannot undergo type changes;
742 it's as if their address was taken, so they're considered
743 non-viable for implicit at-calls strub mode. */
745 static inline bool
746 strub_call_fntype_override_p (const gcall *gs)
748 if (gimple_call_internal_p (gs))
749 return false;
750 tree fn_type = TREE_TYPE (TREE_TYPE (gimple_call_fn (gs)));
751 if (tree decl = gimple_call_fndecl (gs))
752 fn_type = TREE_TYPE (decl);
754 /* We do NOT want to take the mode from the decl here. This
755 function is used to tell whether we can change the strub mode of
756 a function, and whether the effective mode for the call is to be
757 taken from the decl or from an overrider type. When the strub
758 mode is explicitly declared, or overridden with a type cast, the
759 difference will be noticed in function types. However, if the
760 strub mode is implicit due to e.g. strub variables or -fstrub=*
761 command-line flags, we will adjust call types along with function
762 types. In either case, the presence of type or strub mode
763 overriders in calls will prevent a function from having its strub
764 modes changed in ways that would imply type changes, but taking
765 strub modes from decls would defeat this, since we set strub
766 modes and then call this function to tell whether the original
767 type was overridden to decide whether to adjust the call. We
768 need the answer to be about the type, not the decl. */
769 enum strub_mode mode = get_strub_mode_from_type (fn_type);
770 return (get_strub_mode_from_type (gs->u.fntype) != mode
771 || !useless_type_conversion_p (gs->u.fntype, fn_type));
774 /* Return TRUE iff NODE is called directly with a type override. */
776 static bool
777 called_directly_with_type_override_p (cgraph_node *node, void *)
779 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
780 if (e->call_stmt && strub_call_fntype_override_p (e->call_stmt))
781 return true;
783 return false;
786 /* Return TRUE iff NODE or any other nodes aliased to it are called
787 with type overrides. We can't safely change the type of such
788 functions. */
790 static bool
791 called_with_type_override_p (cgraph_node *node)
793 return (node->call_for_symbol_thunks_and_aliases
794 (called_directly_with_type_override_p, NULL, true, true));
797 /* Symbolic macro for the max number of arguments that internal strub may add to
798 a function. */
800 #define STRUB_INTERNAL_MAX_EXTRA_ARGS 3
802 /* We can't perform internal strubbing if the function body involves certain
803 features:
805 - a non-default __builtin_va_start (e.g. x86's __builtin_ms_va_start) is
806 currently unsupported because we can't discover the corresponding va_copy and
807 va_end decls in the wrapper, and we don't convey the alternate variable
808 arguments ABI to the modified wrapped function. The default
809 __builtin_va_start is supported by calling va_start/va_end at the wrapper,
810 that takes variable arguments, passing a pointer to the va_list object to the
811 wrapped function, that runs va_copy from it where the original function ran
812 va_start.
814 __builtin_next_arg is currently unsupported because the wrapped function
815 won't be a variable argument function. We could process it in the wrapper,
816 that remains a variable argument function, and replace calls in the wrapped
817 body, but we currently don't.
819 __builtin_return_address is rejected because it's generally used when the
820 actual caller matters, and introducing a wrapper breaks such uses as those in
821 the unwinder. */
823 static bool
824 can_strub_internally_p (cgraph_node *node, bool report = false)
826 bool result = !report || can_strub_p (node, report);
828 if (!result && !report)
829 return result;
831 if (!report && strub_always_inline_p (node))
832 return result;
834 /* Since we're not changing the function identity proper, just
835 moving its full implementation, we *could* disable
836 fun->cannot_be_copied_reason and/or temporarily drop a noclone
837 attribute, but we'd have to prevent remapping of the labels. */
838 if (lookup_attribute ("noclone", DECL_ATTRIBUTES (node->decl)))
840 result = false;
842 if (!report)
843 return result;
845 sorry_at (DECL_SOURCE_LOCATION (node->decl),
846 "%qD is not eligible for internal %<strub%>"
847 " because of attribute %<noclone%>",
848 node->decl);
851 if (node->has_gimple_body_p ())
853 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
855 tree cdecl = e->callee->decl;
856 if (!((fndecl_built_in_p (cdecl, BUILT_IN_VA_START)
857 && cdecl != builtin_decl_explicit (BUILT_IN_VA_START))
858 || fndecl_built_in_p (cdecl, BUILT_IN_NEXT_ARG)
859 || fndecl_built_in_p (cdecl, BUILT_IN_RETURN_ADDRESS)))
860 continue;
862 result = false;
864 if (!report)
865 return result;
867 sorry_at (e->call_stmt
868 ? gimple_location (e->call_stmt)
869 : DECL_SOURCE_LOCATION (node->decl),
870 "%qD is not eligible for internal %<strub%> "
871 "because it calls %qD",
872 node->decl, cdecl);
875 struct function *fun = DECL_STRUCT_FUNCTION (node->decl);
876 if (fun->has_nonlocal_label)
878 result = false;
880 if (!report)
881 return result;
883 sorry_at (DECL_SOURCE_LOCATION (node->decl),
884 "%qD is not eligible for internal %<strub%> "
885 "because it contains a non-local goto target",
886 node->decl);
889 if (fun->has_forced_label_in_static)
891 result = false;
893 if (!report)
894 return result;
896 sorry_at (DECL_SOURCE_LOCATION (node->decl),
897 "%qD is not eligible for internal %<strub%> "
898 "because the address of a local label escapes",
899 node->decl);
902 /* Catch any other case that would prevent versioning/cloning
903 so as to also have it covered above. */
904 gcc_checking_assert (!result /* || !node->has_gimple_body_p () */
905 || tree_versionable_function_p (node->decl));
908 /* Label values references are not preserved when copying. If referenced
909 in nested functions, as in 920415-1.c and 920721-4.c their decls get
910 remapped independently. The exclusion below might be too broad, in
911 that we might be able to support correctly cases in which the labels
912 are only used internally in a function, but disconnecting forced labels
913 from their original declarations is undesirable in general. */
914 basic_block bb;
915 FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (node->decl))
916 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
917 !gsi_end_p (gsi); gsi_next (&gsi))
919 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
920 tree target;
922 if (!label_stmt)
923 break;
925 target = gimple_label_label (label_stmt);
927 if (!FORCED_LABEL (target))
928 continue;
930 result = false;
932 if (!report)
933 return result;
935 sorry_at (gimple_location (label_stmt),
936 "internal %<strub%> does not support forced labels");
940 if (list_length (TYPE_ARG_TYPES (TREE_TYPE (node->decl)))
941 >= (((HOST_WIDE_INT) 1 << IPA_PARAM_MAX_INDEX_BITS)
942 - STRUB_INTERNAL_MAX_EXTRA_ARGS))
944 result = false;
946 if (!report)
947 return result;
949 sorry_at (DECL_SOURCE_LOCATION (node->decl),
950 "%qD has too many arguments for internal %<strub%>",
951 node->decl);
954 return result;
957 /* Return TRUE iff NODE has any strub-requiring local variable, or accesses (as
958 in reading) any variable through a strub-requiring type. */
960 static bool
961 strub_from_body_p (cgraph_node *node)
963 if (!node->has_gimple_body_p ())
964 return false;
966 /* If any local variable is marked for strub... */
967 unsigned i;
968 tree var;
969 FOR_EACH_LOCAL_DECL (DECL_STRUCT_FUNCTION (node->decl),
970 i, var)
971 if (get_strub_mode_from_type (TREE_TYPE (var))
972 != STRUB_DISABLED)
973 return true;
975 /* Now scan the body for loads with strub-requiring types.
976 ??? Compound types don't propagate the strub requirement to
977 component types. */
978 basic_block bb;
979 FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (node->decl))
980 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
981 !gsi_end_p (gsi); gsi_next (&gsi))
983 gimple *stmt = gsi_stmt (gsi);
985 if (!gimple_assign_load_p (stmt))
986 continue;
988 tree rhs = gimple_assign_rhs1 (stmt);
989 if (get_strub_mode_from_type (TREE_TYPE (rhs))
990 != STRUB_DISABLED)
991 return true;
994 return false;
997 /* Return TRUE iff node is associated with a builtin that should be callable
998 from strub contexts. */
1000 static inline bool
1001 strub_callable_builtin_p (cgraph_node *node)
1003 if (DECL_BUILT_IN_CLASS (node->decl) != BUILT_IN_NORMAL)
1004 return false;
1006 enum built_in_function fcode = DECL_FUNCTION_CODE (node->decl);
1008 switch (fcode)
1010 case BUILT_IN_NONE:
1011 gcc_unreachable ();
1013 /* This temporarily allocates stack for the call, and we can't reasonably
1014 update the watermark for that. Besides, we don't check the actual call
1015 target, nor its signature, and it seems to be overkill to as much as
1016 try to do so. */
1017 case BUILT_IN_APPLY:
1018 return false;
1020 /* Conversely, this shouldn't be called from within strub contexts, since
1021 the caller may have had its signature modified. STRUB_INTERNAL is ok,
1022 the call will remain in the STRUB_WRAPPER, and removed from the
1023 STRUB_WRAPPED clone. */
1024 case BUILT_IN_APPLY_ARGS:
1025 return false;
1027 /* ??? Make all other builtins callable. We wish to make any builtin call
1028 the compiler might introduce on its own callable. Anything that is
1029 predictable enough as to be known not to allow stack data that should
1030 be strubbed to unintentionally escape to non-strub contexts can be
1031 allowed, and pretty much every builtin appears to fit this description.
1032 The exceptions to this rule seem to be rare, and only available as
1033 explicit __builtin calls, so let's keep it simple and allow all of
1034 them... */
1035 default:
1036 return true;
/* Compute the strub mode to be used for NODE.  STRUB_ATTR should be the strub
   attribute found for NODE, if any.  */

static enum strub_mode
compute_strub_mode (cgraph_node *node, tree strub_attr)
{
  enum strub_mode req_mode = get_strub_mode_from_attr (strub_attr);

  gcc_checking_assert (flag_strub >= -2 && flag_strub <= 3);

  /* Symbolic encodings of the -fstrub-* flags.  */
  /* Enable strub when explicitly requested through attributes to functions or
     variables, reporting errors if the requests cannot be satisfied.  */
  const bool strub_flag_auto = flag_strub < 0;
  /* strub_flag_auto with strub call verification; without this, functions are
     implicitly callable.  */
  const bool strub_flag_strict = flag_strub < -1;
  /* Disable strub altogether, ignore attributes entirely.  */
  const bool strub_flag_disabled = flag_strub == 0;
  /* On top of _auto, also enable strub implicitly for functions that can
     safely undergo at-calls strubbing.  Internal mode will still be used in
     functions that request it explicitly with attribute strub(2), or when the
     function body requires strubbing and at-calls strubbing is not viable.  */
  const bool strub_flag_at_calls = flag_strub == 1;
  /* On top of default, also enable strub implicitly for functions that can
     safely undergo internal strubbing.  At-calls mode will still be used in
     functions that request it explicitly with attribute strub() or strub(1),
     or when the function body requires strubbing and internal strubbing is not
     viable.  */
  const bool strub_flag_internal = flag_strub == 2;
  /* On top of default, also enable strub implicitly for functions that can
     safely undergo strubbing in either mode.  When both modes are viable,
     at-calls is preferred.  */
  const bool strub_flag_either = flag_strub == 3;
  /* Besides the default behavior, enable strub implicitly for all viable
     functions.  */
  const bool strub_flag_viable = flag_strub > 0;

  /* The consider_* variables should be TRUE if selecting the corresponding
     strub modes would be consistent with requests from attributes and command
     line flags.  Attributes associated with functions pretty much mandate a
     selection, and should report an error if not satisfied; strub_flag_auto
     implicitly enables some viable strub mode if that's required by references
     to variables marked for strub; strub_flag_viable enables strub if viable
     (even when favoring one mode, body-requested strub can still be satisfied
     by either mode), and falls back to callable, silently unless variables
     require strubbing.  */

  const bool consider_at_calls
    = (!strub_flag_disabled
       && (strub_attr
	   ? req_mode == STRUB_AT_CALLS
	   : true));
  const bool consider_internal
    = (!strub_flag_disabled
       && (strub_attr
	   ? req_mode == STRUB_INTERNAL
	   : true));

  const bool consider_callable
    = (!strub_flag_disabled
       && (strub_attr
	   ? req_mode == STRUB_CALLABLE
	   : (!strub_flag_strict
	      || strub_callable_builtin_p (node))));

  /* This is a shorthand for either strub-enabled mode.  */
  const bool consider_strub
    = (consider_at_calls || consider_internal);

  /* We can cope with always_inline functions even with noipa and noclone,
     because we just leave them alone.  */
  const bool is_always_inline
    = strub_always_inline_p (node);

  /* Strubbing in general, and each specific strub mode, may have its own set of
     requirements.  We require noipa for strubbing, either because of cloning
     required for internal strub, or because of caller enumeration required for
     at-calls strub.  We don't consider the at-calls mode eligible if it's not
     even considered, it has no further requirements.  Internal mode requires
     cloning and the absence of certain features in the body and, like at-calls,
     it's not eligible if it's not even under consideration.

     ??? Do we need target hooks for further constraints?  E.g., x86's
     "interrupt" attribute breaks internal strubbing because the wrapped clone
     carries the attribute and thus isn't callable; in this case, we could use a
     target hook to adjust the clone instead.  */
  const bool strub_eligible
    = (consider_strub
       && (is_always_inline || can_strub_p (node)));
  const bool at_calls_eligible
    = (consider_at_calls && strub_eligible
       && can_strub_at_calls_p (node));
  const bool internal_eligible
    = (consider_internal && strub_eligible
       && (is_always_inline
	   || can_strub_internally_p (node)));

  /* In addition to the strict eligibility requirements, some additional
     constraints are placed on implicit selection of certain modes.  These do
     not prevent the selection of a mode if explicitly specified as part of a
     function interface (the strub attribute), but they may prevent modes from
     being selected by the command line or by function bodies.  The only actual
     constraint is on at-calls mode: since we change the function's exposed
     signature, we won't do it implicitly if the function can possibly be used
     in ways that do not expect the signature change, e.g., if the function is
     available to or interposable by other units, if its address is taken,
     etc.  */
  const bool at_calls_viable
    = (at_calls_eligible
       && (strub_attr
	   || (node->has_gimple_body_p ()
	       && (!node->externally_visible
		   || (node->binds_to_current_def_p ()
		       && node->can_be_local_p ()))
	       && node->only_called_directly_p ()
	       && !called_with_type_override_p (node))));
  const bool internal_viable
    = (internal_eligible);

  /* Shorthand.  */
  const bool strub_viable
    = (at_calls_viable || internal_viable);

  /* We wish to analyze the body, to look for implicit requests for strub, both
     to implicitly enable it when the body calls for it, and to report errors if
     the body calls for it but neither mode is viable (even if that follows from
     non-eligibility because of the explicit specification of some non-strubbing
     mode).  We can refrain from scanning the body only in rare circumstances:
     when strub is enabled by a function attribute (scanning might be redundant
     in telling us to also enable it), and when we are enabling strub implicitly
     but there are non-viable modes: we want to know whether strubbing is
     required, to fallback to another mode, even if we're only enabling a
     certain mode, or, when either mode would do, to report an error if neither
     happens to be viable.  */
  const bool analyze_body
    = (strub_attr
       ? !consider_strub
       : (strub_flag_auto
	  || (strub_flag_viable && (!at_calls_viable && !internal_viable))
	  || (strub_flag_either && !strub_viable)));

  /* Cases in which strubbing is enabled or disabled by strub_flag_auto.
     Unsatisfiable requests ought to be reported.  */
  const bool strub_required
    = ((strub_attr && consider_strub)
       || (analyze_body && strub_from_body_p (node)));

  /* Besides the required cases, we want to abide by the requests to enabling on
     an if-viable basis.  */
  const bool strub_enable
    = (strub_required
       || (strub_flag_at_calls && at_calls_viable)
       || (strub_flag_internal && internal_viable)
       || (strub_flag_either && strub_viable));

  /* And now we're finally ready to select a mode that abides by the viability
     and eligibility constraints, and that satisfies the strubbing requirements
     and requests, subject to the constraints.  If both modes are viable and
     strub is to be enabled, pick STRUB_AT_CALLS unless STRUB_INTERNAL was named
     as preferred.  */
  const enum strub_mode mode
    = ((strub_enable && is_always_inline)
       ? (strub_required ? STRUB_INLINABLE : STRUB_CALLABLE)
       : (strub_enable && internal_viable
	  && (strub_flag_internal || !at_calls_viable))
       ? STRUB_INTERNAL
       : (strub_enable && at_calls_viable)
       ? (strub_required && !strub_attr
	  ? STRUB_AT_CALLS_OPT
	  : STRUB_AT_CALLS)
       : consider_callable
       ? STRUB_CALLABLE
       : STRUB_DISABLED);

  /* Cross-check the selected mode against the requirements, and report
     problems the selection could not satisfy.  */
  switch (mode)
    {
    case STRUB_CALLABLE:
      if (is_always_inline)
	break;
      /* Fall through.  */

    case STRUB_DISABLED:
      if (strub_enable && !strub_attr)
	{
	  gcc_checking_assert (analyze_body);
	  error_at (DECL_SOURCE_LOCATION (node->decl),
		    "%qD requires %<strub%>,"
		    " but no viable %<strub%> mode was found",
		    node->decl);
	  break;
	}
      /* Fall through.  */

    case STRUB_AT_CALLS:
    case STRUB_INTERNAL:
    case STRUB_INLINABLE:
      /* Differences from a mode requested through a function attribute are
	 reported in set_strub_mode_to.  */
      break;

    case STRUB_AT_CALLS_OPT:
      /* Functions that select this mode do so because of references to strub
	 variables.  Even if we choose at-calls as an optimization, the
	 requirements for internal strub must still be satisfied.  Optimization
	 options may render implicit at-calls strub not viable (-O0 sets
	 force_output for static non-inline functions), and it would not be good
	 if changing optimization options turned a well-formed into an
	 ill-formed one.  */
      if (!internal_viable)
	can_strub_internally_p (node, true);
      break;

    case STRUB_WRAPPED:
    case STRUB_WRAPPER:
    default:
      gcc_unreachable ();
    }

  return mode;
}
1262 /* Set FNDT's strub mode to MODE; FNDT may be a function decl or
1263 function type. If OVERRIDE, do not check whether a mode is already
1264 set. */
1266 static void
1267 strub_set_fndt_mode_to (tree fndt, enum strub_mode mode, bool override)
1269 gcc_checking_assert (override
1270 || !(DECL_P (fndt)
1271 ? get_strub_attr_from_decl (fndt)
1272 : get_strub_attr_from_type (fndt)));
1274 tree attr = tree_cons (get_identifier ("strub"),
1275 get_strub_mode_attr_value (mode),
1276 NULL_TREE);
1277 tree *attrp = NULL;
1278 if (DECL_P (fndt))
1280 gcc_checking_assert (FUNC_OR_METHOD_TYPE_P (TREE_TYPE (fndt)));
1281 attrp = &DECL_ATTRIBUTES (fndt);
1283 else if (FUNC_OR_METHOD_TYPE_P (fndt))
1284 attrp = &TYPE_ATTRIBUTES (fndt);
1285 else
1286 gcc_unreachable ();
1288 TREE_CHAIN (attr) = *attrp;
1289 *attrp = attr;
/* Set FNDT's strub mode to callable.
   FNDT may be a function decl or a function type.  Passing OVERRIDE as
   false means a pre-existing strub attribute triggers a checking
   assert rather than being silently shadowed.  */

void
strub_make_callable (tree fndt)
{
  strub_set_fndt_mode_to (fndt, STRUB_CALLABLE, false);
}
/* Set NODE to strub MODE.  Report incompatibilities between MODE and the mode
   requested through explicit attributes, and cases of non-eligibility.  */

static void
set_strub_mode_to (cgraph_node *node, enum strub_mode mode)
{
  tree attr = get_strub_attr_from_decl (node->decl);
  enum strub_mode req_mode = get_strub_mode_from_attr (attr);

  if (attr)
    {
      /* Check for and report incompatible mode changes.  Two kinds of
	 differences are tolerated silently: INTERNAL being realized as
	 WRAPPED/WRAPPER, and attribute-requested modes being demoted to
	 INLINABLE for always_inline functions.  */
      if (mode != req_mode
	  && !(req_mode == STRUB_INTERNAL
	       && (mode == STRUB_WRAPPED
		   || mode == STRUB_WRAPPER))
	  && !((req_mode == STRUB_INTERNAL
		|| req_mode == STRUB_AT_CALLS
		|| req_mode == STRUB_CALLABLE)
	       && mode == STRUB_INLINABLE))
	{
	  error_at (DECL_SOURCE_LOCATION (node->decl),
		    "%<strub%> mode %qE selected for %qD, when %qE was requested",
		    get_strub_mode_attr_parm (mode),
		    node->decl,
		    get_strub_mode_attr_parm (req_mode));
	  if (node->alias)
	    {
	      /* Point at the alias target that forced the selection.  */
	      cgraph_node *target = node->ultimate_alias_target ();
	      if (target != node)
		error_at (DECL_SOURCE_LOCATION (target->decl),
			  "the incompatible selection was determined"
			  " by ultimate alias target %qD",
			  target->decl);
	    }

	  /* Report any incompatibilities with explicitly-requested strub:
	     the second argument requests diagnostics.  */
	  switch (req_mode)
	    {
	    case STRUB_AT_CALLS:
	      can_strub_at_calls_p (node, true);
	      break;

	    case STRUB_INTERNAL:
	      can_strub_internally_p (node, true);
	      break;

	    default:
	      break;
	    }
	}

      /* Drop any incompatible strub attributes leading the decl attribute
	 chain.  Return if we find one with the mode we need.  */
      for (;;)
	{
	  if (mode == req_mode)
	    return;

	  /* Only drop attributes at the head of the chain; stop once
	     something other than the strub attribute leads it.  */
	  if (DECL_ATTRIBUTES (node->decl) != attr)
	    break;

	  DECL_ATTRIBUTES (node->decl) = TREE_CHAIN (attr);
	  attr = get_strub_attr_from_decl (node->decl);
	  if (!attr)
	    break;

	  req_mode = get_strub_mode_from_attr (attr);
	}
    }
  else if (mode == req_mode)
    return;

  /* Record the selected mode as an attribute on the decl; ATTR, if
     nonnull, tells strub_set_fndt_mode_to to allow overriding.  */
  strub_set_fndt_mode_to (node->decl, mode, attr);
}
/* Compute and set NODE's strub mode.  */

static void
set_strub_mode (cgraph_node *node)
{
  tree attr = get_strub_attr_from_decl (node->decl);

  if (attr)
    switch (get_strub_mode_from_attr (attr))
      {
	/* These can't have been requested through user attributes, so we must
	   have already gone through them.  */
      case STRUB_WRAPPER:
      case STRUB_WRAPPED:
      case STRUB_INLINABLE:
      case STRUB_AT_CALLS_OPT:
	return;

      case STRUB_DISABLED:
      case STRUB_AT_CALLS:
      case STRUB_INTERNAL:
      case STRUB_CALLABLE:
	break;

      default:
	gcc_unreachable ();
      }

  cgraph_node *xnode = node;
  if (node->alias)
    xnode = node->ultimate_alias_target ();
  /* Weakrefs may remain unresolved (the above will return node) if
     their targets are not defined, so make sure we compute a strub
     mode for them, instead of defaulting to STRUB_DISABLED and
     rendering them uncallable.  */
  enum strub_mode mode = (xnode != node && !xnode->alias
			  ? get_strub_mode (xnode)
			  : compute_strub_mode (node, attr));

  set_strub_mode_to (node, mode);
}
1420 /* Non-strub functions shouldn't be called from within strub contexts,
1421 except through callable ones. Always inline strub functions can
1422 only be called from strub functions. */
1424 static bool
1425 strub_callable_from_p (strub_mode caller_mode, strub_mode callee_mode)
1427 switch (caller_mode)
1429 case STRUB_WRAPPED:
1430 case STRUB_AT_CALLS_OPT:
1431 case STRUB_AT_CALLS:
1432 case STRUB_INTERNAL:
1433 case STRUB_INLINABLE:
1434 break;
1436 case STRUB_WRAPPER:
1437 case STRUB_DISABLED:
1438 case STRUB_CALLABLE:
1439 return callee_mode != STRUB_INLINABLE;
1441 default:
1442 gcc_unreachable ();
1445 switch (callee_mode)
1447 case STRUB_WRAPPED:
1448 case STRUB_AT_CALLS:
1449 case STRUB_INLINABLE:
1450 break;
1452 case STRUB_AT_CALLS_OPT:
1453 case STRUB_INTERNAL:
1454 case STRUB_WRAPPER:
1455 return (flag_strub >= -1);
1457 case STRUB_DISABLED:
1458 return false;
1460 case STRUB_CALLABLE:
1461 break;
1463 default:
1464 gcc_unreachable ();
1467 return true;
1470 /* Return TRUE iff CALLEE can be inlined into CALLER. We wish to avoid inlining
1471 WRAPPED functions back into their WRAPPERs. More generally, we wish to avoid
1472 inlining strubbed functions into non-strubbed ones. CALLER doesn't have to
1473 be an immediate caller of CALLEE: the immediate caller may have already been
1474 cloned for inlining, and then CALLER may be further up the original call
1475 chain. ??? It would be nice if our own caller would retry inlining callee
1476 if caller gets inlined. */
1478 bool
1479 strub_inlinable_to_p (cgraph_node *callee, cgraph_node *caller)
1481 strub_mode callee_mode = get_strub_mode (callee);
1483 switch (callee_mode)
1485 case STRUB_WRAPPED:
1486 case STRUB_AT_CALLS:
1487 case STRUB_INTERNAL:
1488 case STRUB_INLINABLE:
1489 case STRUB_AT_CALLS_OPT:
1490 break;
1492 case STRUB_WRAPPER:
1493 case STRUB_DISABLED:
1494 case STRUB_CALLABLE:
1495 /* When we consider inlining, we've already verified callability, so we
1496 can even inline callable and then disabled into a strub context. That
1497 will get strubbed along with the context, so it's hopefully not a
1498 problem. */
1499 return true;
1501 default:
1502 gcc_unreachable ();
1505 strub_mode caller_mode = get_strub_mode (caller);
1507 switch (caller_mode)
1509 case STRUB_WRAPPED:
1510 case STRUB_AT_CALLS:
1511 case STRUB_INTERNAL:
1512 case STRUB_INLINABLE:
1513 case STRUB_AT_CALLS_OPT:
1514 return true;
1516 case STRUB_WRAPPER:
1517 case STRUB_DISABLED:
1518 case STRUB_CALLABLE:
1519 break;
1521 default:
1522 gcc_unreachable ();
1525 return false;
1528 /* Check that types T1 and T2 are strub-compatible. Return 1 if the strub modes
1529 are the same, 2 if they are interchangeable, and 0 otherwise. */
1532 strub_comptypes (tree t1, tree t2)
1534 if (TREE_CODE (t1) != TREE_CODE (t2))
1535 return 0;
1537 enum strub_mode m1 = get_strub_mode_from_type (t1);
1538 enum strub_mode m2 = get_strub_mode_from_type (t2);
1540 if (m1 == m2)
1541 return 1;
1543 /* We're dealing with types, so only strub modes that can be selected by
1544 attributes in the front end matter. If either mode is at-calls (for
1545 functions) or internal (for variables), the conversion is not
1546 compatible. */
1547 bool var_p = !FUNC_OR_METHOD_TYPE_P (t1);
1548 enum strub_mode mr = var_p ? STRUB_INTERNAL : STRUB_AT_CALLS;
1549 if (m1 == mr || m2 == mr)
1550 return 0;
1552 return 2;
1555 /* Return the effective strub mode used for CALL, and set *TYPEP to
1556 the effective type used for the call. The effective type and mode
1557 are those of the callee, unless the call involves a typecast. */
1559 static enum strub_mode
1560 effective_strub_mode_for_call (gcall *call, tree *typep)
1562 tree type;
1563 enum strub_mode mode;
1565 if (strub_call_fntype_override_p (call))
1567 type = gimple_call_fntype (call);
1568 mode = get_strub_mode_from_type (type);
1570 else
1572 type = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));
1573 tree decl = gimple_call_fndecl (call);
1574 if (decl)
1575 mode = get_strub_mode_from_fndecl (decl);
1576 else
1577 mode = get_strub_mode_from_type (type);
1580 if (typep)
1581 *typep = type;
1583 return mode;
1586 /* Create a distinct copy of the type of NODE's function, and change
1587 the fntype of all calls to it with the same main type to the new
1588 type. */
1590 static void
1591 distinctify_node_type (cgraph_node *node)
1593 tree old_type = TREE_TYPE (node->decl);
1594 tree new_type = build_distinct_type_copy (old_type);
1595 tree new_ptr_type = NULL_TREE;
1597 /* Remap any calls to node->decl that use old_type, or a variant
1598 thereof, to new_type as well. We don't look for aliases, their
1599 declarations will have their types changed independently, and
1600 we'll adjust their fntypes then. */
1601 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
1603 if (!e->call_stmt)
1604 continue;
1605 tree fnaddr = gimple_call_fn (e->call_stmt);
1606 gcc_checking_assert (TREE_CODE (fnaddr) == ADDR_EXPR
1607 && TREE_OPERAND (fnaddr, 0) == node->decl);
1608 if (strub_call_fntype_override_p (e->call_stmt))
1609 continue;
1610 if (!new_ptr_type)
1611 new_ptr_type = build_pointer_type (new_type);
1612 TREE_TYPE (fnaddr) = new_ptr_type;
1613 gimple_call_set_fntype (e->call_stmt, new_type);
1616 TREE_TYPE (node->decl) = new_type;
1619 /* Return TRUE iff TYPE and any variants have the same strub mode. */
1621 static bool
1622 same_strub_mode_in_variants_p (tree type)
1624 enum strub_mode mode = get_strub_mode_from_type (type);
1626 for (tree other = TYPE_MAIN_VARIANT (type);
1627 other != NULL_TREE; other = TYPE_NEXT_VARIANT (other))
1628 if (type != other && mode != get_strub_mode_from_type (other))
1629 return false;
1631 /* Check that the canonical type, if set, either is in the same
1632 variant chain, or has the same strub mode as type. Also check
1633 the variants of the canonical type. */
1634 if (TYPE_CANONICAL (type)
1635 && (TYPE_MAIN_VARIANT (TYPE_CANONICAL (type))
1636 != TYPE_MAIN_VARIANT (type)))
1638 if (mode != get_strub_mode_from_type (TYPE_CANONICAL (type)))
1639 return false;
1640 else
1641 return same_strub_mode_in_variants_p (TYPE_CANONICAL (type));
1644 return true;
/* Check that strub functions don't call non-strub functions, and that
   always_inline strub functions are only called by strub
   functions.  */

static void
verify_strub ()
{
  cgraph_node *node;

  /* It's expected that check strub-wise pointer type compatibility of variables
     and of functions is already taken care of by front-ends, on account of the
     attribute's being marked as affecting type identity and of the creation of
     distinct types.  */

  /* Check that call targets in strub contexts have strub-callable types.  */

  FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
    {
      enum strub_mode caller_mode = get_strub_mode (node);

      /* First, indirect calls: only the call-site type is available.  */
      for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
	{
	  gcc_checking_assert (e->indirect_unknown_callee);

	  if (!e->call_stmt)
	    continue;

	  enum strub_mode callee_mode
	    = effective_strub_mode_for_call (e->call_stmt, NULL);

	  if (!strub_callable_from_p (caller_mode, callee_mode))
	    error_at (gimple_location (e->call_stmt),
		      "indirect non-%<strub%> call in %<strub%> context %qD",
		      node->decl);
	}

      /* Then, direct calls: diagnose with the most specific message.  */
      for (cgraph_edge *e = node->callees; e; e = e->next_callee)
	{
	  gcc_checking_assert (!e->indirect_unknown_callee);

	  if (!e->call_stmt)
	    continue;

	  tree callee_fntype;
	  enum strub_mode callee_mode
	    = effective_strub_mode_for_call (e->call_stmt, &callee_fntype);

	  if (!strub_callable_from_p (caller_mode, callee_mode))
	    {
	      if (callee_mode == STRUB_INLINABLE)
		error_at (gimple_location (e->call_stmt),
			  "calling %<always_inline%> %<strub%> %qD"
			  " in non-%<strub%> context %qD",
			  e->callee->decl, node->decl);
	      else if (fndecl_built_in_p (e->callee->decl, BUILT_IN_APPLY_ARGS)
		       && caller_mode == STRUB_INTERNAL)
		/* This is ok, it will be kept in the STRUB_WRAPPER, and removed
		   from the STRUB_WRAPPED's strub context.  */
		continue;
	      else if (!strub_call_fntype_override_p (e->call_stmt))
		error_at (gimple_location (e->call_stmt),
			  "calling non-%<strub%> %qD in %<strub%> context %qD",
			  e->callee->decl, node->decl);
	      else
		error_at (gimple_location (e->call_stmt),
			  "calling %qD using non-%<strub%> type %qT"
			  " in %<strub%> context %qD",
			  e->callee->decl, callee_fntype, node->decl);
	    }
	}
    }
}
1720 namespace {
/* Define a pass to compute strub modes.  */
const pass_data pass_data_ipa_strub_mode = {
  SIMPLE_IPA_PASS, // type
  "strubm",	   // name
  OPTGROUP_NONE,   // optinfo_flags
  TV_NONE,	   // tv_id
  PROP_cfg,	   // properties_required
  0,		   // properties_provided
  0,		   // properties_destroyed
  0,		   // properties_start
  0,		   // properties_finish
};
/* The strub-mode pass only computes and records, in each function decl,
   the strub mode that the strub pass proper will act on.  */
class pass_ipa_strub_mode : public simple_ipa_opt_pass
{
public:
  pass_ipa_strub_mode (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_strub_mode, ctxt)
  {}
  opt_pass *clone () { return new pass_ipa_strub_mode (m_ctxt); }
  virtual bool gate (function *) {
    /* In relaxed (-3) and strict (-4) settings, that only enable strub at a
       function or variable attribute's request, the attribute handler changes
       flag_strub to -1 or -2, respectively, if any strub-enabling occurence of
       the attribute is found.  Therefore, if it remains at -3 or -4, nothing
       that would enable strub was found, so we can disable it and avoid the
       overhead.  */
    if (flag_strub < -2)
      flag_strub = 0;
    return flag_strub;
  }
  virtual unsigned int execute (function *);
};
/* Define a pass to introduce strub transformations.  */
const pass_data pass_data_ipa_strub = {
  SIMPLE_IPA_PASS,	// type
  "strub",		// name
  OPTGROUP_NONE,	// optinfo_flags
  TV_NONE,		// tv_id
  PROP_cfg | PROP_ssa, // properties_required
  0,		       // properties_provided
  0,		       // properties_destroyed
  0,		       // properties_start
  TODO_update_ssa
  | TODO_cleanup_cfg
  | TODO_rebuild_cgraph_edges
  | TODO_verify_il, // properties_finish
};
/* The strub pass proper: performs the actual transformations (wrapping,
   cloning, watermark insertion) according to the previously-computed
   strub modes.  Also hosts, as static members, the lazily-created types,
   builtins and identifiers the transformation uses.  */
class pass_ipa_strub : public simple_ipa_opt_pass
{
public:
  pass_ipa_strub (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_strub, ctxt)
  {}
  opt_pass *clone () { return new pass_ipa_strub (m_ctxt); }
  virtual bool gate (function *) { return flag_strub && !seen_error (); }
  virtual unsigned int execute (function *);

  /* Define on demand and cache some types we use often.  Each getter
     lazily initializes its strub_cache slot on first use.  */
#define DEF_TYPE(IDX, NAME, INIT)		\
  static inline tree get_ ## NAME () {		\
    int idx = STRUB_TYPE_BASE + IDX;		\
    static tree type = strub_cache[idx];	\
    if (!type)					\
      strub_cache[idx] = type = (INIT);		\
    return type;				\
  }

  /* Use a distinct ptr_type_node to denote the watermark, so that we can
     recognize it in arg lists and avoid modifying types twice.  */
  DEF_TYPE (0, wmt, build_variant_type_copy (ptr_type_node))

  DEF_TYPE (1, pwmt, build_reference_type (get_wmt ()))

  DEF_TYPE (2, qpwmt,
	    build_qualified_type (get_pwmt (),
				  TYPE_QUAL_RESTRICT
				  /* | TYPE_QUAL_CONST */))

  DEF_TYPE (3, qptr,
	    build_qualified_type (ptr_type_node,
				  TYPE_QUAL_RESTRICT
				  | TYPE_QUAL_CONST))

  DEF_TYPE (4, qpvalst,
	    build_qualified_type (build_reference_type
				  (va_list_type_node),
				  TYPE_QUAL_RESTRICT
				  /* | TYPE_QUAL_CONST */))

#undef DEF_TYPE

  /* Define non-strub builtins on demand.  */
#define DEF_NM_BUILTIN(NAME, CODE, FNTYPELIST)			\
  static tree get_ ## NAME () {					\
    tree decl = builtin_decl_explicit (CODE);			\
    if (!decl)							\
      {								\
	tree type = build_function_type_list FNTYPELIST;	\
	decl = add_builtin_function				\
	  ("__builtin_" #NAME,					\
	   type, CODE, BUILT_IN_NORMAL,				\
	   NULL, NULL);						\
	TREE_NOTHROW (decl) = true;				\
	set_builtin_decl ((CODE), decl, true);			\
      }								\
    return decl;						\
  }

  DEF_NM_BUILTIN (stack_address,
		  BUILT_IN_STACK_ADDRESS,
		  (ptr_type_node, NULL))

#undef DEF_NM_BUILTIN

  /* Define strub builtins on demand.  FNSPEC, when nonnull, becomes a
     "fn spec" attribute describing the builtin's side effects.  */
#define DEF_SS_BUILTIN(NAME, FNSPEC, CODE, FNTYPELIST)		\
  static tree get_ ## NAME () {					\
    tree decl = builtin_decl_explicit (CODE);			\
    if (!decl)							\
      {								\
	tree type = build_function_type_list FNTYPELIST;	\
	tree attrs = NULL;					\
	if (FNSPEC)						\
	  attrs = tree_cons (get_identifier ("fn spec"),	\
			     build_tree_list			\
			     (NULL_TREE,			\
			      build_string (strlen (FNSPEC),	\
					    (FNSPEC))),		\
			     attrs);				\
	decl = add_builtin_function_ext_scope			\
	  ("__builtin___strub_" #NAME,				\
	   type, CODE, BUILT_IN_NORMAL,				\
	   "__strub_" #NAME, attrs);				\
	TREE_NOTHROW (decl) = true;				\
	set_builtin_decl ((CODE), decl, true);			\
      }								\
    return decl;						\
  }

  DEF_SS_BUILTIN (enter, ". Ot",
		  BUILT_IN___STRUB_ENTER,
		  (void_type_node, get_qpwmt (), NULL))
  DEF_SS_BUILTIN (update, ". Wt",
		  BUILT_IN___STRUB_UPDATE,
		  (void_type_node, get_qpwmt (), NULL))
  DEF_SS_BUILTIN (leave, ". w ",
		  BUILT_IN___STRUB_LEAVE,
		  (void_type_node, get_qpwmt (), NULL))

#undef DEF_SS_BUILTIN

  /* Define strub identifiers on demand.  */
#define DEF_IDENT(IDX, NAME)						\
  static inline tree get_ ## NAME () {					\
    int idx = STRUB_IDENT_BASE + IDX;					\
    tree identifier = strub_cache[idx];					\
    if (!identifier)							\
      strub_cache[idx] = identifier = get_identifier (".strub." #NAME); \
    return identifier;							\
  }

  DEF_IDENT (0, watermark_ptr)
  DEF_IDENT (1, va_list_ptr)
  DEF_IDENT (2, apply_args)

#undef DEF_IDENT

  static inline int adjust_at_calls_type (tree);
  static inline void adjust_at_calls_call (cgraph_edge *, int, tree);
  static inline void adjust_at_calls_calls (cgraph_node *);

  /* Add to SEQ a call to the strub watermark update builtin, taking NODE's
     location if given.  Optionally add the corresponding edge from NODE, with
     execution frequency COUNT.  Return the modified SEQ.  */

  static inline gimple_seq
  call_update_watermark (tree wmptr, cgraph_node *node, profile_count count,
			 gimple_seq seq = NULL)
    {
      tree uwm = get_update ();
      gcall *update = gimple_build_call (uwm, 1, wmptr);
      if (node)
	gimple_set_location (update, DECL_SOURCE_LOCATION (node->decl));
      gimple_seq_add_stmt (&seq, update);
      if (node)
	node->create_edge (cgraph_node::get_create (uwm), update, count, false);
      return seq;
    }

};
1916 } // anon namespace
1918 /* Gather with this type a collection of parameters that we're turning into
1919 explicit references. */
1921 typedef hash_set<tree> indirect_parms_t;
/* Dereference OP's incoming turned-into-reference parm if it's an
   INDIRECT_PARMS or an ADDR_EXPR thereof.  Set *REC and return according to
   gimple-walking expectations.  */

static tree
maybe_make_indirect (indirect_parms_t &indirect_parms, tree op, int *rec)
{
  if (DECL_P (op))
    {
      /* Stop recursing: decls have no sub-operands to walk.  */
      *rec = 0;
      if (indirect_parms.contains (op))
	{
	  /* OP is a parm that became a reference: replace it with a
	     dereference, folding when possible.  */
	  tree ret = gimple_fold_indirect_ref (op);
	  if (!ret)
	    ret = build2 (MEM_REF,
			  TREE_TYPE (TREE_TYPE (op)),
			  op,
			  build_int_cst (TREE_TYPE (op), 0));
	  return ret;
	}
    }
  else if (TREE_CODE (op) == ADDR_EXPR
	   && DECL_P (TREE_OPERAND (op, 0)))
    {
      *rec = 0;
      if (indirect_parms.contains (TREE_OPERAND (op, 0)))
	{
	  /* &parm, where parm is now a reference: the reference itself
	     is the address we want.  */
	  op = TREE_OPERAND (op, 0);
	  return op;
	}
    }

  /* No replacement needed.  */
  return NULL_TREE;
}
1958 /* A gimple-walking function that adds dereferencing to indirect parms. */
1960 static tree
1961 walk_make_indirect (tree *op, int *rec, void *arg)
1963 walk_stmt_info *wi = (walk_stmt_info *)arg;
1964 indirect_parms_t &indirect_parms = *(indirect_parms_t *)wi->info;
1966 if (!*op || TYPE_P (*op))
1968 *rec = 0;
1969 return NULL_TREE;
1972 if (tree repl = maybe_make_indirect (indirect_parms, *op, rec))
1974 *op = repl;
1975 wi->changed = true;
1978 return NULL_TREE;
/* A gimple-walking function that turns any non-gimple-val ADDR_EXPRs into a
   separate SSA.  Though addresses of e.g. parameters, and of members thereof,
   are gimple vals, turning parameters into references, with an extra layer of
   indirection and thus explicit dereferencing, need to be regimplified.  ARG
   is the walk_stmt_info whose INFO field holds the iterator at the stmt being
   walked.  */

static tree
walk_regimplify_addr_expr (tree *op, int *rec, void *arg)
{
  walk_stmt_info *wi = (walk_stmt_info *)arg;
  gimple_stmt_iterator &gsi = *(gimple_stmt_iterator *)wi->info;

  /* ADDR_EXPR operands were already handled; never recurse.  */
  *rec = 0;

  if (!*op || TREE_CODE (*op) != ADDR_EXPR)
    return NULL_TREE;

  if (!is_gimple_val (*op))
    {
      /* Force the address computation into its own stmt(s) before GSI and
	 replace the operand with the resulting SSA name.  */
      tree ret = force_gimple_operand_gsi (&gsi, *op, true,
					   NULL_TREE, true, GSI_SAME_STMT);
      /* force_gimple_operand_gsi must have created something new, since
	 *OP was not a gimple val.  */
      gcc_assert (ret != *op);
      *op = ret;
      wi->changed = true;
    }

  return NULL_TREE;
}
/* Turn STMT's PHI arg defs into separate SSA defs if they've become
   non-gimple_val.  Return TRUE if any edge insertions need to be committed.  */

static bool
walk_regimplify_phi (gphi *stmt)
{
  bool needs_commit = false;

  for (unsigned i = 0, n = gimple_phi_num_args (stmt); i < n; i++)
    {
      tree op = gimple_phi_arg_def (stmt, i);
      if ((TREE_CODE (op) == ADDR_EXPR
	   && !is_gimple_val (op))
	  /* ??? A PARM_DECL that was addressable in the original function and
	     had its address in PHI nodes, but that became a reference in the
	     wrapped clone would NOT be updated by update_ssa in PHI nodes.
	     Alas, if we were to create a default def for it now, update_ssa
	     would complain that the symbol that needed rewriting already has
	     SSA names associated with it.  OTOH, leaving the PARM_DECL alone,
	     it eventually causes errors because it remains unchanged in PHI
	     nodes, but it gets rewritten as expected if it appears in other
	     stmts.  So we cheat a little here, and force the PARM_DECL out of
	     the PHI node and into an assignment.  It's a little expensive,
	     because we insert it at the edge, which introduces a basic block
	     that's entirely unnecessary, but it works, and the block will be
	     removed as the default def gets propagated back into the PHI node,
	     so the final optimized code looks just as expected.  */
	  || (TREE_CODE (op) == PARM_DECL
	      && !TREE_ADDRESSABLE (op)))
	{
	  /* Replace the offending arg with a fresh SSA name, and assign the
	     original value to it on the incoming edge.  */
	  tree temp = make_ssa_name (TREE_TYPE (op), stmt);
	  if (TREE_CODE (op) == PARM_DECL)
	    /* Keep the parm's name on the SSA name for readable dumps.  */
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (temp, DECL_NAME (op));
	  SET_PHI_ARG_DEF (stmt, i, temp);

	  gimple *assign = gimple_build_assign (temp, op);
	  if (gimple_phi_arg_has_location (stmt, i))
	    gimple_set_location (assign, gimple_phi_arg_location (stmt, i));
	  /* Deferred insertion; the caller must gsi_commit_edge_inserts when
	     we return true.  */
	  gsi_insert_on_edge (gimple_phi_arg_edge (stmt, i), assign);
	  needs_commit = true;
	}
    }

  return needs_commit;
}
2055 /* Create a reference type to use for PARM when turning it into a
2056 reference. */
2058 static tree
2059 build_ref_type_for (tree parm)
2061 gcc_checking_assert (TREE_CODE (parm) == PARM_DECL);
2063 tree ref_type = build_reference_type (TREE_TYPE (parm));
2065 return ref_type;
2068 /* Add cgraph edges from current_function_decl to callees in SEQ with frequency
2069 COUNT, assuming all calls in SEQ are direct. */
2071 static void
2072 add_call_edges_for_seq (gimple_seq seq, profile_count count)
2074 cgraph_node *node = cgraph_node::get_create (current_function_decl);
2076 for (gimple_stmt_iterator gsi = gsi_start (seq);
2077 !gsi_end_p (gsi); gsi_next (&gsi))
2079 gimple *stmt = gsi_stmt (gsi);
2081 gcall *call = dyn_cast <gcall *> (stmt);
2082 if (!call)
2083 continue;
2085 tree callee = gimple_call_fndecl (call);
2086 gcc_checking_assert (callee);
2087 node->create_edge (cgraph_node::get_create (callee), call, count, false);
/* Insert SEQ after the call at GSI, as if the call was in a try block with SEQ
   as finally, i.e., SEQ will run after the call whether it returns or
   propagates an exception.  This handles block splitting, EH edge and block
   creation, noreturn and nothrow optimizations, and even throwing calls
   without preexisting local handlers.  */

static void
gsi_insert_finally_seq_after_call (gimple_stmt_iterator gsi, gimple_seq seq)
{
  if (!seq)
    return;

  gimple *stmt = gsi_stmt (gsi);

  if (gimple_has_location (stmt))
    annotate_all_with_location (seq, gimple_location (stmt));

  /* Characterize the stmt: can it return normally, and can it throw to a
     point where SEQ would have to run?  */
  gcall *call = dyn_cast <gcall *> (stmt);
  bool noreturn_p = call && gimple_call_noreturn_p (call);
  int eh_lp = lookup_stmt_eh_lp (stmt);
  /* A negative landing-pad number means a MUST_NOT_THROW region.  */
  bool must_not_throw_p = eh_lp < 0;
  bool nothrow_p = (must_not_throw_p
		    || (call && gimple_call_nothrow_p (call))
		    || (eh_lp <= 0
			&& (TREE_NOTHROW (cfun->decl)
			    || !opt_for_fn (cfun->decl, flag_exceptions))));

  /* Neither returns nor throws: there is no path on which SEQ would run.  */
  if (noreturn_p && nothrow_p)
    return;

  /* Don't expect an EH edge if we're not to throw, or if we're not in an EH
     region yet.  */
  bool no_eh_edge_p = (nothrow_p || !eh_lp);
  bool must_end_bb = stmt_ends_bb_p (stmt);

  /* Locate the fallthru and EH successor edges, if the call ends its block.
     Under checking, scan all edges to assert there are no duplicates.  */
  edge eft = NULL, eeh = NULL;
  if (must_end_bb && !(noreturn_p && no_eh_edge_p))
    {
      gcc_checking_assert (gsi_one_before_end_p (gsi));

      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, gsi_bb (gsi)->succs)
	{
	  if ((e->flags & EDGE_EH))
	    {
	      gcc_checking_assert (!eeh);
	      eeh = e;
#if !CHECKING_P
	      if (eft || noreturn_p)
		break;
#endif
	    }

	  if ((e->flags & EDGE_FALLTHRU))
	    {
	      gcc_checking_assert (!eft);
	      eft = e;
#if !CHECKING_P
	      if (eeh || no_eh_edge_p)
		break;
#endif
	    }
	}

      /* The edges found must match the noreturn/nothrow analysis above.  */
      gcc_checking_assert (!(eft && (eft->flags & EDGE_FALLTHRU))
			   == noreturn_p);
      gcc_checking_assert (!(eeh && (eeh->flags & EDGE_EH))
			   == no_eh_edge_p);
      gcc_checking_assert (eft != eeh);
    }

  /* Insert SEQ on the normal-return path.  If the call can also throw, we
     will need SEQ again for the EH path, so insert a copy here and keep the
     original for below.  */
  if (!noreturn_p)
    {
      gimple_seq nseq = nothrow_p ? seq : gimple_seq_copy (seq);

      if (must_end_bb)
	{
	  gcc_checking_assert (gsi_one_before_end_p (gsi));
	  add_call_edges_for_seq (nseq, eft->count ());
	  gsi_insert_seq_on_edge_immediate (eft, nseq);
	}
      else
	{
	  add_call_edges_for_seq (nseq, gsi_bb (gsi)->count);
	  gsi_insert_seq_after (&gsi, nseq, GSI_SAME_STMT);
	}
    }

  if (nothrow_p)
    return;

  /* There is an EH region: run SEQ on the existing EH edge.  */
  if (eh_lp)
    {
      add_call_edges_for_seq (seq, eeh->count ());
      gsi_insert_seq_on_edge_immediate (eeh, seq);
      return;
    }

  /* A throwing call may appear within a basic block in a function that
     doesn't have any EH regions.  We're going to add a cleanup if so,
     therefore the block will have to be split.  */
  basic_block bb = gsi_bb (gsi);
  if (!gsi_one_before_end_p (gsi))
    split_block (bb, stmt);

  /* Create a new block for the EH cleanup.  */
  basic_block bb_eh_cleanup = create_empty_bb (bb);
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, bb_eh_cleanup, bb);
  if (current_loops)
    add_bb_to_loop (bb_eh_cleanup, current_loops->tree_root);

  /* Make the new block an EH cleanup for the call.  */
  eh_region new_r = gen_eh_region_cleanup (NULL);
  eh_landing_pad lp = gen_eh_landing_pad (new_r);
  tree label = gimple_block_label (bb_eh_cleanup);
  lp->post_landing_pad = label;
  EH_LANDING_PAD_NR (label) = lp->index;
  add_stmt_to_eh_lp (stmt, lp->index);

  /* Add the cleanup code to the EH cleanup block.  */
  gsi = gsi_after_labels (bb_eh_cleanup);
  gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  /* And then propagate the exception further.  */
  gresx *resx = gimple_build_resx (new_r->index);
  if (gimple_has_location (stmt))
    gimple_set_location (resx, gimple_location (stmt));
  gsi_insert_before (&gsi, resx, GSI_SAME_STMT);

  /* Finally, wire the EH cleanup block into the CFG.  */
  edge neeh = make_eh_edge (stmt);
  neeh->probability = profile_probability::never ();
  gcc_checking_assert (neeh->dest == bb_eh_cleanup);
  gcc_checking_assert (!neeh->dest->count.initialized_p ());
  neeh->dest->count = neeh->count ();
  add_call_edges_for_seq (seq, neeh->dest->count);
}
/* Copy the attribute list at *ATTRS, minus any NAME attributes, leaving
   shareable trailing nodes alone.  Attribute lists may be shared between
   type variants, so nodes up to (and including) each removed NAME attribute
   are duplicated rather than modified in place; the tail after the last
   removed attribute remains shared.  */

static inline void
remove_named_attribute_unsharing (const char *name, tree *attrs)
{
  while (tree found = lookup_attribute (name, *attrs))
    {
      /* Copy nodes up to the next NAME attribute, re-anchoring *ATTRS at the
	 fresh copies so the original (possibly shared) nodes are left
	 untouched.  */
      while (*attrs != found)
	{
	  *attrs = tree_cons (TREE_PURPOSE (*attrs),
			      TREE_VALUE (*attrs),
			      TREE_CHAIN (*attrs));
	  attrs = &TREE_CHAIN (*attrs);
	}
      /* Then drop it, skipping over FOUND by splicing in its chain.  */
      gcc_checking_assert (*attrs == found);
      *attrs = TREE_CHAIN (*attrs);
    }
}
2252 /* Record the order of the last cgraph entry whose mode we've already set, so
2253 that we can perform mode setting incrementally without duplication. */
2254 static int last_cgraph_order;
/* Set strub modes for functions introduced since the last call.  Uses
   last_cgraph_order to process each symbol-table entry exactly once across
   repeated invocations.  */

static void
ipa_strub_set_mode_for_new_functions ()
{
  /* symtab->order is the next order number to be assigned; if it hasn't
     moved, no new functions were introduced.  */
  if (symtab->order == last_cgraph_order)
    return;

  cgraph_node *node;

  /* Go through the functions twice, once over non-aliases, and then over
     aliases, so that aliases can reuse the mode computation of their ultimate
     targets.  */
  for (int aliases = 0; aliases <= 1; aliases++)
    FOR_EACH_FUNCTION (node)
      {
	/* First pass handles only non-aliases, second only aliases.  */
	if (!node->alias != !aliases)
	  continue;

	/* Already done in a previous invocation.  */
	if (node->order < last_cgraph_order)
	  continue;

	set_strub_mode (node);
      }

  last_cgraph_order = symtab->order;
}
2285 /* Return FALSE if NODE is a strub context, and TRUE otherwise. */
2287 bool
2288 strub_splittable_p (cgraph_node *node)
2290 switch (get_strub_mode (node))
2292 case STRUB_WRAPPED:
2293 case STRUB_AT_CALLS:
2294 case STRUB_AT_CALLS_OPT:
2295 case STRUB_INLINABLE:
2296 case STRUB_INTERNAL:
2297 case STRUB_WRAPPER:
2298 return false;
2300 case STRUB_CALLABLE:
2301 case STRUB_DISABLED:
2302 break;
2304 default:
2305 gcc_unreachable ();
2308 return true;
/* Return the PARM_DECL of the incoming watermark pointer, if there is one.
   Only modes whose functions carry the extra watermark-pointer parameter
   can yield one; all other modes return NULL_TREE.  */

tree
strub_watermark_parm (tree fndecl)
{
  switch (get_strub_mode_from_fndecl (fndecl))
    {
    /* These modes have the watermark pointer added to their signature.  */
    case STRUB_WRAPPED:
    case STRUB_AT_CALLS:
    case STRUB_AT_CALLS_OPT:
      break;

    case STRUB_INTERNAL:
    case STRUB_WRAPPER:
    case STRUB_CALLABLE:
    case STRUB_DISABLED:
    case STRUB_INLINABLE:
      return NULL_TREE;

    default:
      gcc_unreachable ();
    }

  for (tree parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    /* The type (variant) compare finds the parameter even in a just-created
       clone, before we set its name, but the type-based compare doesn't work
       during builtin expansion within the lto compiler, because we'll have
       created a separate variant in that run.  */
    if (TREE_TYPE (parm) == pass_ipa_strub::get_qpwmt ()
	|| DECL_NAME (parm) == pass_ipa_strub::get_watermark_ptr ())
      return parm;

  /* Functions in the modes above must carry the parameter.  */
  gcc_unreachable ();
}
/* Adjust a STRUB_AT_CALLS function TYPE, adding a watermark pointer if it
   hasn't been added yet.  Return the named argument count.  */

int
pass_ipa_strub::adjust_at_calls_type (tree type)
{
  int named_args = 0;

  gcc_checking_assert (same_strub_mode_in_variants_p (type));

  /* Unprototyped function: nothing to adjust, no named args.  */
  if (!TYPE_ARG_TYPES (type))
    return named_args;

  tree *tlist = &TYPE_ARG_TYPES (type);
  tree qpwmptrt = get_qpwmt ();
  /* Walk the named args, copying the list nodes as we go so shared variants
     are not modified in place.  */
  while (*tlist && TREE_VALUE (*tlist) != void_type_node)
    {
      /* The type has already been adjusted.  */
      if (TREE_VALUE (*tlist) == qpwmptrt)
	return named_args;
      named_args++;
      *tlist = tree_cons (TREE_PURPOSE (*tlist),
			  TREE_VALUE (*tlist),
			  TREE_CHAIN (*tlist));
      tlist = &TREE_CHAIN (*tlist);
    }

  /* Add the new argument after all named arguments, so as to not mess with
     attributes that reference parameters.  */
  *tlist = tree_cons (NULL_TREE, get_qpwmt (), *tlist);

#if ATTR_FNSPEC_DECONST_WATERMARK
  /* NOTE(review): type_already_adjusted is not declared in this function, so
     this block will not compile as-is if ATTR_FNSPEC_DECONST_WATERMARK is
     ever enabled — verify before enabling.  */
  if (!type_already_adjusted)
    {
      int flags = flags_from_decl_or_type (type);
      tree fnspec = lookup_attribute ("fn spec", type);

      if ((flags & (ECF_CONST | ECF_PURE | ECF_NOVOPS)) || fnspec)
	{
	  size_t xargs = 1;
	  size_t curlen = 0, tgtlen = 2 + 2 * (named_args + xargs);
	  auto_vec<char> nspecv (tgtlen);
	  char *nspec = &nspecv[0]; /* It will *not* be NUL-terminated!  */
	  if (fnspec)
	    {
	      tree fnspecstr = TREE_VALUE (TREE_VALUE (fnspec));
	      curlen = TREE_STRING_LENGTH (fnspecstr);
	      memcpy (nspec, TREE_STRING_POINTER (fnspecstr), curlen);
	    }
	  if (!curlen)
	    {
	      nspec[curlen++] = '.';
	      nspec[curlen++] = ((flags & ECF_CONST)
				 ? 'c'
				 : (flags & ECF_PURE)
				 ? 'p'
				 : ' ');
	    }
	  while (curlen < tgtlen - 2 * xargs)
	    {
	      nspec[curlen++] = '.';
	      nspec[curlen++] = ' ';
	    }
	  nspec[curlen++] = 'W';
	  nspec[curlen++] = 't';

	  /* The type has already been copied, if needed, before adding
	     parameters.  */
	  TYPE_ATTRIBUTES (type)
	    = tree_cons (get_identifier ("fn spec"),
			 build_tree_list (NULL_TREE,
					  build_string (tgtlen, nspec)),
			 TYPE_ATTRIBUTES (type));
	}
    }
#endif

  return named_args;
}
/* Adjust a call to an at-calls call target.  Create a watermark local
   variable if needed, initialize it before, pass it to the callee according
   to the modified at-calls interface, and release the callee's stack space
   after the call, if not deferred.  If the call is const or pure, arrange for
   the watermark to not be assumed unused or unchanged.  */

void
pass_ipa_strub::adjust_at_calls_call (cgraph_edge *e, int named_args,
				      tree callee_fntype)
{
  gcc_checking_assert (e->call_stmt);
  gcall *ocall = e->call_stmt;
  gimple_stmt_iterator gsi = gsi_for_stmt (ocall);

  /* Make sure we haven't modified this call yet: the arg right after the
     named ones would already be the watermark pointer.  */
  gcc_checking_assert (!(int (gimple_call_num_args (ocall)) > named_args
			 && (TREE_TYPE (gimple_call_arg (ocall, named_args))
			     == get_pwmt ())));

  /* Check target support on the callee decl if known, otherwise on the
     called function type.  */
  tree tsup;
  if (!(tsup = gimple_call_fndecl (ocall)))
    tsup = TREE_TYPE (TREE_TYPE (gimple_call_fn (ocall)));
  if (!strub_target_support_p (tsup, true, gimple_location (ocall)))
    return;

  /* If we're already within a strub context, pass on the incoming watermark
     pointer, and omit the enter and leave calls around the modified call, as
     an optimization, or as a means to satisfy a tail-call requirement.  */
  tree swmp = ((opt_for_fn (e->caller->decl, optimize_size)
		|| opt_for_fn (e->caller->decl, optimize) > 2
		|| gimple_call_must_tail_p (ocall)
		|| (opt_for_fn (e->caller->decl, optimize) == 2
		    && gimple_call_tail_p (ocall)))
	       ? strub_watermark_parm (e->caller->decl)
	       : NULL_TREE);
  bool omit_own_watermark = swmp;
  tree swm = NULL_TREE;
  if (!omit_own_watermark)
    {
      /* Set up a local watermark variable and its address to pass.  */
      swm = create_tmp_var (get_wmt (), ".strub.watermark");
      TREE_ADDRESSABLE (swm) = true;
      swmp = build1 (ADDR_EXPR, get_pwmt (), swm);

      /* Initialize the watermark before the call.  */
      tree enter = get_enter ();
      gcall *stptr = gimple_build_call (enter, 1,
					unshare_expr (swmp));
      if (gimple_has_location (ocall))
	gimple_set_location (stptr, gimple_location (ocall));
      gsi_insert_before (&gsi, stptr, GSI_SAME_STMT);
      e->caller->create_edge (cgraph_node::get_create (enter),
			      stptr, gsi_bb (gsi)->count, false);
    }

  /* Replace the call with one that passes the swmp argument first.  */
  gcall *wrcall;
  { gcall *stmt = ocall;
    // Mostly copied from gimple_call_copy_skip_args.
    int i = 0;
    int nargs = gimple_call_num_args (stmt);
    auto_vec<tree> vargs (MAX (nargs, named_args) + 1);
    gcall *new_stmt;

    /* pr71109.c calls a prototypeless function, then defines it with
       additional arguments.  It's ill-formed, but after it's inlined,
       it somehow works out.  */
    for (; i < named_args && i < nargs; i++)
      vargs.quick_push (gimple_call_arg (stmt, i));
    /* Pad out missing named args with null pointers.  */
    for (; i < named_args; i++)
      vargs.quick_push (null_pointer_node);

    /* The watermark pointer goes right after the named args, before any
       variadic ones.  */
    vargs.quick_push (unshare_expr (swmp));

    for (; i < nargs; i++)
      vargs.quick_push (gimple_call_arg (stmt, i));

    if (gimple_call_internal_p (stmt))
      /* Internal calls never reach at-calls adjustment.  */
      gcc_unreachable ();
    else
      new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
    gimple_call_set_fntype (new_stmt, callee_fntype);

    if (gimple_call_lhs (stmt))
      gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

    gimple_move_vops (new_stmt, stmt);

    if (gimple_has_location (stmt))
      gimple_set_location (new_stmt, gimple_location (stmt));
    gimple_call_copy_flags (new_stmt, stmt);
    gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));

    gimple_set_modified (new_stmt, true);

    wrcall = new_stmt;
  }

  update_stmt (wrcall);
  gsi_replace (&gsi, wrcall, true);
  cgraph_edge::set_call_stmt (e, wrcall, false);

  /* Insert the strub code after the call.  */
  gimple_seq seq = NULL;

#if !ATTR_FNSPEC_DECONST_WATERMARK
  /* If the call will be assumed to not modify or even read the
     watermark, make it read and modified ourselves.  */
  if ((gimple_call_flags (wrcall)
       & (ECF_CONST | ECF_PURE | ECF_NOVOPS)))
    {
      if (!swm)
	swm = build2 (MEM_REF,
		      TREE_TYPE (TREE_TYPE (swmp)),
		      swmp,
		      build_int_cst (TREE_TYPE (swmp), 0));

      /* An empty asm that takes the watermark as both input and output
	 forces the optimizers to assume it is read and written.  */
      vec<tree, va_gc> *inputs = NULL;
      vec<tree, va_gc> *outputs = NULL;
      vec_safe_push (outputs,
		     build_tree_list
		     (build_tree_list
		      (NULL_TREE, build_string (2, "=m")),
		      unshare_expr (swm)));
      vec_safe_push (inputs,
		     build_tree_list
		     (build_tree_list
		      (NULL_TREE, build_string (1, "m")),
		      unshare_expr (swm)));
      gasm *forcemod = gimple_build_asm_vec ("", inputs, outputs,
					     NULL, NULL);
      gimple_seq_add_stmt (&seq, forcemod);

      /* If the call will be assumed to not even read the watermark,
	 make sure it is already in memory before the call.  */
      if ((gimple_call_flags (wrcall) & ECF_CONST))
	{
	  vec<tree, va_gc> *inputs = NULL;
	  vec_safe_push (inputs,
			 build_tree_list
			 (build_tree_list
			  (NULL_TREE, build_string (1, "m")),
			  unshare_expr (swm)));
	  gasm *force_store = gimple_build_asm_vec ("", inputs, NULL,
						    NULL, NULL);
	  if (gimple_has_location (wrcall))
	    gimple_set_location (force_store, gimple_location (wrcall));
	  gsi_insert_before (&gsi, force_store, GSI_SAME_STMT);
	}
    }
#endif

  if (!omit_own_watermark)
    {
      /* Release the scrubbed stack space and clobber the local watermark
	 after the call, whether it returns or throws; the finally-like
	 insertion below takes care of the EH path.  */
      gcall *sleave = gimple_build_call (get_leave (), 1,
					 unshare_expr (swmp));
      gimple_seq_add_stmt (&seq, sleave);

      gassign *clobber = gimple_build_assign (swm,
					      build_clobber
					      (TREE_TYPE (swm)));
      gimple_seq_add_stmt (&seq, clobber);
    }

  gsi_insert_finally_seq_after_call (gsi, seq);
}
2593 /* Adjust all at-calls calls in NODE. */
2595 void
2596 pass_ipa_strub::adjust_at_calls_calls (cgraph_node *node)
2598 /* Adjust unknown-callee indirect calls with STRUB_AT_CALLS types within
2599 onode. */
2600 if (node->indirect_calls)
2602 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2603 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
2605 gcc_checking_assert (e->indirect_unknown_callee);
2607 if (!e->call_stmt)
2608 continue;
2610 tree callee_fntype;
2611 enum strub_mode callee_mode
2612 = effective_strub_mode_for_call (e->call_stmt, &callee_fntype);
2614 if (callee_mode != STRUB_AT_CALLS
2615 && callee_mode != STRUB_AT_CALLS_OPT)
2616 continue;
2618 int named_args = adjust_at_calls_type (callee_fntype);
2620 adjust_at_calls_call (e, named_args, callee_fntype);
2622 pop_cfun ();
2625 if (node->callees)
2627 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2628 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
2630 gcc_checking_assert (!e->indirect_unknown_callee);
2632 if (!e->call_stmt)
2633 continue;
2635 tree callee_fntype;
2636 enum strub_mode callee_mode
2637 = effective_strub_mode_for_call (e->call_stmt, &callee_fntype);
2639 if (callee_mode != STRUB_AT_CALLS
2640 && callee_mode != STRUB_AT_CALLS_OPT)
2641 continue;
2643 int named_args = adjust_at_calls_type (callee_fntype);
2645 adjust_at_calls_call (e, named_args, callee_fntype);
2647 pop_cfun ();
/* The strubm (strub mode) pass computes a strub mode for each function in the
   call graph, and checks, before any inlining, that strub callability
   requirements in effect are satisfied.  */

unsigned int
pass_ipa_strub_mode::execute (function *)
{
  /* Reset the incremental-processing cursor so that all functions have their
     mode (re)computed by this pass.  */
  last_cgraph_order = 0;
  ipa_strub_set_mode_for_new_functions ();

  /* Verify before any inlining or other transformations.  */
  verify_strub ();

  return 0;
}
/* Create a strub mode pass.  CTXT is the compiler context to register the
   pass with.  */

simple_ipa_opt_pass *
make_pass_ipa_strub_mode (gcc::context *ctxt)
{
  return new pass_ipa_strub_mode (ctxt);
}
2675 /* The strub pass proper adjusts types, signatures, and at-calls calls, and
2676 splits internal-strub functions. */
2678 unsigned int
2679 pass_ipa_strub::execute (function *)
2681 cgraph_node *onode;
2683 ipa_strub_set_mode_for_new_functions ();
2685 /* First, adjust the signature of at-calls functions. We adjust types of
2686 at-calls functions first, so that we don't modify types in place unless
2687 strub is explicitly requested. */
2688 FOR_EACH_FUNCTION (onode)
2690 enum strub_mode mode = get_strub_mode (onode);
2692 if (mode == STRUB_AT_CALLS
2693 || mode == STRUB_AT_CALLS_OPT)
2695 /* Create a type variant if strubbing was not explicitly requested in
2696 the function type. */
2697 if (get_strub_mode_from_type (TREE_TYPE (onode->decl)) != mode)
2698 distinctify_node_type (onode);
2700 int named_args = adjust_at_calls_type (TREE_TYPE (onode->decl));
2702 /* An external function explicitly declared with strub won't have a
2703 body. Even with implicit at-calls strub, a function may have had its
2704 body removed after we selected the mode, and then we have nothing
2705 further to do. */
2706 if (!onode->has_gimple_body_p ())
2707 continue;
2709 tree *pargs = &DECL_ARGUMENTS (onode->decl);
2711 /* A noninterposable_alias reuses the same parm decl chain, don't add
2712 the parm twice. */
2713 bool aliased_parms = (onode->alias && *pargs
2714 && DECL_CONTEXT (*pargs) != onode->decl);
2716 if (aliased_parms)
2717 continue;
2719 for (int i = 0; i < named_args; i++)
2720 pargs = &DECL_CHAIN (*pargs);
2722 tree wmptr = build_decl (DECL_SOURCE_LOCATION (onode->decl),
2723 PARM_DECL,
2724 get_watermark_ptr (),
2725 get_qpwmt ());
2726 DECL_ARTIFICIAL (wmptr) = 1;
2727 DECL_ARG_TYPE (wmptr) = get_qpwmt ();
2728 DECL_CONTEXT (wmptr) = onode->decl;
2729 TREE_USED (wmptr) = 1;
2730 DECL_CHAIN (wmptr) = *pargs;
2731 *pargs = wmptr;
2733 if (onode->alias)
2734 continue;
2736 cgraph_node *nnode = onode;
2737 push_cfun (DECL_STRUCT_FUNCTION (nnode->decl));
2740 edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2741 gimple_seq seq = call_update_watermark (wmptr, nnode, e->src->count);
2742 gsi_insert_seq_on_edge_immediate (e, seq);
2745 if (DECL_STRUCT_FUNCTION (nnode->decl)->calls_alloca)
2747 basic_block bb;
2748 FOR_EACH_BB_FN (bb, cfun)
2749 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2750 !gsi_end_p (gsi); gsi_next (&gsi))
2752 gimple *stmt = gsi_stmt (gsi);
2754 gcall *call = dyn_cast <gcall *> (stmt);
2756 if (!call)
2757 continue;
2759 if (gimple_alloca_call_p (call))
2761 /* Capture stack growth. */
2762 gimple_seq seq = call_update_watermark (wmptr, NULL,
2763 gsi_bb (gsi)
2764 ->count);
2765 gsi_insert_finally_seq_after_call (gsi, seq);
2770 pop_cfun ();
2774 FOR_EACH_FUNCTION (onode)
2776 if (!onode->has_gimple_body_p ())
2777 continue;
2779 enum strub_mode mode = get_strub_mode (onode);
2781 if (mode != STRUB_INTERNAL)
2783 adjust_at_calls_calls (onode);
2784 continue;
2787 bool is_stdarg = calls_builtin_va_start_p (onode);;
2788 bool apply_args = calls_builtin_apply_args_p (onode);
2790 vec<ipa_adjusted_param, va_gc> *nparms = NULL;
2791 unsigned j = 0;
2793 // The following loop copied from ipa-split.c:split_function.
2794 for (tree parm = DECL_ARGUMENTS (onode->decl);
2795 parm; parm = DECL_CHAIN (parm), j++)
2797 ipa_adjusted_param adj = {};
2798 adj.op = IPA_PARAM_OP_COPY;
2799 adj.base_index = j;
2800 adj.prev_clone_index = j;
2801 vec_safe_push (nparms, adj);
2804 if (apply_args)
2806 ipa_adjusted_param aaadj = {};
2807 aaadj.op = IPA_PARAM_OP_NEW;
2808 aaadj.type = get_qptr ();
2809 vec_safe_push (nparms, aaadj);
2812 if (is_stdarg)
2814 ipa_adjusted_param vladj = {};
2815 vladj.op = IPA_PARAM_OP_NEW;
2816 vladj.type = get_qpvalst ();
2817 vec_safe_push (nparms, vladj);
2820 ipa_adjusted_param wmadj = {};
2821 wmadj.op = IPA_PARAM_OP_NEW;
2822 wmadj.type = get_qpwmt ();
2823 vec_safe_push (nparms, wmadj);
2825 ipa_param_adjustments adj (nparms, -1, false);
2827 cgraph_node *nnode = onode->create_version_clone_with_body
2828 (auto_vec<cgraph_edge *> (0),
2829 NULL, &adj, NULL, NULL, "strub", NULL);
2831 if (!nnode)
2833 error_at (DECL_SOURCE_LOCATION (onode->decl),
2834 "failed to split %qD for %<strub%>",
2835 onode->decl);
2836 continue;
2839 onode->split_part = true;
2840 if (onode->calls_comdat_local)
2841 nnode->add_to_same_comdat_group (onode);
2843 set_strub_mode_to (onode, STRUB_WRAPPER);
2844 set_strub_mode_to (nnode, STRUB_WRAPPED);
2846 adjust_at_calls_calls (nnode);
2848 /* Decide which of the wrapped function's parms we want to turn into
2849 references to the argument passed to the wrapper. In general, we want to
2850 copy small arguments, and avoid copying large ones. Variable-sized array
2851 lengths given by other arguments, as in 20020210-1.c, would lead to
2852 problems if passed by value, after resetting the original function and
2853 dropping the length computation; passing them by reference works.
2854 DECL_BY_REFERENCE is *not* a substitute for this: it involves copying
2855 anyway, but performed at the caller. */
2856 indirect_parms_t indirect_nparms (3, false);
2857 unsigned adjust_ftype = 0;
2858 unsigned named_args = 0;
2859 for (tree parm = DECL_ARGUMENTS (onode->decl),
2860 nparm = DECL_ARGUMENTS (nnode->decl),
2861 nparmt = TYPE_ARG_TYPES (TREE_TYPE (nnode->decl));
2862 parm;
2863 named_args++,
2864 parm = DECL_CHAIN (parm),
2865 nparm = DECL_CHAIN (nparm),
2866 nparmt = nparmt ? TREE_CHAIN (nparmt) : NULL_TREE)
2867 if (TREE_THIS_VOLATILE (parm)
2868 || !(0 /* DECL_BY_REFERENCE (narg) */
2869 || is_gimple_reg_type (TREE_TYPE (nparm))
2870 || VECTOR_TYPE_P (TREE_TYPE (nparm))
2871 || TREE_CODE (TREE_TYPE (nparm)) == COMPLEX_TYPE
2872 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (nparm)))
2873 && (tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (nparm)))
2874 <= 4 * UNITS_PER_WORD))))
2876 /* No point in indirecting pointer types. Presumably they
2877 won't ever pass the size-based test above, but check the
2878 assumption here, because getting this wrong would mess
2879 with attribute access and possibly others. We deal with
2880 fn spec below. */
2881 gcc_checking_assert (!POINTER_TYPE_P (TREE_TYPE (nparm)));
2883 indirect_nparms.add (nparm);
2885 /* ??? Is there any case in which it is not safe to suggest the parms
2886 turned indirect don't alias anything else? They are distinct,
2887 unaliased memory in the wrapper, and the wrapped can't possibly
2888 take pointers into them because none of the pointers passed to the
2889 wrapper can alias other incoming parameters passed by value, even
2890 if with transparent reference, and the wrapper doesn't take any
2891 extra parms that could point into wrapper's parms. So we can
2892 probably drop the TREE_ADDRESSABLE and keep the TRUE. */
2893 tree ref_type = build_ref_type_for (nparm);
2895 DECL_ARG_TYPE (nparm) = TREE_TYPE (nparm) = ref_type;
2896 relayout_decl (nparm);
2897 TREE_ADDRESSABLE (nparm) = 0;
2898 DECL_BY_REFERENCE (nparm) = 0;
2899 DECL_NOT_GIMPLE_REG_P (nparm) = 0;
2900 /* ??? This avoids mismatches in debug info bind stmts in
2901 e.g. a-chahan . */
2902 DECL_ABSTRACT_ORIGIN (nparm) = NULL;
2904 if (nparmt)
2905 adjust_ftype++;
2908 /* Also adjust the wrapped function type, if needed. */
2909 if (adjust_ftype)
2911 tree nftype = TREE_TYPE (nnode->decl);
2913 /* We always add at least one argument at the end of the signature, when
2914 cloning the function, so we don't expect to need to duplicate the
2915 type here. */
2916 gcc_checking_assert (TYPE_ARG_TYPES (nftype)
2917 != TYPE_ARG_TYPES (TREE_TYPE (onode->decl)));
2919 /* Check that fnspec still works for the modified function signature,
2920 and drop it otherwise. */
2921 bool drop_fnspec = false;
2922 tree fnspec = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (nftype));
2923 attr_fnspec spec = fnspec ? attr_fnspec (fnspec) : attr_fnspec ("");
2925 unsigned retcopy;
2926 if (!(fnspec && spec.returns_arg (&retcopy)))
2927 retcopy = (unsigned) -1;
2929 unsigned i = 0;
2930 for (tree nparm = DECL_ARGUMENTS (nnode->decl),
2931 nparmt = TYPE_ARG_TYPES (nftype);
2932 adjust_ftype > 0;
2933 i++, nparm = DECL_CHAIN (nparm), nparmt = TREE_CHAIN (nparmt))
2934 if (indirect_nparms.contains (nparm))
2936 TREE_VALUE (nparmt) = TREE_TYPE (nparm);
2937 adjust_ftype--;
2939 if (fnspec && !drop_fnspec)
2941 if (i == retcopy)
2942 drop_fnspec = true;
2943 else if (spec.arg_specified_p (i))
2945 /* Properties that apply to pointers only must not be
2946 present, because we don't make pointers further
2947 indirect. */
2948 gcc_checking_assert
2949 (!spec.arg_max_access_size_given_by_arg_p (i, NULL));
2950 gcc_checking_assert (!spec.arg_copied_to_arg_p (i, NULL));
2952 /* Any claim of direct access only is invalidated by
2953 adding an indirection level. */
2954 if (spec.arg_direct_p (i))
2955 drop_fnspec = true;
2957 /* If there's a claim the argument is not read from, the
2958 added indirection invalidates it: if the argument is
2959 used at all, then the pointer will necessarily be
2960 read. */
2961 if (!spec.arg_maybe_read_p (i)
2962 && spec.arg_used_p (i))
2963 drop_fnspec = true;
2968 /* ??? Maybe we could adjust it instead. Note we don't need
2969 to mess with attribute access: pointer-typed parameters are
2970 not modified, so they can remain unchanged. */
2971 if (drop_fnspec)
2972 remove_named_attribute_unsharing ("fn spec",
2973 &TYPE_ATTRIBUTES (nftype));
2975 TREE_TYPE (nnode->decl) = nftype;
2978 #if ATTR_FNSPEC_DECONST_WATERMARK
2980 int flags = flags_from_decl_or_type (nnode->decl);
2981 tree fnspec = lookup_attribute ("fn spec", TREE_TYPE (nnode->decl));
2983 if ((flags & (ECF_CONST | ECF_PURE | ECF_NOVOPS)) || fnspec)
2985 size_t xargs = 1 + int (is_stdarg) + int (apply_args);
2986 size_t curlen = 0, tgtlen = 2 + 2 * (named_args + xargs);
2987 auto_vec<char> nspecv (tgtlen);
2988 char *nspec = &nspecv[0]; /* It will *not* be NUL-terminated! */
2989 bool no_writes_p = true;
2990 if (fnspec)
2992 tree fnspecstr = TREE_VALUE (TREE_VALUE (fnspec));
2993 curlen = TREE_STRING_LENGTH (fnspecstr);
2994 memcpy (nspec, TREE_STRING_POINTER (fnspecstr), curlen);
2995 if (!(flags & (ECF_CONST | ECF_PURE | ECF_NOVOPS))
2996 && curlen >= 2
2997 && nspec[1] != 'c' && nspec[1] != 'C'
2998 && nspec[1] != 'p' && nspec[1] != 'P')
2999 no_writes_p = false;
3001 if (!curlen)
3003 nspec[curlen++] = '.';
3004 nspec[curlen++] = ((flags & ECF_CONST)
3005 ? 'c'
3006 : (flags & ECF_PURE)
3007 ? 'p'
3008 : ' ');
3010 while (curlen < tgtlen - 2 * xargs)
3012 nspec[curlen++] = '.';
3013 nspec[curlen++] = ' ';
3016 /* These extra args are unlikely to be present in const or pure
3017 functions. It's conceivable that a function that takes variable
3018 arguments, or that passes its arguments on to another function,
3019 could be const or pure, but it would not modify the arguments, and,
3020 being pure or const, it couldn't possibly modify or even access
3021 memory referenced by them. But it can read from these internal
3022 data structures created by the wrapper, and from any
3023 argument-passing memory referenced by them, so we denote the
3024 possibility of reading from multiple levels of indirection, but
3025 only of reading because const/pure. */
3026 if (apply_args)
3028 nspec[curlen++] = 'r';
3029 nspec[curlen++] = ' ';
3031 if (is_stdarg)
3033 nspec[curlen++] = (no_writes_p ? 'r' : '.');
3034 nspec[curlen++] = (no_writes_p ? 't' : ' ');
3037 nspec[curlen++] = 'W';
3038 nspec[curlen++] = 't';
3040 /* The type has already been copied before adding parameters. */
3041 gcc_checking_assert (TYPE_ARG_TYPES (TREE_TYPE (nnode->decl))
3042 != TYPE_ARG_TYPES (TREE_TYPE (onode->decl)));
3043 TYPE_ATTRIBUTES (TREE_TYPE (nnode->decl))
3044 = tree_cons (get_identifier ("fn spec"),
3045 build_tree_list (NULL_TREE,
3046 build_string (tgtlen, nspec)),
3047 TYPE_ATTRIBUTES (TREE_TYPE (nnode->decl)));
3050 #endif
3053 tree decl = onode->decl;
3054 cgraph_node *target = nnode;
3056 { // copied from create_wrapper
3058 /* Preserve DECL_RESULT so we get right by reference flag. */
3059 tree decl_result = DECL_RESULT (decl);
3061 /* Remove the function's body but keep arguments to be reused
3062 for thunk. */
3063 onode->release_body (true);
3064 onode->reset (/* unlike create_wrapper: preserve_comdat_group = */true);
3066 DECL_UNINLINABLE (decl) = false;
3067 DECL_RESULT (decl) = decl_result;
3068 DECL_INITIAL (decl) = NULL;
3069 allocate_struct_function (decl, false);
3070 set_cfun (NULL);
3072 /* Turn alias into thunk and expand it into GIMPLE representation. */
3073 onode->definition = true;
3075 thunk_info::get_create (onode);
3076 onode->thunk = true;
3077 onode->create_edge (target, NULL, onode->count);
3078 onode->callees->can_throw_external = !TREE_NOTHROW (target->decl);
3080 tree arguments = DECL_ARGUMENTS (decl);
3082 while (arguments)
3084 TREE_ADDRESSABLE (arguments) = false;
3085 arguments = TREE_CHAIN (arguments);
3089 tree alias = onode->callees->callee->decl;
3090 tree thunk_fndecl = decl;
3091 tree a;
3093 int nxargs = 1 + is_stdarg + apply_args;
3095 { // Simplified from expand_thunk.
3096 tree restype;
3097 basic_block bb, then_bb, else_bb, return_bb;
3098 gimple_stmt_iterator bsi;
3099 int nargs = 0;
3100 tree arg;
3101 int i;
3102 tree resdecl;
3103 tree restmp = NULL;
3105 gcall *call;
3106 greturn *ret;
3107 bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);
3109 a = DECL_ARGUMENTS (thunk_fndecl);
3111 current_function_decl = thunk_fndecl;
3113 /* Ensure thunks are emitted in their correct sections. */
3114 resolve_unique_section (thunk_fndecl, 0,
3115 flag_function_sections);
3117 bitmap_obstack_initialize (NULL);
3119 /* Build the return declaration for the function. */
3120 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
3121 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
3123 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
3124 DECL_ARTIFICIAL (resdecl) = 1;
3125 DECL_IGNORED_P (resdecl) = 1;
3126 DECL_CONTEXT (resdecl) = thunk_fndecl;
3127 DECL_RESULT (thunk_fndecl) = resdecl;
3129 else
3130 resdecl = DECL_RESULT (thunk_fndecl);
3132 profile_count cfg_count = onode->count;
3133 if (!cfg_count.initialized_p ())
3134 cfg_count = profile_count::from_gcov_type (BB_FREQ_MAX).guessed_local ();
3136 bb = then_bb = else_bb = return_bb
3137 = init_lowered_empty_function (thunk_fndecl, true, cfg_count);
3139 bsi = gsi_start_bb (bb);
3141 /* Build call to the function being thunked. */
3142 if (!VOID_TYPE_P (restype)
3143 && (!alias_is_noreturn
3144 || TREE_ADDRESSABLE (restype)
3145 || TREE_CODE (TYPE_SIZE_UNIT (restype)) != INTEGER_CST))
3147 if (DECL_BY_REFERENCE (resdecl))
3149 restmp = gimple_fold_indirect_ref (resdecl);
3150 if (!restmp)
3151 restmp = build2 (MEM_REF,
3152 TREE_TYPE (TREE_TYPE (resdecl)),
3153 resdecl,
3154 build_int_cst (TREE_TYPE (resdecl), 0));
3156 else if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
3158 restmp = resdecl;
3160 if (VAR_P (restmp))
3162 add_local_decl (cfun, restmp);
3163 BLOCK_VARS (DECL_INITIAL (current_function_decl))
3164 = restmp;
3167 else
3168 restmp = create_tmp_reg (restype, "retval");
3171 for (arg = a; arg; arg = DECL_CHAIN (arg))
3172 nargs++;
3173 auto_vec<tree> vargs (nargs + nxargs);
3174 i = 0;
3175 arg = a;
3177 if (nargs)
3178 for (tree nparm = DECL_ARGUMENTS (nnode->decl);
3179 i < nargs;
3180 i++, arg = DECL_CHAIN (arg), nparm = DECL_CHAIN (nparm))
3182 tree save_arg = arg;
3184 /* Arrange to pass indirectly the parms, if we decided to do
3185 so, and revert its type in the wrapper. */
3186 if (indirect_nparms.contains (nparm))
3188 tree ref_type = TREE_TYPE (nparm);
3189 TREE_ADDRESSABLE (arg) = true;
3190 arg = build1 (ADDR_EXPR, ref_type, arg);
3192 else if (!TREE_THIS_VOLATILE (arg))
3193 DECL_NOT_GIMPLE_REG_P (arg) = 0;
3195 /* Convert the argument back to the type used by the calling
3196 conventions, e.g. a non-prototyped float type is passed as
3197 double, as in 930603-1.c, and needs to be converted back to
3198 double to be passed on unchanged to the wrapped
3199 function. */
3200 if (TREE_TYPE (nparm) != DECL_ARG_TYPE (nparm))
3202 tree tmp = arg;
3203 /* If ARG is e.g. volatile, we must copy and
3204 convert in separate statements. */
3205 if (!is_gimple_val (arg))
3207 tmp = create_tmp_reg (TYPE_MAIN_VARIANT
3208 (TREE_TYPE (arg)), "arg");
3209 gimple *stmt = gimple_build_assign (tmp, arg);
3210 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
3212 arg = fold_convert (DECL_ARG_TYPE (nparm), tmp);
3215 if (!is_gimple_val (arg))
3217 tree tmp = create_tmp_reg (TYPE_MAIN_VARIANT
3218 (TREE_TYPE (arg)), "arg");
3219 gimple *stmt = gimple_build_assign (tmp, arg);
3220 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
3221 arg = tmp;
3223 vargs.quick_push (arg);
3224 arg = save_arg;
3226 /* These strub arguments are adjusted later. */
3227 if (apply_args)
3228 vargs.quick_push (null_pointer_node);
3229 if (is_stdarg)
3230 vargs.quick_push (null_pointer_node);
3231 vargs.quick_push (null_pointer_node);
3232 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias),
3233 vargs);
3234 onode->callees->call_stmt = call;
3235 // gimple_call_set_from_thunk (call, true);
3236 if (DECL_STATIC_CHAIN (alias))
3238 tree p = DECL_STRUCT_FUNCTION (alias)->static_chain_decl;
3239 tree type = TREE_TYPE (p);
3240 tree decl = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
3241 PARM_DECL, create_tmp_var_name ("CHAIN"),
3242 type);
3243 DECL_ARTIFICIAL (decl) = 1;
3244 DECL_IGNORED_P (decl) = 1;
3245 TREE_USED (decl) = 1;
3246 DECL_CONTEXT (decl) = thunk_fndecl;
3247 DECL_ARG_TYPE (decl) = type;
3248 TREE_READONLY (decl) = 1;
3250 struct function *sf = DECL_STRUCT_FUNCTION (thunk_fndecl);
3251 sf->static_chain_decl = decl;
3253 gimple_call_set_chain (call, decl);
3256 /* Return slot optimization is always possible and in fact required to
3257 return values with DECL_BY_REFERENCE. */
3258 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
3259 && (!is_gimple_reg_type (TREE_TYPE (resdecl))
3260 || DECL_BY_REFERENCE (resdecl)))
3261 gimple_call_set_return_slot_opt (call, true);
3263 if (restmp)
3265 gimple_call_set_lhs (call, restmp);
3266 gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
3267 TREE_TYPE (TREE_TYPE (alias))));
3269 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
3270 if (!alias_is_noreturn)
3272 /* Build return value. */
3273 if (!DECL_BY_REFERENCE (resdecl))
3274 ret = gimple_build_return (restmp);
3275 else
3276 ret = gimple_build_return (resdecl);
3278 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
3280 else
3282 remove_edge (single_succ_edge (bb));
3285 cfun->gimple_df->in_ssa_p = true;
3286 update_max_bb_count ();
3287 profile_status_for_fn (cfun)
3288 = cfg_count.initialized_p () && cfg_count.ipa_p ()
3289 ? PROFILE_READ : PROFILE_GUESSED;
3290 /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
3291 // TREE_ASM_WRITTEN (thunk_fndecl) = false;
3292 delete_unreachable_blocks ();
3293 update_ssa (TODO_update_ssa);
3294 checking_verify_flow_info ();
3295 free_dominance_info (CDI_DOMINATORS);
3297 /* Since we want to emit the thunk, we explicitly mark its name as
3298 referenced. */
3299 onode->thunk = false;
3300 onode->lowered = true;
3301 bitmap_obstack_release (NULL);
3303 current_function_decl = NULL;
3304 set_cfun (NULL);
3307 thunk_info::remove (onode);
3309 // some more of create_wrapper at the end of the next block.
3314 tree aaval = NULL_TREE;
3315 tree vaptr = NULL_TREE;
3316 tree wmptr = NULL_TREE;
3317 for (tree arg = DECL_ARGUMENTS (nnode->decl); arg; arg = DECL_CHAIN (arg))
3319 aaval = vaptr;
3320 vaptr = wmptr;
3321 wmptr = arg;
3324 if (!apply_args)
3325 aaval = NULL_TREE;
3326 /* The trailing args are [apply_args], [va_list_ptr], and
3327 watermark. If we don't have a va_list_ptr, the penultimate
3328 argument is apply_args.
3330 else if (!is_stdarg)
3331 aaval = vaptr;
3333 if (!is_stdarg)
3334 vaptr = NULL_TREE;
3336 DECL_NAME (wmptr) = get_watermark_ptr ();
3337 DECL_ARTIFICIAL (wmptr) = 1;
3338 DECL_IGNORED_P (wmptr) = 1;
3339 TREE_USED (wmptr) = 1;
3341 if (is_stdarg)
3343 DECL_NAME (vaptr) = get_va_list_ptr ();
3344 DECL_ARTIFICIAL (vaptr) = 1;
3345 DECL_IGNORED_P (vaptr) = 1;
3346 TREE_USED (vaptr) = 1;
3349 if (apply_args)
3351 DECL_NAME (aaval) = get_apply_args ();
3352 DECL_ARTIFICIAL (aaval) = 1;
3353 DECL_IGNORED_P (aaval) = 1;
3354 TREE_USED (aaval) = 1;
3357 push_cfun (DECL_STRUCT_FUNCTION (nnode->decl));
3360 edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3361 gimple_seq seq = call_update_watermark (wmptr, nnode, e->src->count);
3362 gsi_insert_seq_on_edge_immediate (e, seq);
3365 bool any_indirect = !indirect_nparms.is_empty ();
3367 if (any_indirect)
3369 basic_block bb;
3370 bool needs_commit = false;
3371 FOR_EACH_BB_FN (bb, cfun)
3373 for (gphi_iterator gsi = gsi_start_nonvirtual_phis (bb);
3374 !gsi_end_p (gsi);
3375 gsi_next_nonvirtual_phi (&gsi))
3377 gphi *stmt = gsi.phi ();
3379 walk_stmt_info wi = {};
3380 wi.info = &indirect_nparms;
3381 walk_gimple_op (stmt, walk_make_indirect, &wi);
3382 if (wi.changed && !is_gimple_debug (gsi_stmt (gsi)))
3383 if (walk_regimplify_phi (stmt))
3384 needs_commit = true;
3387 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
3388 !gsi_end_p (gsi); gsi_next (&gsi))
3390 gimple *stmt = gsi_stmt (gsi);
3392 walk_stmt_info wi = {};
3393 wi.info = &indirect_nparms;
3394 walk_gimple_op (stmt, walk_make_indirect, &wi);
3395 if (wi.changed)
3397 if (!is_gimple_debug (stmt))
3399 wi.info = &gsi;
3400 walk_gimple_op (stmt, walk_regimplify_addr_expr,
3401 &wi);
3403 update_stmt (stmt);
3407 if (needs_commit)
3408 gsi_commit_edge_inserts ();
3411 if (DECL_STRUCT_FUNCTION (nnode->decl)->calls_alloca
3412 || is_stdarg || apply_args)
3413 for (cgraph_edge *e = nnode->callees, *enext; e; e = enext)
3415 if (!e->call_stmt)
3416 continue;
3418 gcall *call = e->call_stmt;
3419 gimple_stmt_iterator gsi = gsi_for_stmt (call);
3420 tree fndecl = e->callee->decl;
3422 enext = e->next_callee;
3424 if (gimple_alloca_call_p (call))
3426 gimple_seq seq = call_update_watermark (wmptr, NULL,
3427 gsi_bb (gsi)->count);
3428 gsi_insert_finally_seq_after_call (gsi, seq);
3430 else if (fndecl && is_stdarg
3431 && fndecl_built_in_p (fndecl, BUILT_IN_VA_START))
3433 /* Using a non-default stdarg ABI makes the function ineligible
3434 for internal strub. */
3435 gcc_checking_assert (builtin_decl_explicit (BUILT_IN_VA_START)
3436 == fndecl);
3437 tree bvacopy = builtin_decl_explicit (BUILT_IN_VA_COPY);
3438 gimple_call_set_fndecl (call, bvacopy);
3439 tree arg = vaptr;
3440 /* The va_copy source must be dereferenced, unless it's an array
3441 type, that would have decayed to a pointer. */
3442 if (TREE_CODE (TREE_TYPE (TREE_TYPE (vaptr))) != ARRAY_TYPE)
3444 arg = gimple_fold_indirect_ref (vaptr);
3445 if (!arg)
3446 arg = build2 (MEM_REF,
3447 TREE_TYPE (TREE_TYPE (vaptr)),
3448 vaptr,
3449 build_int_cst (TREE_TYPE (vaptr), 0));
3450 if (!is_gimple_val (arg))
3451 arg = force_gimple_operand_gsi (&gsi, arg, true,
3452 NULL_TREE, true, GSI_SAME_STMT);
3454 gimple_call_set_arg (call, 1, arg);
3455 update_stmt (call);
3456 e->redirect_callee (cgraph_node::get_create (bvacopy));
3458 else if (fndecl && apply_args
3459 && fndecl_built_in_p (fndecl, BUILT_IN_APPLY_ARGS))
3461 tree lhs = gimple_call_lhs (call);
3462 gimple *assign = (lhs
3463 ? gimple_build_assign (lhs, aaval)
3464 : gimple_build_nop ());
3465 gsi_replace (&gsi, assign, true);
3466 cgraph_edge::remove (e);
3470 { // a little more copied from create_wrapper
3472 /* Inline summary set-up. */
3473 nnode->analyze ();
3474 // inline_analyze_function (nnode);
3477 pop_cfun ();
3481 push_cfun (DECL_STRUCT_FUNCTION (onode->decl));
3482 gimple_stmt_iterator gsi
3483 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
3485 gcall *wrcall;
3486 while (!(wrcall = dyn_cast <gcall *> (gsi_stmt (gsi))))
3487 gsi_next (&gsi);
3489 tree swm = create_tmp_var (get_wmt (), ".strub.watermark");
3490 TREE_ADDRESSABLE (swm) = true;
3491 tree swmp = build1 (ADDR_EXPR, get_pwmt (), swm);
3493 tree enter = get_enter ();
3494 gcall *stptr = gimple_build_call (enter, 1, unshare_expr (swmp));
3495 gimple_set_location (stptr, gimple_location (wrcall));
3496 gsi_insert_before (&gsi, stptr, GSI_SAME_STMT);
3497 onode->create_edge (cgraph_node::get_create (enter),
3498 stptr, gsi_bb (gsi)->count, false);
3500 int nargs = gimple_call_num_args (wrcall);
3502 gimple_seq seq = NULL;
3504 if (apply_args)
3506 tree aalst = create_tmp_var (ptr_type_node, ".strub.apply_args");
3507 tree bappargs = builtin_decl_explicit (BUILT_IN_APPLY_ARGS);
3508 gcall *appargs = gimple_build_call (bappargs, 0);
3509 gimple_call_set_lhs (appargs, aalst);
3510 gimple_set_location (appargs, gimple_location (wrcall));
3511 gsi_insert_before (&gsi, appargs, GSI_SAME_STMT);
3512 gimple_call_set_arg (wrcall, nargs - 2 - is_stdarg, aalst);
3513 onode->create_edge (cgraph_node::get_create (bappargs),
3514 appargs, gsi_bb (gsi)->count, false);
3517 if (is_stdarg)
3519 tree valst = create_tmp_var (va_list_type_node, ".strub.va_list");
3520 TREE_ADDRESSABLE (valst) = true;
3521 tree vaptr = build1 (ADDR_EXPR,
3522 build_pointer_type (va_list_type_node),
3523 valst);
3524 gimple_call_set_arg (wrcall, nargs - 2, unshare_expr (vaptr));
3526 tree bvastart = builtin_decl_explicit (BUILT_IN_VA_START);
3527 gcall *vastart = gimple_build_call (bvastart, 2,
3528 unshare_expr (vaptr),
3529 integer_zero_node);
3530 gimple_set_location (vastart, gimple_location (wrcall));
3531 gsi_insert_before (&gsi, vastart, GSI_SAME_STMT);
3532 onode->create_edge (cgraph_node::get_create (bvastart),
3533 vastart, gsi_bb (gsi)->count, false);
3535 tree bvaend = builtin_decl_explicit (BUILT_IN_VA_END);
3536 gcall *vaend = gimple_build_call (bvaend, 1, unshare_expr (vaptr));
3537 gimple_set_location (vaend, gimple_location (wrcall));
3538 gimple_seq_add_stmt (&seq, vaend);
3541 gimple_call_set_arg (wrcall, nargs - 1, unshare_expr (swmp));
3542 // gimple_call_set_tail (wrcall, false);
3543 update_stmt (wrcall);
3546 #if !ATTR_FNSPEC_DECONST_WATERMARK
3547 /* If the call will be assumed to not modify or even read the
3548 watermark, make it read and modified ourselves. */
3549 if ((gimple_call_flags (wrcall)
3550 & (ECF_CONST | ECF_PURE | ECF_NOVOPS)))
3552 vec<tree, va_gc> *inputs = NULL;
3553 vec<tree, va_gc> *outputs = NULL;
3554 vec_safe_push (outputs,
3555 build_tree_list
3556 (build_tree_list
3557 (NULL_TREE, build_string (2, "=m")),
3558 swm));
3559 vec_safe_push (inputs,
3560 build_tree_list
3561 (build_tree_list
3562 (NULL_TREE, build_string (1, "m")),
3563 swm));
3564 gasm *forcemod = gimple_build_asm_vec ("", inputs, outputs,
3565 NULL, NULL);
3566 gimple_seq_add_stmt (&seq, forcemod);
3568 /* If the call will be assumed to not even read the watermark,
3569 make sure it is already in memory before the call. */
3570 if ((gimple_call_flags (wrcall) & ECF_CONST))
3572 vec<tree, va_gc> *inputs = NULL;
3573 vec_safe_push (inputs,
3574 build_tree_list
3575 (build_tree_list
3576 (NULL_TREE, build_string (1, "m")),
3577 swm));
3578 gasm *force_store = gimple_build_asm_vec ("", inputs, NULL,
3579 NULL, NULL);
3580 gimple_set_location (force_store, gimple_location (wrcall));
3581 gsi_insert_before (&gsi, force_store, GSI_SAME_STMT);
3584 #endif
3586 gcall *sleave = gimple_build_call (get_leave (), 1,
3587 unshare_expr (swmp));
3588 gimple_seq_add_stmt (&seq, sleave);
3590 gassign *clobber = gimple_build_assign (swm,
3591 build_clobber
3592 (TREE_TYPE (swm)));
3593 gimple_seq_add_stmt (&seq, clobber);
3596 gsi_insert_finally_seq_after_call (gsi, seq);
3598 /* For nnode, we don't rebuild edges because we wish to retain
3599 any redirections copied to it from earlier passes, so we add
3600 call graph edges explicitly there, but for onode, we create a
3601 fresh function, so we may as well just issue the calls and
3602 then rebuild all cgraph edges. */
3603 // cgraph_edge::rebuild_edges ();
3604 onode->analyze ();
3605 // inline_analyze_function (onode);
3607 pop_cfun ();
3611 return 0;
3614 simple_ipa_opt_pass *
3615 make_pass_ipa_strub (gcc::context *ctxt)
3617 return new pass_ipa_strub (ctxt);
3620 #include "gt-ipa-strub.h"