hppa: Fix LO_SUM DLTIND14R address support in PRINT_OPERAND_ADDRESS
[official-gcc.git] / gcc / ipa-strub.cc
blobdff94222351adeadf5099c219afca43806aa91f7
1 /* strub (stack scrubbing) support.
2 Copyright (C) 2021-2024 Free Software Foundation, Inc.
3 Contributed by Alexandre Oliva <oliva@adacore.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "gimplify.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "gimple-iterator.h"
31 #include "gimplify-me.h"
32 #include "tree-into-ssa.h"
33 #include "tree-ssa.h"
34 #include "tree-cfg.h"
35 #include "cfghooks.h"
36 #include "cfgloop.h"
37 #include "cfgcleanup.h"
38 #include "tree-eh.h"
39 #include "except.h"
40 #include "builtins.h"
41 #include "attribs.h"
42 #include "tree-inline.h"
43 #include "cgraph.h"
44 #include "alloc-pool.h"
45 #include "symbol-summary.h"
46 #include "sreal.h"
47 #include "ipa-cp.h"
48 #include "ipa-prop.h"
49 #include "ipa-fnsummary.h"
50 #include "gimple-fold.h"
51 #include "fold-const.h"
52 #include "gimple-walk.h"
53 #include "tree-dfa.h"
54 #include "langhooks.h"
55 #include "calls.h"
56 #include "vec.h"
57 #include "stor-layout.h"
58 #include "varasm.h"
59 #include "alias.h"
60 #include "diagnostic.h"
61 #include "intl.h"
62 #include "ipa-strub.h"
63 #include "symtab-thunks.h"
64 #include "attr-fnspec.h"
65 #include "target.h"
67 /* This file introduces two passes that, together, implement
68 machine-independent stack scrubbing, strub for short. It arranges
69 for stack frames that have strub enabled to be zeroed-out after
70 relinquishing control to a caller, whether by returning or by
71 propagating an exception. This admittedly unusual design decision
72 was driven by exception support (one needs a stack frame to be
73 active to propagate exceptions out of it), and it enabled an
74 implementation that is entirely machine-independent (no custom
75 epilogue code is required).
77 Strub modes can be selected for stack frames by attaching attribute
78 strub to functions or to variables (to their types, actually).
79 Different strub modes, with different implementation details, are
80 available, and they can be selected by an argument to the strub
81 attribute. When enabled by strub-enabled variables, whether by
82 accessing (as in reading from) statically-allocated ones, or by
83 introducing (as in declaring) automatically-allocated ones, a
84 suitable mode is selected automatically.
86 At-calls mode modifies the interface of a function, adding a stack
87 watermark argument, that callers use to clean up the stack frame of
88 the called function. Because of the interface change, it can only
89 be used when explicitly selected, or when a function is internal to
90 a translation unit. Strub-at-calls function types are distinct
91 from their original types (they're not modified in-place), and they
92 are not interchangeable with other function types.
94 Internal mode, in turn, does not modify the type or the interface
95 of a function. It is currently implemented by turning the function
96 into a wrapper, moving the function body to a separate wrapped
97 function, and scrubbing the wrapped body's stack in the wrapper.
98 Internal-strub function types are mostly interface-compatible with
99 other strub modes, namely callable (from strub functions, though
100 not strub-enabled) and disabled (not callable from strub
101 functions).
103 Always_inline functions can be strub functions, but they can only
104 be called from other strub functions, because strub functions must
105 never be inlined into non-strub functions. Internal and at-calls
106 modes are indistinguishable when it comes to always_inline
107 functions: they will necessarily be inlined into another strub
108 function, and will thus be integrated into the caller's stack
109 frame, whatever the mode. (Contrast with non-always_inline strub
110 functions: an at-calls function can be called from other strub
111 functions, ensuring no discontinuity in stack erasing, whereas an
112 internal-strub function can only be called from other strub
113 functions if it happens to be inlined, or if -fstrub=relaxed mode
114 is in effect (that's the default). In -fstrub=strict mode,
115 internal-strub functions are not callable from strub functions,
116 because the wrapper itself is not strubbed.)
118 The implementation involves two simple-IPA passes. The earliest
119 one, strub-mode, assigns strub modes to functions. It needs to run
120 before any inlining, so that we can prevent inlining of strub
121 functions into non-strub functions. It notes explicit strub mode
122 requests, enables strub in response to strub variables and testing
123 options, and flags unsatisfiable requests.
125 Three possibilities of unsatisfiable requests come to mind: (a)
126 when a strub mode is explicitly selected, but the function uses
127 features that make it ineligible for that mode (e.g. at-calls rules
128 out calling __builtin_apply_args, because of the interface changes,
129 and internal mode rules out noclone or otherwise non-versionable
130 functions, non-default varargs, non-local or forced labels, and
131 functions with far too many arguments); (b) when some strub mode
132 must be enabled because of a strub variable, but the function is
133 not eligible or not viable for any mode; and (c) when
134 -fstrub=strict is enabled, and calls are found in strub functions
135 to functions that are not callable from strub contexts.
136 compute_strub_mode implements (a) and (b), and verify_strub
137 implements (c).
139 The second IPA pass modifies interfaces of at-calls-strub functions
140 and types, introduces strub calls in and around them, and splits
141 internal-strub functions. It is placed after early inlining, so
142 that even internal-strub functions get a chance of being inlined
143 into other strub functions, but before non-early inlining, so that
144 internal-strub wrapper functions still get a chance of inlining
145 after splitting.
147 Wrappers avoid duplicating the copying of large arguments again by
148 passing them by reference to the wrapped bodies. This involves
149 occasional SSA rewriting of address computations, because of the
150 additional indirection. Besides these changes, and the
151 introduction of the stack watermark parameter, wrappers and wrapped
152 functions cooperate to handle variable argument lists (performing
153 va_start in the wrapper, passing the list as an argument, and
154 replacing va_start calls in the wrapped body with va_copy), and
155 __builtin_apply_args (also called in the wrapper and passed to the
156 wrapped body as an argument).
158 Strub bodies (both internal-mode wrapped bodies, and at-calls
159 functions) always start by adjusting the watermark parameter, by
160 calling __builtin___strub_update. The compiler inserts them in the
161 main strub pass. Allocations of additional stack space for the
162 frame (__builtin_alloca) are also followed by watermark updates.
163 Stack space temporarily allocated to pass arguments to other
164 functions, released right after the call, is not regarded as part
165 of the frame. Around calls to them, i.e., in internal-mode
166 wrappers and at-calls callers (even calls through pointers), calls
167 to __builtin___strub_enter and __builtin___strub_leave are
168 inserted, the latter as a __finally block, so that it runs at
169 regular and exceptional exit paths. strub_enter only initializes
170 the stack watermark, and strub_leave is where the scrubbing takes
171 place, overwriting with zeros the stack space from the top of the
172 stack to the watermark.
174 These calls can be optimized in various cases. In
175 pass_ipa_strub::adjust_at_calls_call, for example, we enable
176 tail-calling and other optimized calls from one strub body to
177 another by passing on the watermark parameter. The builtins
178 themselves may undergo inline substitution during expansion,
179 depending on optimization levels. This involves dealing with stack
180 red zones (when the builtins are called out-of-line, the red zone
181 cannot be used) and other ugly details related with inlining strub
182 bodies into other strub bodies (see expand_builtin_strub_update).
183 expand_builtin_strub_leave may even perform partial inline
184 substitution. */
186 /* Const and pure functions that gain a watermark parameter for strub purposes
187 are still regarded as such, which may cause the inline expansions of the
188 __strub builtins to malfunction. Ideally, attribute "fn spec" would enable
189 us to inform the backend about requirements and side effects of the call, but
190 call_fusage building in calls.c:expand_call does not even look at
191 attr_fnspec, so we resort to asm loads and updates to attain an equivalent
192 effect. Once expand_call gains the ability to issue extra memory uses and
193 clobbers based on pure/const function's fnspec, we can define this to 1. */
194 #define ATTR_FNSPEC_DECONST_WATERMARK 0
196 enum strub_mode {
197 /* This mode denotes a regular function, that does not require stack
198 scrubbing (strubbing). It may call any other functions, but if
199 it calls AT_CALLS (or WRAPPED) ones, strubbing logic is
200 automatically introduced around those calls (the latter, by
201 inlining INTERNAL wrappers). */
202 STRUB_DISABLED = 0,
204 /* This denotes a function whose signature is (to be) modified to
205 take an extra parameter, for stack use annotation, and its
206 callers must initialize and pass that argument, and perform the
207 strubbing. Functions that are explicitly marked with attribute
208 strub must have the mark visible wherever the function is,
209 including aliases, and overriders and overriding methods.
210 Functions that are implicitly marked for strubbing, for accessing
211 variables explicitly marked as such, will only select this
212 strubbing method if they are internal to a translation unit. It
213 can only be inlined into other strubbing functions, i.e.,
214 STRUB_AT_CALLS or STRUB_WRAPPED. */
215 STRUB_AT_CALLS = 1,
217 /* This denotes a function that is to perform strubbing internally,
218 without any changes to its interface (the function is turned into
219 a strubbing wrapper, and its original body is moved to a separate
220 STRUB_WRAPPED function, with a modified interface). Functions
221 may be explicitly marked with attribute strub(2), and the
222 attribute must be visible at the point of definition. Functions
223 that are explicitly marked for strubbing, for accessing variables
224 explicitly marked as such, may select this strubbing mode if
225 their interface cannot change, e.g. because its interface is
226 visible to other translation units, directly, by indirection
227 (having its address taken), inheritance, etc. Functions that use
228 this method must not have the noclone attribute, nor the noipa
229 one. Functions marked as always_inline may select this mode, but
230 they are NOT wrapped, they remain unchanged, and are only inlined
231 into strubbed contexts. Once non-always_inline functions are
232 wrapped, the wrapper becomes STRUB_WRAPPER, and the wrapped becomes
233 STRUB_WRAPPED. */
234 STRUB_INTERNAL = 2,
236 /* This denotes a function whose stack is not strubbed, but that is
237 nevertheless explicitly or implicitly marked as callable from strubbing
238 functions. Normally, only STRUB_AT_CALLS (and STRUB_INTERNAL ->
239 STRUB_WRAPPED) functions can be called from strubbing contexts (bodies of
240 STRUB_AT_CALLS, STRUB_INTERNAL and STRUB_WRAPPED functions), but attribute
241 strub(3) enables other functions to be (indirectly) called from these
242 contexts. Some builtins and internal functions may be implicitly marked as
243 STRUB_CALLABLE. */
244 STRUB_CALLABLE = 3,
246 /* This denotes the function that took over the body of a
247 STRUB_INTERNAL function. At first, it's only called by its
248 wrapper, but the wrapper may be inlined. The wrapped function,
249 in turn, can only be inlined into other functions whose stack
250 frames are strubbed, i.e., that are STRUB_WRAPPED or
251 STRUB_AT_CALLS. */
252 STRUB_WRAPPED = -1,
254 /* This denotes the wrapper function that replaced the STRUB_INTERNAL
255 function. This mode overrides the STRUB_INTERNAL mode at the time the
256 internal to-be-wrapped function becomes a wrapper, so that inlining logic
257 can tell one from the other. */
258 STRUB_WRAPPER = -2,
260 /* This denotes an always_inline function that requires strubbing. It can
261 only be called from, and inlined into, other strubbing contexts. */
262 STRUB_INLINABLE = -3,
264 /* This denotes a function that accesses strub variables, so it would call for
265 internal strubbing (whether or not it's eligible for that), but since
266 at-calls strubbing is viable, that's selected as an optimization. This
267 mode addresses the inconvenience that such functions may have different
268 modes selected depending on optimization flags, and get a different
269 callable status depending on that choice: if we assigned them
270 STRUB_AT_CALLS mode, they would be callable when optimizing, whereas
271 STRUB_INTERNAL would not be callable. */
272 STRUB_AT_CALLS_OPT = -4,
276 /* Look up a strub attribute in TYPE, and return it. */
278 static tree
279 get_strub_attr_from_type (tree type)
281 return lookup_attribute ("strub", TYPE_ATTRIBUTES (type));
284 /* Look up a strub attribute in DECL or in its type, and return it. */
286 static tree
287 get_strub_attr_from_decl (tree decl)
289 tree ret = lookup_attribute ("strub", DECL_ATTRIBUTES (decl));
290 if (ret)
291 return ret;
292 return get_strub_attr_from_type (TREE_TYPE (decl));
295 #define STRUB_ID_COUNT 8
296 #define STRUB_IDENT_COUNT 3
297 #define STRUB_TYPE_COUNT 5
299 #define STRUB_ID_BASE 0
300 #define STRUB_IDENT_BASE (STRUB_ID_BASE + STRUB_ID_COUNT)
301 #define STRUB_TYPE_BASE (STRUB_IDENT_BASE + STRUB_IDENT_COUNT)
302 #define STRUB_CACHE_SIZE (STRUB_TYPE_BASE + STRUB_TYPE_COUNT)
304 /* Keep the strub mode and temp identifiers and types from being GC'd. */
305 static GTY((deletable)) tree strub_cache[STRUB_CACHE_SIZE];
307 /* Define a function to cache identifier ID, to be used as a strub attribute
308 parameter for a strub mode named after NAME. */
309 #define DEF_STRUB_IDS(IDX, NAME, ID) \
310 static inline tree get_strub_mode_id_ ## NAME () { \
311 int idx = STRUB_ID_BASE + IDX; \
312 tree identifier = strub_cache[idx]; \
313 if (!identifier) \
314 strub_cache[idx] = identifier = get_identifier (ID); \
315 return identifier; \
317 /* Same as DEF_STRUB_IDS, but use the string expansion of NAME as ID. */
318 #define DEF_STRUB_ID(IDX, NAME) \
319 DEF_STRUB_IDS (IDX, NAME, #NAME)
321 /* Define functions for each of the strub mode identifiers.
322 Expose dashes rather than underscores. */
323 DEF_STRUB_ID (0, disabled)
324 DEF_STRUB_IDS (1, at_calls, "at-calls")
325 DEF_STRUB_ID (2, internal)
326 DEF_STRUB_ID (3, callable)
327 DEF_STRUB_ID (4, wrapped)
328 DEF_STRUB_ID (5, wrapper)
329 DEF_STRUB_ID (6, inlinable)
330 DEF_STRUB_IDS (7, at_calls_opt, "at-calls-opt")
332 /* Release the temporary macro names. */
333 #undef DEF_STRUB_IDS
334 #undef DEF_STRUB_ID
336 /* Return the identifier corresponding to strub MODE. */
338 static tree
339 get_strub_mode_attr_parm (enum strub_mode mode)
341 switch (mode)
343 case STRUB_DISABLED:
344 return get_strub_mode_id_disabled ();
346 case STRUB_AT_CALLS:
347 return get_strub_mode_id_at_calls ();
349 case STRUB_INTERNAL:
350 return get_strub_mode_id_internal ();
352 case STRUB_CALLABLE:
353 return get_strub_mode_id_callable ();
355 case STRUB_WRAPPED:
356 return get_strub_mode_id_wrapped ();
358 case STRUB_WRAPPER:
359 return get_strub_mode_id_wrapper ();
361 case STRUB_INLINABLE:
362 return get_strub_mode_id_inlinable ();
364 case STRUB_AT_CALLS_OPT:
365 return get_strub_mode_id_at_calls_opt ();
367 default:
368 gcc_unreachable ();
372 /* Return the parameters (TREE_VALUE) for a strub attribute of MODE.
373 We know we use a single parameter, so we bypass the creation of a
374 tree list. */
376 static tree
377 get_strub_mode_attr_value (enum strub_mode mode)
379 return get_strub_mode_attr_parm (mode);
382 /* Determine whether ID is a well-formed strub mode-specifying attribute
383 parameter for a function (type). Only user-visible modes are accepted, and
384 ID must be non-NULL.
386 For unacceptable parms, return 0, otherwise a nonzero value as below.
388 If the parm enables strub, return positive, otherwise negative.
390 If the affected type must be a distinct, incompatible type, return an integer
391 of absolute value 2, otherwise 1. */
394 strub_validate_fn_attr_parm (tree id)
396 int ret;
397 const char *s = NULL;
398 size_t len = 0;
400 /* do NOT test for NULL. This is only to be called with non-NULL arguments.
401 We assume that the strub parameter applies to a function, because only
402 functions accept an explicit argument. If we accepted NULL, and we
403 happened to be called to verify the argument for a variable, our return
404 values would be wrong. */
405 if (TREE_CODE (id) == STRING_CST)
407 s = TREE_STRING_POINTER (id);
408 len = TREE_STRING_LENGTH (id) - 1;
410 else if (TREE_CODE (id) == IDENTIFIER_NODE)
412 s = IDENTIFIER_POINTER (id);
413 len = IDENTIFIER_LENGTH (id);
415 else
416 return 0;
418 enum strub_mode mode;
420 if (len != 8)
421 return 0;
423 switch (s[0])
425 case 'd':
426 mode = STRUB_DISABLED;
427 ret = -1;
428 break;
430 case 'a':
431 mode = STRUB_AT_CALLS;
432 ret = 2;
433 break;
435 case 'i':
436 mode = STRUB_INTERNAL;
437 ret = 1;
438 break;
440 case 'c':
441 mode = STRUB_CALLABLE;
442 ret = -2;
443 break;
445 default:
446 /* Other parms are for internal use only. */
447 return 0;
450 tree mode_id = get_strub_mode_attr_parm (mode);
452 if (TREE_CODE (id) == IDENTIFIER_NODE
453 ? id != mode_id
454 : strncmp (s, IDENTIFIER_POINTER (mode_id), len) != 0)
455 return 0;
457 return ret;
460 /* Return the strub mode from STRUB_ATTR. VAR_P should be TRUE if the attribute
461 is taken from a variable, rather than from a function, or a type thereof. */
463 static enum strub_mode
464 get_strub_mode_from_attr (tree strub_attr, bool var_p = false)
466 enum strub_mode mode = STRUB_DISABLED;
468 if (strub_attr)
470 if (!TREE_VALUE (strub_attr))
471 mode = !var_p ? STRUB_AT_CALLS : STRUB_INTERNAL;
472 else
474 gcc_checking_assert (!var_p);
475 tree id = TREE_VALUE (strub_attr);
476 if (TREE_CODE (id) == TREE_LIST)
477 id = TREE_VALUE (id);
478 const char *s = (TREE_CODE (id) == STRING_CST
479 ? TREE_STRING_POINTER (id)
480 : IDENTIFIER_POINTER (id));
481 size_t len = (TREE_CODE (id) == STRING_CST
482 ? TREE_STRING_LENGTH (id) - 1
483 : IDENTIFIER_LENGTH (id));
485 switch (len)
487 case 7:
488 switch (s[6])
490 case 'r':
491 mode = STRUB_WRAPPER;
492 break;
494 case 'd':
495 mode = STRUB_WRAPPED;
496 break;
498 default:
499 gcc_unreachable ();
501 break;
503 case 8:
504 switch (s[0])
506 case 'd':
507 mode = STRUB_DISABLED;
508 break;
510 case 'a':
511 mode = STRUB_AT_CALLS;
512 break;
514 case 'i':
515 mode = STRUB_INTERNAL;
516 break;
518 case 'c':
519 mode = STRUB_CALLABLE;
520 break;
522 default:
523 gcc_unreachable ();
525 break;
527 case 9:
528 mode = STRUB_INLINABLE;
529 break;
531 case 12:
532 mode = STRUB_AT_CALLS_OPT;
533 break;
535 default:
536 gcc_unreachable ();
539 gcc_checking_assert (TREE_CODE (id) == IDENTIFIER_NODE
540 ? id == get_strub_mode_attr_parm (mode)
541 : strncmp (IDENTIFIER_POINTER
542 (get_strub_mode_attr_parm (mode)),
543 s, len) == 0);
547 return mode;
550 /* Look up, decode and return the strub mode associated with FNDECL. */
552 static enum strub_mode
553 get_strub_mode_from_fndecl (tree fndecl)
555 return get_strub_mode_from_attr (get_strub_attr_from_decl (fndecl));
558 /* Look up, decode and return the strub mode associated with NODE. */
560 static enum strub_mode
561 get_strub_mode (cgraph_node *node)
563 return get_strub_mode_from_fndecl (node->decl);
566 /* Look up, decode and return the strub mode associated with TYPE. */
568 static enum strub_mode
569 get_strub_mode_from_type (tree type)
571 bool var_p = !FUNC_OR_METHOD_TYPE_P (type);
572 tree attr = get_strub_attr_from_type (type);
574 if (attr)
575 return get_strub_mode_from_attr (attr, var_p);
577 if (flag_strub >= -1 && !var_p)
578 return STRUB_CALLABLE;
580 return STRUB_DISABLED;
584 /* Return TRUE iff NODE calls builtin va_start. */
586 static bool
587 calls_builtin_va_start_p (cgraph_node *node)
589 bool result = false;
591 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
593 tree cdecl = e->callee->decl;
594 if (fndecl_built_in_p (cdecl, BUILT_IN_VA_START))
595 return true;
598 return result;
601 /* Return TRUE iff NODE calls builtin apply_args, and optionally REPORT it. */
603 static bool
604 calls_builtin_apply_args_p (cgraph_node *node, bool report = false)
606 bool result = false;
608 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
610 tree cdecl = e->callee->decl;
611 if (!fndecl_built_in_p (cdecl, BUILT_IN_APPLY_ARGS))
612 continue;
614 result = true;
616 if (!report)
617 break;
619 sorry_at (e->call_stmt
620 ? gimple_location (e->call_stmt)
621 : DECL_SOURCE_LOCATION (node->decl),
622 "at-calls %<strub%> does not support call to %qD",
623 cdecl);
626 return result;
629 /* Return TRUE iff NODE carries the always_inline attribute. */
631 static inline bool
632 strub_always_inline_p (cgraph_node *node)
634 return lookup_attribute ("always_inline", DECL_ATTRIBUTES (node->decl));
637 /* Return TRUE iff the target has strub support for T, a function
638 decl, or a type used in an indirect call, and optionally REPORT the
639 reasons for ineligibility. If T is a type and error REPORTing is
640 enabled, the LOCation (of the indirect call) should be provided. */
641 static inline bool
642 strub_target_support_p (tree t, bool report = false,
643 location_t loc = UNKNOWN_LOCATION)
645 bool result = true;
647 if (!targetm.have_strub_support_for (t))
649 result = false;
651 if (!report)
652 return result;
654 if (DECL_P (t))
655 sorry_at (DECL_SOURCE_LOCATION (t),
656 "%qD is not eligible for %<strub%>"
657 " on the target system", t);
658 else
659 sorry_at (loc,
660 "unsupported %<strub%> call"
661 " on the target system");
664 return result;
667 /* Return TRUE iff NODE is potentially eligible for any strub-enabled mode, and
668 optionally REPORT the reasons for ineligibility. */
670 static inline bool
671 can_strub_p (cgraph_node *node, bool report = false)
673 bool result = strub_target_support_p (node->decl, report);
675 if (!report && (!result || strub_always_inline_p (node)))
676 return result;
678 if (flag_split_stack)
680 result = false;
682 if (!report)
683 return result;
685 sorry_at (DECL_SOURCE_LOCATION (node->decl),
686 "%qD is not eligible for %<strub%>"
687 " because %<-fsplit-stack%> is enabled",
688 node->decl);
691 if (lookup_attribute ("noipa", DECL_ATTRIBUTES (node->decl)))
693 result = false;
695 if (!report)
696 return result;
698 sorry_at (DECL_SOURCE_LOCATION (node->decl),
699 "%qD is not eligible for %<strub%>"
700 " because of attribute %<noipa%>",
701 node->decl);
704 /* We can't, and don't want to vectorize the watermark and other
705 strub-introduced parms. */
706 if (lookup_attribute ("simd", DECL_ATTRIBUTES (node->decl)))
708 result = false;
710 if (!report)
711 return result;
713 sorry_at (DECL_SOURCE_LOCATION (node->decl),
714 "%qD is not eligible for %<strub%>"
715 " because of attribute %<simd%>",
716 node->decl);
719 return result;
722 /* Return TRUE iff NODE is eligible for at-calls strub, and optionally REPORT
723 the reasons for ineligibility. Besides general non-eligibility for
724 strub-enabled modes, at-calls rules out calling builtin apply_args. */
726 static bool
727 can_strub_at_calls_p (cgraph_node *node, bool report = false)
729 bool result = !report || can_strub_p (node, report);
731 if (!result && !report)
732 return result;
734 return !calls_builtin_apply_args_p (node, report);
737 /* Return TRUE iff the called function (pointer or, if available,
738 decl) undergoes a significant type conversion for the call. Strub
739 mode changes between function types, and other non-useless type
740 conversions, are regarded as significant. When the function type
741 is overridden, the effective strub mode for the call is that of the
742 call fntype, rather than that of the pointer or of the decl.
743 Functions called with type overrides cannot undergo type changes;
744 it's as if their address was taken, so they're considered
745 non-viable for implicit at-calls strub mode. */
747 static inline bool
748 strub_call_fntype_override_p (const gcall *gs)
750 if (gimple_call_internal_p (gs))
751 return false;
752 tree fn_type = TREE_TYPE (TREE_TYPE (gimple_call_fn (gs)));
753 if (tree decl = gimple_call_fndecl (gs))
754 fn_type = TREE_TYPE (decl);
756 /* We do NOT want to take the mode from the decl here. This
757 function is used to tell whether we can change the strub mode of
758 a function, and whether the effective mode for the call is to be
759 taken from the decl or from an overrider type. When the strub
760 mode is explicitly declared, or overridden with a type cast, the
761 difference will be noticed in function types. However, if the
762 strub mode is implicit due to e.g. strub variables or -fstrub=*
763 command-line flags, we will adjust call types along with function
764 types. In either case, the presence of type or strub mode
765 overriders in calls will prevent a function from having its strub
766 modes changed in ways that would imply type changes, but taking
767 strub modes from decls would defeat this, since we set strub
768 modes and then call this function to tell whether the original
769 type was overridden to decide whether to adjust the call. We
770 need the answer to be about the type, not the decl. */
771 enum strub_mode mode = get_strub_mode_from_type (fn_type);
772 return (get_strub_mode_from_type (gs->u.fntype) != mode
773 || !useless_type_conversion_p (gs->u.fntype, fn_type));
776 /* Return TRUE iff NODE is called directly with a type override. */
778 static bool
779 called_directly_with_type_override_p (cgraph_node *node, void *)
781 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
782 if (e->call_stmt && strub_call_fntype_override_p (e->call_stmt))
783 return true;
785 return false;
788 /* Return TRUE iff NODE or any other nodes aliased to it are called
789 with type overrides. We can't safely change the type of such
790 functions. */
792 static bool
793 called_with_type_override_p (cgraph_node *node)
795 return (node->call_for_symbol_thunks_and_aliases
796 (called_directly_with_type_override_p, NULL, true, true));
799 /* Symbolic macro for the max number of arguments that internal strub may add to
800 a function. */
802 #define STRUB_INTERNAL_MAX_EXTRA_ARGS 3
804 /* We can't perform internal strubbing if the function body involves certain
805 features:
807 - a non-default __builtin_va_start (e.g. x86's __builtin_ms_va_start) is
808 currently unsupported because we can't discover the corresponding va_copy and
809 va_end decls in the wrapper, and we don't convey the alternate variable
810 arguments ABI to the modified wrapped function. The default
811 __builtin_va_start is supported by calling va_start/va_end at the wrapper,
812 that takes variable arguments, passing a pointer to the va_list object to the
813 wrapped function, that runs va_copy from it where the original function ran
814 va_start.
816 __builtin_next_arg is currently unsupported because the wrapped function
817 won't be a variable argument function. We could process it in the wrapper,
818 that remains a variable argument function, and replace calls in the wrapped
819 body, but we currently don't.
821 __builtin_return_address is rejected because it's generally used when the
822 actual caller matters, and introducing a wrapper breaks such uses as those in
823 the unwinder. */
825 static bool
826 can_strub_internally_p (cgraph_node *node, bool report = false)
828 bool result = !report || can_strub_p (node, report);
830 if (!result && !report)
831 return result;
833 if (!report && strub_always_inline_p (node))
834 return result;
836 /* Since we're not changing the function identity proper, just
837 moving its full implementation, we *could* disable
838 fun->cannot_be_copied_reason and/or temporarily drop a noclone
839 attribute, but we'd have to prevent remapping of the labels. */
840 if (lookup_attribute ("noclone", DECL_ATTRIBUTES (node->decl)))
842 result = false;
844 if (!report)
845 return result;
847 sorry_at (DECL_SOURCE_LOCATION (node->decl),
848 "%qD is not eligible for internal %<strub%>"
849 " because of attribute %<noclone%>",
850 node->decl);
853 if (node->has_gimple_body_p ())
855 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
857 tree cdecl = e->callee->decl;
858 if (!((fndecl_built_in_p (cdecl, BUILT_IN_VA_START)
859 && cdecl != builtin_decl_explicit (BUILT_IN_VA_START))
860 || fndecl_built_in_p (cdecl, BUILT_IN_NEXT_ARG)
861 || fndecl_built_in_p (cdecl, BUILT_IN_RETURN_ADDRESS)))
862 continue;
864 result = false;
866 if (!report)
867 return result;
869 sorry_at (e->call_stmt
870 ? gimple_location (e->call_stmt)
871 : DECL_SOURCE_LOCATION (node->decl),
872 "%qD is not eligible for internal %<strub%> "
873 "because it calls %qD",
874 node->decl, cdecl);
877 struct function *fun = DECL_STRUCT_FUNCTION (node->decl);
878 if (fun->has_nonlocal_label)
880 result = false;
882 if (!report)
883 return result;
885 sorry_at (DECL_SOURCE_LOCATION (node->decl),
886 "%qD is not eligible for internal %<strub%> "
887 "because it contains a non-local goto target",
888 node->decl);
891 if (fun->has_forced_label_in_static)
893 result = false;
895 if (!report)
896 return result;
898 sorry_at (DECL_SOURCE_LOCATION (node->decl),
899 "%qD is not eligible for internal %<strub%> "
900 "because the address of a local label escapes",
901 node->decl);
904 /* Catch any other case that would prevent versioning/cloning
905 so as to also have it covered above. */
906 gcc_checking_assert (!result /* || !node->has_gimple_body_p () */
907 || tree_versionable_function_p (node->decl));
910 /* Label values references are not preserved when copying. If referenced
911 in nested functions, as in 920415-1.c and 920721-4.c their decls get
912 remapped independently. The exclusion below might be too broad, in
913 that we might be able to support correctly cases in which the labels
914 are only used internally in a function, but disconnecting forced labels
915 from their original declarations is undesirable in general. */
916 basic_block bb;
917 FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (node->decl))
918 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
919 !gsi_end_p (gsi); gsi_next (&gsi))
921 glabel *label_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
922 tree target;
924 if (!label_stmt)
925 break;
927 target = gimple_label_label (label_stmt);
929 if (!FORCED_LABEL (target))
930 continue;
932 result = false;
934 if (!report)
935 return result;
937 sorry_at (gimple_location (label_stmt),
938 "internal %<strub%> does not support forced labels");
942 if (list_length (TYPE_ARG_TYPES (TREE_TYPE (node->decl)))
943 >= ((HOST_WIDE_INT_1 << IPA_PARAM_MAX_INDEX_BITS)
944 - STRUB_INTERNAL_MAX_EXTRA_ARGS))
946 result = false;
948 if (!report)
949 return result;
951 sorry_at (DECL_SOURCE_LOCATION (node->decl),
952 "%qD has too many arguments for internal %<strub%>",
953 node->decl);
956 return result;
959 /* Return TRUE iff NODE has any strub-requiring local variable, or accesses (as
960 in reading) any variable through a strub-requiring type. */
962 static bool
963 strub_from_body_p (cgraph_node *node)
965 if (!node->has_gimple_body_p ())
966 return false;
968 /* If any local variable is marked for strub... */
969 unsigned i;
970 tree var;
971 FOR_EACH_LOCAL_DECL (DECL_STRUCT_FUNCTION (node->decl),
972 i, var)
973 if (get_strub_mode_from_type (TREE_TYPE (var))
974 != STRUB_DISABLED)
975 return true;
977 /* Now scan the body for loads with strub-requiring types.
978 ??? Compound types don't propagate the strub requirement to
979 component types. */
980 basic_block bb;
981 FOR_EACH_BB_FN (bb, DECL_STRUCT_FUNCTION (node->decl))
982 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
983 !gsi_end_p (gsi); gsi_next (&gsi))
985 gimple *stmt = gsi_stmt (gsi);
987 if (!gimple_assign_load_p (stmt))
988 continue;
990 tree rhs = gimple_assign_rhs1 (stmt);
991 if (get_strub_mode_from_type (TREE_TYPE (rhs))
992 != STRUB_DISABLED)
993 return true;
996 return false;
999 /* Return TRUE iff node is associated with a builtin that should be callable
1000 from strub contexts. */
1002 static inline bool
1003 strub_callable_builtin_p (cgraph_node *node)
1005 if (DECL_BUILT_IN_CLASS (node->decl) != BUILT_IN_NORMAL)
1006 return false;
1008 enum built_in_function fcode = DECL_FUNCTION_CODE (node->decl);
1010 switch (fcode)
1012 case BUILT_IN_NONE:
1013 gcc_unreachable ();
1015 /* This temporarily allocates stack for the call, and we can't reasonably
1016 update the watermark for that. Besides, we don't check the actual call
1017 target, nor its signature, and it seems to be overkill to as much as
1018 try to do so. */
1019 case BUILT_IN_APPLY:
1020 return false;
1022 /* Conversely, this shouldn't be called from within strub contexts, since
1023 the caller may have had its signature modified. STRUB_INTERNAL is ok,
1024 the call will remain in the STRUB_WRAPPER, and removed from the
1025 STRUB_WRAPPED clone. */
1026 case BUILT_IN_APPLY_ARGS:
1027 return false;
1029 /* ??? Make all other builtins callable. We wish to make any builtin call
1030 the compiler might introduce on its own callable. Anything that is
1031 predictable enough as to be known not to allow stack data that should
1032 be strubbed to unintentionally escape to non-strub contexts can be
1033 allowed, and pretty much every builtin appears to fit this description.
1034 The exceptions to this rule seem to be rare, and only available as
1035 explicit __builtin calls, so let's keep it simple and allow all of
1036 them... */
1037 default:
1038 return true;
1042 /* Compute the strub mode to be used for NODE. STRUB_ATTR should be the strub
1043 attribute,found for NODE, if any. */
1045 static enum strub_mode
1046 compute_strub_mode (cgraph_node *node, tree strub_attr)
1048 enum strub_mode req_mode = get_strub_mode_from_attr (strub_attr);
1050 gcc_checking_assert (flag_strub >= -2 && flag_strub <= 3);
1052 /* Symbolic encodings of the -fstrub-* flags. */
1053 /* Enable strub when explicitly requested through attributes to functions or
1054 variables, reporting errors if the requests cannot be satisfied. */
1055 const bool strub_flag_auto = flag_strub < 0;
1056 /* strub_flag_auto with strub call verification; without this, functions are
1057 implicitly callable. */
1058 const bool strub_flag_strict = flag_strub < -1;
1059 /* Disable strub altogether, ignore attributes entirely. */
1060 const bool strub_flag_disabled = flag_strub == 0;
1061 /* On top of _auto, also enable strub implicitly for functions that can
1062 safely undergo at-calls strubbing. Internal mode will still be used in
1063 functions that request it explicitly with attribute strub(2), or when the
1064 function body requires strubbing and at-calls strubbing is not viable. */
1065 const bool strub_flag_at_calls = flag_strub == 1;
1066 /* On top of default, also enable strub implicitly for functions that can
1067 safely undergo internal strubbing. At-calls mode will still be used in
1068 functions that requiest it explicitly with attribute strub() or strub(1),
1069 or when the function body requires strubbing and internal strubbing is not
1070 viable. */
1071 const bool strub_flag_internal = flag_strub == 2;
1072 /* On top of default, also enable strub implicitly for functions that can
1073 safely undergo strubbing in either mode. When both modes are viable,
1074 at-calls is preferred. */
1075 const bool strub_flag_either = flag_strub == 3;
1076 /* Besides the default behavior, enable strub implicitly for all viable
1077 functions. */
1078 const bool strub_flag_viable = flag_strub > 0;
1080 /* The consider_* variables should be TRUE if selecting the corresponding
1081 strub modes would be consistent with requests from attributes and command
1082 line flags. Attributes associated with functions pretty much mandate a
1083 selection, and should report an error if not satisfied; strub_flag_auto
1084 implicitly enables some viable strub mode if that's required by references
1085 to variables marked for strub; strub_flag_viable enables strub if viable
1086 (even when favoring one mode, body-requested strub can still be satisfied
1087 by either mode), and falls back to callable, silently unless variables
1088 require strubbing. */
1090 const bool consider_at_calls
1091 = (!strub_flag_disabled
1092 && (strub_attr
1093 ? req_mode == STRUB_AT_CALLS
1094 : true));
1095 const bool consider_internal
1096 = (!strub_flag_disabled
1097 && (strub_attr
1098 ? req_mode == STRUB_INTERNAL
1099 : true));
1101 const bool consider_callable
1102 = (!strub_flag_disabled
1103 && (strub_attr
1104 ? req_mode == STRUB_CALLABLE
1105 : (!strub_flag_strict
1106 || strub_callable_builtin_p (node))));
1108 /* This is a shorthand for either strub-enabled mode. */
1109 const bool consider_strub
1110 = (consider_at_calls || consider_internal);
1112 /* We can cope with always_inline functions even with noipa and noclone,
1113 because we just leave them alone. */
1114 const bool is_always_inline
1115 = strub_always_inline_p (node);
1117 /* Strubbing in general, and each specific strub mode, may have its own set of
1118 requirements. We require noipa for strubbing, either because of cloning
1119 required for internal strub, or because of caller enumeration required for
1120 at-calls strub. We don't consider the at-calls mode eligible if it's not
1121 even considered, it has no further requirements. Internal mode requires
1122 cloning and the absence of certain features in the body and, like at-calls,
1123 it's not eligible if it's not even under consideration.
1125 ??? Do we need target hooks for further constraints? E.g., x86's
1126 "interrupt" attribute breaks internal strubbing because the wrapped clone
1127 carries the attribute and thus isn't callable; in this case, we could use a
1128 target hook to adjust the clone instead. */
1129 const bool strub_eligible
1130 = (consider_strub
1131 && (is_always_inline || can_strub_p (node)));
1132 const bool at_calls_eligible
1133 = (consider_at_calls && strub_eligible
1134 && can_strub_at_calls_p (node));
1135 const bool internal_eligible
1136 = (consider_internal && strub_eligible
1137 && (is_always_inline
1138 || can_strub_internally_p (node)));
1140 /* In addition to the strict eligibility requirements, some additional
1141 constraints are placed on implicit selection of certain modes. These do
1142 not prevent the selection of a mode if explicitly specified as part of a
1143 function interface (the strub attribute), but they may prevent modes from
1144 being selected by the command line or by function bodies. The only actual
1145 constraint is on at-calls mode: since we change the function's exposed
1146 signature, we won't do it implicitly if the function can possibly be used
1147 in ways that do not expect the signature change, e.g., if the function is
1148 available to or interposable by other units, if its address is taken,
1149 etc. */
1150 const bool at_calls_viable
1151 = (at_calls_eligible
1152 && (strub_attr
1153 || (node->has_gimple_body_p ()
1154 && (!node->externally_visible
1155 || (node->binds_to_current_def_p ()
1156 && node->can_be_local_p ()))
1157 && node->only_called_directly_p ()
1158 && !called_with_type_override_p (node))));
1159 const bool internal_viable
1160 = (internal_eligible);
1162 /* Shorthand. */
1163 const bool strub_viable
1164 = (at_calls_viable || internal_viable);
1166 /* We wish to analyze the body, to look for implicit requests for strub, both
1167 to implicitly enable it when the body calls for it, and to report errors if
1168 the body calls for it but neither mode is viable (even if that follows from
1169 non-eligibility because of the explicit specification of some non-strubbing
1170 mode). We can refrain from scanning the body only in rare circumstances:
1171 when strub is enabled by a function attribute (scanning might be redundant
1172 in telling us to also enable it), and when we are enabling strub implicitly
1173 but there are non-viable modes: we want to know whether strubbing is
1174 required, to fallback to another mode, even if we're only enabling a
1175 certain mode, or, when either mode would do, to report an error if neither
1176 happens to be viable. */
1177 const bool analyze_body
1178 = (strub_attr
1179 ? !consider_strub
1180 : (strub_flag_auto
1181 || (strub_flag_viable && (!at_calls_viable && !internal_viable))
1182 || (strub_flag_either && !strub_viable)));
1184 /* Cases in which strubbing is enabled or disabled by strub_flag_auto.
1185 Unsatisfiable requests ought to be reported. */
1186 const bool strub_required
1187 = ((strub_attr && consider_strub)
1188 || (analyze_body && strub_from_body_p (node)));
1190 /* Besides the required cases, we want to abide by the requests to enabling on
1191 an if-viable basis. */
1192 const bool strub_enable
1193 = (strub_required
1194 || (strub_flag_at_calls && at_calls_viable)
1195 || (strub_flag_internal && internal_viable)
1196 || (strub_flag_either && strub_viable));
1198 /* And now we're finally ready to select a mode that abides by the viability
1199 and eligibility constraints, and that satisfies the strubbing requirements
1200 and requests, subject to the constraints. If both modes are viable and
1201 strub is to be enabled, pick STRUB_AT_CALLS unless STRUB_INTERNAL was named
1202 as preferred. */
1203 const enum strub_mode mode
1204 = ((strub_enable && is_always_inline)
1205 ? (strub_required ? STRUB_INLINABLE : STRUB_CALLABLE)
1206 : (strub_enable && internal_viable
1207 && (strub_flag_internal || !at_calls_viable))
1208 ? STRUB_INTERNAL
1209 : (strub_enable && at_calls_viable)
1210 ? (strub_required && !strub_attr
1211 ? STRUB_AT_CALLS_OPT
1212 : STRUB_AT_CALLS)
1213 : consider_callable
1214 ? STRUB_CALLABLE
1215 : STRUB_DISABLED);
1217 switch (mode)
1219 case STRUB_CALLABLE:
1220 if (is_always_inline)
1221 break;
1222 /* Fall through. */
1224 case STRUB_DISABLED:
1225 if (strub_enable && !strub_attr)
1227 gcc_checking_assert (analyze_body);
1228 error_at (DECL_SOURCE_LOCATION (node->decl),
1229 "%qD requires %<strub%>,"
1230 " but no viable %<strub%> mode was found",
1231 node->decl);
1232 break;
1234 /* Fall through. */
1236 case STRUB_AT_CALLS:
1237 case STRUB_INTERNAL:
1238 case STRUB_INLINABLE:
1239 /* Differences from an mode requested through a function attribute are
1240 reported in set_strub_mode_to. */
1241 break;
1243 case STRUB_AT_CALLS_OPT:
1244 /* Functions that select this mode do so because of references to strub
1245 variables. Even if we choose at-calls as an optimization, the
1246 requirements for internal strub must still be satisfied. Optimization
1247 options may render implicit at-calls strub not viable (-O0 sets
1248 force_output for static non-inline functions), and it would not be good
1249 if changing optimization options turned a well-formed into an
1250 ill-formed one. */
1251 if (!internal_viable)
1252 can_strub_internally_p (node, true);
1253 break;
1255 case STRUB_WRAPPED:
1256 case STRUB_WRAPPER:
1257 default:
1258 gcc_unreachable ();
1261 return mode;
1264 /* Set FNDT's strub mode to MODE; FNDT may be a function decl or
1265 function type. If OVERRIDE, do not check whether a mode is already
1266 set. */
1268 static void
1269 strub_set_fndt_mode_to (tree fndt, enum strub_mode mode, bool override)
1271 gcc_checking_assert (override
1272 || !(DECL_P (fndt)
1273 ? get_strub_attr_from_decl (fndt)
1274 : get_strub_attr_from_type (fndt)));
1276 tree attr = tree_cons (get_identifier ("strub"),
1277 get_strub_mode_attr_value (mode),
1278 NULL_TREE);
1279 tree *attrp = NULL;
1280 if (DECL_P (fndt))
1282 gcc_checking_assert (FUNC_OR_METHOD_TYPE_P (TREE_TYPE (fndt)));
1283 attrp = &DECL_ATTRIBUTES (fndt);
1285 else if (FUNC_OR_METHOD_TYPE_P (fndt))
1286 attrp = &TYPE_ATTRIBUTES (fndt);
1287 else
1288 gcc_unreachable ();
1290 TREE_CHAIN (attr) = *attrp;
1291 *attrp = attr;
1294 /* Set FNDT's strub mode to callable.
1295 FNDT may be a function decl or a function type. */
1297 void
1298 strub_make_callable (tree fndt)
1300 strub_set_fndt_mode_to (fndt, STRUB_CALLABLE, false);
1303 /* Set NODE to strub MODE. Report incompatibilities between MODE and the mode
1304 requested through explicit attributes, and cases of non-eligibility. */
1306 static void
1307 set_strub_mode_to (cgraph_node *node, enum strub_mode mode)
1309 tree attr = get_strub_attr_from_decl (node->decl);
1310 enum strub_mode req_mode = get_strub_mode_from_attr (attr);
1312 if (attr)
1314 /* Check for and report incompatible mode changes. */
1315 if (mode != req_mode
1316 && !(req_mode == STRUB_INTERNAL
1317 && (mode == STRUB_WRAPPED
1318 || mode == STRUB_WRAPPER))
1319 && !((req_mode == STRUB_INTERNAL
1320 || req_mode == STRUB_AT_CALLS
1321 || req_mode == STRUB_CALLABLE)
1322 && mode == STRUB_INLINABLE))
1324 error_at (DECL_SOURCE_LOCATION (node->decl),
1325 "%<strub%> mode %qE selected for %qD, when %qE was requested",
1326 get_strub_mode_attr_parm (mode),
1327 node->decl,
1328 get_strub_mode_attr_parm (req_mode));
1329 if (node->alias)
1331 cgraph_node *target = node->ultimate_alias_target ();
1332 if (target != node)
1333 error_at (DECL_SOURCE_LOCATION (target->decl),
1334 "the incompatible selection was determined"
1335 " by ultimate alias target %qD",
1336 target->decl);
1339 /* Report any incompatibilities with explicitly-requested strub. */
1340 switch (req_mode)
1342 case STRUB_AT_CALLS:
1343 can_strub_at_calls_p (node, true);
1344 break;
1346 case STRUB_INTERNAL:
1347 can_strub_internally_p (node, true);
1348 break;
1350 default:
1351 break;
1355 /* Drop any incompatible strub attributes leading the decl attribute
1356 chain. Return if we find one with the mode we need. */
1357 for (;;)
1359 if (mode == req_mode)
1360 return;
1362 if (DECL_ATTRIBUTES (node->decl) != attr)
1363 break;
1365 DECL_ATTRIBUTES (node->decl) = TREE_CHAIN (attr);
1366 attr = get_strub_attr_from_decl (node->decl);
1367 if (!attr)
1368 break;
1370 req_mode = get_strub_mode_from_attr (attr);
1373 else if (mode == req_mode)
1374 return;
1376 strub_set_fndt_mode_to (node->decl, mode, attr);
1379 /* Compute and set NODE's strub mode. */
1381 static void
1382 set_strub_mode (cgraph_node *node)
1384 tree attr = get_strub_attr_from_decl (node->decl);
1386 if (attr)
1387 switch (get_strub_mode_from_attr (attr))
1389 /* These can't have been requested through user attributes, so we must
1390 have already gone through them. */
1391 case STRUB_WRAPPER:
1392 case STRUB_WRAPPED:
1393 case STRUB_INLINABLE:
1394 case STRUB_AT_CALLS_OPT:
1395 return;
1397 case STRUB_DISABLED:
1398 case STRUB_AT_CALLS:
1399 case STRUB_INTERNAL:
1400 case STRUB_CALLABLE:
1401 break;
1403 default:
1404 gcc_unreachable ();
1407 cgraph_node *xnode = node;
1408 if (node->alias)
1409 xnode = node->ultimate_alias_target ();
1410 /* Weakrefs may remain unresolved (the above will return node) if
1411 their targets are not defined, so make sure we compute a strub
1412 mode for them, instead of defaulting to STRUB_DISABLED and
1413 rendering them uncallable. */
1414 enum strub_mode mode = (xnode != node && !xnode->alias
1415 ? get_strub_mode (xnode)
1416 : compute_strub_mode (node, attr));
1418 set_strub_mode_to (node, mode);
1422 /* Non-strub functions shouldn't be called from within strub contexts,
1423 except through callable ones. Always inline strub functions can
1424 only be called from strub functions. */
1426 static bool
1427 strub_callable_from_p (strub_mode caller_mode, strub_mode callee_mode)
1429 switch (caller_mode)
1431 case STRUB_WRAPPED:
1432 case STRUB_AT_CALLS_OPT:
1433 case STRUB_AT_CALLS:
1434 case STRUB_INTERNAL:
1435 case STRUB_INLINABLE:
1436 break;
1438 case STRUB_WRAPPER:
1439 case STRUB_DISABLED:
1440 case STRUB_CALLABLE:
1441 return callee_mode != STRUB_INLINABLE;
1443 default:
1444 gcc_unreachable ();
1447 switch (callee_mode)
1449 case STRUB_WRAPPED:
1450 case STRUB_AT_CALLS:
1451 case STRUB_INLINABLE:
1452 break;
1454 case STRUB_AT_CALLS_OPT:
1455 case STRUB_INTERNAL:
1456 case STRUB_WRAPPER:
1457 return (flag_strub >= -1);
1459 case STRUB_DISABLED:
1460 return false;
1462 case STRUB_CALLABLE:
1463 break;
1465 default:
1466 gcc_unreachable ();
1469 return true;
1472 /* Return TRUE iff CALLEE can be inlined into CALLER. We wish to avoid inlining
1473 WRAPPED functions back into their WRAPPERs. More generally, we wish to avoid
1474 inlining strubbed functions into non-strubbed ones. CALLER doesn't have to
1475 be an immediate caller of CALLEE: the immediate caller may have already been
1476 cloned for inlining, and then CALLER may be further up the original call
1477 chain. ??? It would be nice if our own caller would retry inlining callee
1478 if caller gets inlined. */
1480 bool
1481 strub_inlinable_to_p (cgraph_node *callee, cgraph_node *caller)
1483 strub_mode callee_mode = get_strub_mode (callee);
1485 switch (callee_mode)
1487 case STRUB_WRAPPED:
1488 case STRUB_AT_CALLS:
1489 case STRUB_INTERNAL:
1490 case STRUB_INLINABLE:
1491 case STRUB_AT_CALLS_OPT:
1492 break;
1494 case STRUB_WRAPPER:
1495 case STRUB_DISABLED:
1496 case STRUB_CALLABLE:
1497 /* When we consider inlining, we've already verified callability, so we
1498 can even inline callable and then disabled into a strub context. That
1499 will get strubbed along with the context, so it's hopefully not a
1500 problem. */
1501 return true;
1503 default:
1504 gcc_unreachable ();
1507 strub_mode caller_mode = get_strub_mode (caller);
1509 switch (caller_mode)
1511 case STRUB_WRAPPED:
1512 case STRUB_AT_CALLS:
1513 case STRUB_INTERNAL:
1514 case STRUB_INLINABLE:
1515 case STRUB_AT_CALLS_OPT:
1516 return true;
1518 case STRUB_WRAPPER:
1519 case STRUB_DISABLED:
1520 case STRUB_CALLABLE:
1521 break;
1523 default:
1524 gcc_unreachable ();
1527 return false;
1530 /* Check that types T1 and T2 are strub-compatible. Return 1 if the strub modes
1531 are the same, 2 if they are interchangeable, and 0 otherwise. */
1534 strub_comptypes (tree t1, tree t2)
1536 if (TREE_CODE (t1) != TREE_CODE (t2))
1537 return 0;
1539 enum strub_mode m1 = get_strub_mode_from_type (t1);
1540 enum strub_mode m2 = get_strub_mode_from_type (t2);
1542 if (m1 == m2)
1543 return 1;
1545 /* We're dealing with types, so only strub modes that can be selected by
1546 attributes in the front end matter. If either mode is at-calls (for
1547 functions) or internal (for variables), the conversion is not
1548 compatible. */
1549 bool var_p = !FUNC_OR_METHOD_TYPE_P (t1);
1550 enum strub_mode mr = var_p ? STRUB_INTERNAL : STRUB_AT_CALLS;
1551 if (m1 == mr || m2 == mr)
1552 return 0;
1554 return 2;
1557 /* Return the effective strub mode used for CALL, and set *TYPEP to
1558 the effective type used for the call. The effective type and mode
1559 are those of the callee, unless the call involves a typecast. */
1561 static enum strub_mode
1562 effective_strub_mode_for_call (gcall *call, tree *typep)
1564 tree type;
1565 enum strub_mode mode;
1567 if (strub_call_fntype_override_p (call))
1569 type = gimple_call_fntype (call);
1570 mode = get_strub_mode_from_type (type);
1572 else
1574 type = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));
1575 tree decl = gimple_call_fndecl (call);
1576 if (decl)
1577 mode = get_strub_mode_from_fndecl (decl);
1578 else
1579 mode = get_strub_mode_from_type (type);
1582 if (typep)
1583 *typep = type;
1585 return mode;
1588 /* Create a distinct copy of the type of NODE's function, and change
1589 the fntype of all calls to it with the same main type to the new
1590 type. */
1592 static void
1593 distinctify_node_type (cgraph_node *node)
1595 tree old_type = TREE_TYPE (node->decl);
1596 tree new_type = build_distinct_type_copy (old_type);
1597 tree new_ptr_type = NULL_TREE;
1599 /* Remap any calls to node->decl that use old_type, or a variant
1600 thereof, to new_type as well. We don't look for aliases, their
1601 declarations will have their types changed independently, and
1602 we'll adjust their fntypes then. */
1603 for (cgraph_edge *e = node->callers; e; e = e->next_caller)
1605 if (!e->call_stmt)
1606 continue;
1607 tree fnaddr = gimple_call_fn (e->call_stmt);
1608 gcc_checking_assert (TREE_CODE (fnaddr) == ADDR_EXPR
1609 && TREE_OPERAND (fnaddr, 0) == node->decl);
1610 if (strub_call_fntype_override_p (e->call_stmt))
1611 continue;
1612 if (!new_ptr_type)
1613 new_ptr_type = build_pointer_type (new_type);
1614 TREE_TYPE (fnaddr) = new_ptr_type;
1615 gimple_call_set_fntype (e->call_stmt, new_type);
1618 TREE_TYPE (node->decl) = new_type;
1621 /* Return TRUE iff TYPE and any variants have the same strub mode. */
1623 static bool
1624 same_strub_mode_in_variants_p (tree type)
1626 enum strub_mode mode = get_strub_mode_from_type (type);
1628 for (tree other = TYPE_MAIN_VARIANT (type);
1629 other != NULL_TREE; other = TYPE_NEXT_VARIANT (other))
1630 if (type != other && mode != get_strub_mode_from_type (other))
1631 return false;
1633 /* Check that the canonical type, if set, either is in the same
1634 variant chain, or has the same strub mode as type. Also check
1635 the variants of the canonical type. */
1636 if (TYPE_CANONICAL (type)
1637 && (TYPE_MAIN_VARIANT (TYPE_CANONICAL (type))
1638 != TYPE_MAIN_VARIANT (type)))
1640 if (mode != get_strub_mode_from_type (TYPE_CANONICAL (type)))
1641 return false;
1642 else
1643 return same_strub_mode_in_variants_p (TYPE_CANONICAL (type));
1646 return true;
1649 /* Check that strub functions don't call non-strub functions, and that
1650 always_inline strub functions are only called by strub
1651 functions. */
1653 static void
1654 verify_strub ()
1656 cgraph_node *node;
1658 /* It's expected that check strub-wise pointer type compatibility of variables
1659 and of functions is already taken care of by front-ends, on account of the
1660 attribute's being marked as affecting type identity and of the creation of
1661 distinct types. */
1663 /* Check that call targets in strub contexts have strub-callable types. */
1665 FOR_EACH_FUNCTION_WITH_GIMPLE_BODY (node)
1667 enum strub_mode caller_mode = get_strub_mode (node);
1669 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
1671 gcc_checking_assert (e->indirect_unknown_callee);
1673 if (!e->call_stmt)
1674 continue;
1676 enum strub_mode callee_mode
1677 = effective_strub_mode_for_call (e->call_stmt, NULL);
1679 if (!strub_callable_from_p (caller_mode, callee_mode))
1680 error_at (gimple_location (e->call_stmt),
1681 "indirect non-%<strub%> call in %<strub%> context %qD",
1682 node->decl);
1685 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
1687 gcc_checking_assert (!e->indirect_unknown_callee);
1689 if (!e->call_stmt)
1690 continue;
1692 tree callee_fntype;
1693 enum strub_mode callee_mode
1694 = effective_strub_mode_for_call (e->call_stmt, &callee_fntype);
1696 if (!strub_callable_from_p (caller_mode, callee_mode))
1698 if (callee_mode == STRUB_INLINABLE)
1699 error_at (gimple_location (e->call_stmt),
1700 "calling %<always_inline%> %<strub%> %qD"
1701 " in non-%<strub%> context %qD",
1702 e->callee->decl, node->decl);
1703 else if (fndecl_built_in_p (e->callee->decl, BUILT_IN_APPLY_ARGS)
1704 && caller_mode == STRUB_INTERNAL)
1705 /* This is ok, it will be kept in the STRUB_WRAPPER, and removed
1706 from the STRUB_WRAPPED's strub context. */
1707 continue;
1708 else if (!strub_call_fntype_override_p (e->call_stmt))
1709 error_at (gimple_location (e->call_stmt),
1710 "calling non-%<strub%> %qD in %<strub%> context %qD",
1711 e->callee->decl, node->decl);
1712 else
1713 error_at (gimple_location (e->call_stmt),
1714 "calling %qD using non-%<strub%> type %qT"
1715 " in %<strub%> context %qD",
1716 e->callee->decl, callee_fntype, node->decl);
1722 namespace {
1724 /* Define a pass to compute strub modes. */
1725 const pass_data pass_data_ipa_strub_mode = {
1726 SIMPLE_IPA_PASS,
1727 "strubm",
1728 OPTGROUP_NONE,
1729 TV_NONE,
1730 PROP_cfg, // properties_required
1731 0, // properties_provided
1732 0, // properties_destroyed
1733 0, // properties_start
1734 0, // properties_finish
1737 class pass_ipa_strub_mode : public simple_ipa_opt_pass
1739 public:
1740 pass_ipa_strub_mode (gcc::context *ctxt)
1741 : simple_ipa_opt_pass (pass_data_ipa_strub_mode, ctxt)
1743 opt_pass *clone () { return new pass_ipa_strub_mode (m_ctxt); }
1744 virtual bool gate (function *) {
1745 /* In relaxed (-3) and strict (-4) settings, that only enable strub at a
1746 function or variable attribute's request, the attribute handler changes
1747 flag_strub to -1 or -2, respectively, if any strub-enabling occurence of
1748 the attribute is found. Therefore, if it remains at -3 or -4, nothing
1749 that would enable strub was found, so we can disable it and avoid the
1750 overhead. */
1751 if (flag_strub < -2)
1752 flag_strub = 0;
1753 return flag_strub;
1755 virtual unsigned int execute (function *);
1758 /* Define a pass to introduce strub transformations. */
1759 const pass_data pass_data_ipa_strub = {
1760 SIMPLE_IPA_PASS,
1761 "strub",
1762 OPTGROUP_NONE,
1763 TV_NONE,
1764 PROP_cfg | PROP_ssa, // properties_required
1765 0, // properties_provided
1766 0, // properties_destroyed
1767 0, // properties_start
1768 TODO_update_ssa
1769 | TODO_cleanup_cfg
1770 | TODO_rebuild_cgraph_edges
1771 | TODO_verify_il, // properties_finish
1774 class pass_ipa_strub : public simple_ipa_opt_pass
1776 public:
1777 pass_ipa_strub (gcc::context *ctxt)
1778 : simple_ipa_opt_pass (pass_data_ipa_strub, ctxt)
1780 opt_pass *clone () { return new pass_ipa_strub (m_ctxt); }
1781 virtual bool gate (function *) { return flag_strub && !seen_error (); }
1782 virtual unsigned int execute (function *);
1784 /* Define on demand and cache some types we use often. */
1785 #define DEF_TYPE(IDX, NAME, INIT) \
1786 static inline tree get_ ## NAME () { \
1787 int idx = STRUB_TYPE_BASE + IDX; \
1788 static tree type = strub_cache[idx]; \
1789 if (!type) \
1790 strub_cache[idx] = type = (INIT); \
1791 return type; \
1794 /* Use a distinct ptr_type_node to denote the watermark, so that we can
1795 recognize it in arg lists and avoid modifying types twice. */
1796 DEF_TYPE (0, wmt, build_variant_type_copy (ptr_type_node))
1798 DEF_TYPE (1, pwmt, build_reference_type (get_wmt ()))
1800 DEF_TYPE (2, qpwmt,
1801 build_qualified_type (get_pwmt (),
1802 TYPE_QUAL_RESTRICT
1803 /* | TYPE_QUAL_CONST */))
1805 DEF_TYPE (3, qptr,
1806 build_qualified_type (ptr_type_node,
1807 TYPE_QUAL_RESTRICT
1808 | TYPE_QUAL_CONST))
1810 DEF_TYPE (4, qpvalst,
1811 build_qualified_type (build_reference_type
1812 (va_list_type_node),
1813 TYPE_QUAL_RESTRICT
1814 /* | TYPE_QUAL_CONST */))
1816 #undef DEF_TYPE
1818 /* Define non-strub builtins on demand. */
1819 #define DEF_NM_BUILTIN(NAME, CODE, FNTYPELIST) \
1820 static tree get_ ## NAME () { \
1821 tree decl = builtin_decl_explicit (CODE); \
1822 if (!decl) \
1824 tree type = build_function_type_list FNTYPELIST; \
1825 decl = add_builtin_function \
1826 ("__builtin_" #NAME, \
1827 type, CODE, BUILT_IN_NORMAL, \
1828 NULL, NULL); \
1829 TREE_NOTHROW (decl) = true; \
1830 set_builtin_decl ((CODE), decl, true); \
1832 return decl; \
1835 DEF_NM_BUILTIN (stack_address,
1836 BUILT_IN_STACK_ADDRESS,
1837 (ptr_type_node, NULL))
1839 #undef DEF_NM_BUILTIN
1841 /* Define strub builtins on demand. */
1842 #define DEF_SS_BUILTIN(NAME, FNSPEC, CODE, FNTYPELIST) \
1843 static tree get_ ## NAME () { \
1844 tree decl = builtin_decl_explicit (CODE); \
1845 if (!decl) \
1847 tree type = build_function_type_list FNTYPELIST; \
1848 tree attrs = NULL; \
1849 if (FNSPEC) \
1850 attrs = tree_cons (get_identifier ("fn spec"), \
1851 build_tree_list \
1852 (NULL_TREE, \
1853 build_string (strlen (FNSPEC), \
1854 (FNSPEC))), \
1855 attrs); \
1856 decl = add_builtin_function_ext_scope \
1857 ("__builtin___strub_" #NAME, \
1858 type, CODE, BUILT_IN_NORMAL, \
1859 "__strub_" #NAME, attrs); \
1860 TREE_NOTHROW (decl) = true; \
1861 set_builtin_decl ((CODE), decl, true); \
1863 return decl; \
1866 DEF_SS_BUILTIN (enter, ". Ot",
1867 BUILT_IN___STRUB_ENTER,
1868 (void_type_node, get_qpwmt (), NULL))
1869 DEF_SS_BUILTIN (update, ". Wt",
1870 BUILT_IN___STRUB_UPDATE,
1871 (void_type_node, get_qpwmt (), NULL))
1872 DEF_SS_BUILTIN (leave, ". w ",
1873 BUILT_IN___STRUB_LEAVE,
1874 (void_type_node, get_qpwmt (), NULL))
1876 #undef DEF_SS_BUILTIN
1878 /* Define strub identifiers on demand. */
1879 #define DEF_IDENT(IDX, NAME) \
1880 static inline tree get_ ## NAME () { \
1881 int idx = STRUB_IDENT_BASE + IDX; \
1882 tree identifier = strub_cache[idx]; \
1883 if (!identifier) \
1884 strub_cache[idx] = identifier = get_identifier (".strub." #NAME); \
1885 return identifier; \
1888 DEF_IDENT (0, watermark_ptr)
1889 DEF_IDENT (1, va_list_ptr)
1890 DEF_IDENT (2, apply_args)
1892 #undef DEF_IDENT
1894 static inline int adjust_at_calls_type (tree);
1895 static inline void adjust_at_calls_call (cgraph_edge *, int, tree);
1896 static inline void adjust_at_calls_calls (cgraph_node *);
1898 /* Add to SEQ a call to the strub watermark update builtin, taking NODE's
1899 location if given. Optionally add the corresponding edge from NODE, with
1900 execution frequency COUNT. Return the modified SEQ. */
1902 static inline gimple_seq
1903 call_update_watermark (tree wmptr, cgraph_node *node, profile_count count,
1904 gimple_seq seq = NULL)
1906 tree uwm = get_update ();
1907 gcall *update = gimple_build_call (uwm, 1, wmptr);
1908 if (node)
1909 gimple_set_location (update, DECL_SOURCE_LOCATION (node->decl));
1910 gimple_seq_add_stmt (&seq, update);
1911 if (node)
1912 node->create_edge (cgraph_node::get_create (uwm), update, count, false);
1913 return seq;
1918 } // anon namespace
1920 /* Gather with this type a collection of parameters that we're turning into
1921 explicit references. */
1923 typedef hash_set<tree> indirect_parms_t;
1925 /* Dereference OP's incoming turned-into-reference parm if it's an
1926 INDIRECT_PARMS or an ADDR_EXPR thereof. Set *REC and return according to
1927 gimple-walking expectations. */
1929 static tree
1930 maybe_make_indirect (indirect_parms_t &indirect_parms, tree op, int *rec)
1932 if (DECL_P (op))
1934 *rec = 0;
1935 if (indirect_parms.contains (op))
1937 tree ret = gimple_fold_indirect_ref (op);
1938 if (!ret)
1939 ret = build2 (MEM_REF,
1940 TREE_TYPE (TREE_TYPE (op)),
1942 build_int_cst (TREE_TYPE (op), 0));
1943 return ret;
1946 else if (TREE_CODE (op) == ADDR_EXPR
1947 && DECL_P (TREE_OPERAND (op, 0)))
1949 *rec = 0;
1950 if (indirect_parms.contains (TREE_OPERAND (op, 0)))
1952 op = TREE_OPERAND (op, 0);
1953 return op;
1957 return NULL_TREE;
1960 /* A gimple-walking function that adds dereferencing to indirect parms. */
1962 static tree
1963 walk_make_indirect (tree *op, int *rec, void *arg)
1965 walk_stmt_info *wi = (walk_stmt_info *)arg;
1966 indirect_parms_t &indirect_parms = *(indirect_parms_t *)wi->info;
1968 if (!*op || TYPE_P (*op))
1970 *rec = 0;
1971 return NULL_TREE;
1974 if (tree repl = maybe_make_indirect (indirect_parms, *op, rec))
1976 *op = repl;
1977 wi->changed = true;
1980 return NULL_TREE;
1983 /* A gimple-walking function that turns any non-gimple-val ADDR_EXPRs into a
1984 separate SSA. Though addresses of e.g. parameters, and of members thereof,
1985 are gimple vals, turning parameters into references, with an extra layer of
1986 indirection and thus explicit dereferencing, need to be regimplified. */
1988 static tree
1989 walk_regimplify_addr_expr (tree *op, int *rec, void *arg)
1991 walk_stmt_info *wi = (walk_stmt_info *)arg;
1992 gimple_stmt_iterator &gsi = *(gimple_stmt_iterator *)wi->info;
1994 *rec = 0;
1996 if (!*op || TREE_CODE (*op) != ADDR_EXPR)
1997 return NULL_TREE;
1999 if (!is_gimple_val (*op))
2001 tree ret = force_gimple_operand_gsi (&gsi, *op, true,
2002 NULL_TREE, true, GSI_SAME_STMT);
2003 gcc_assert (ret != *op);
2004 *op = ret;
2005 wi->changed = true;
2008 return NULL_TREE;
2011 /* Turn STMT's PHI arg defs into separate SSA defs if they've become
2012 non-gimple_val. Return TRUE if any edge insertions need to be committed. */
2014 static bool
2015 walk_regimplify_phi (gphi *stmt)
2017 bool needs_commit = false;
2019 for (unsigned i = 0, n = gimple_phi_num_args (stmt); i < n; i++)
2021 tree op = gimple_phi_arg_def (stmt, i);
2022 if ((TREE_CODE (op) == ADDR_EXPR
2023 && !is_gimple_val (op))
2024 /* ??? A PARM_DECL that was addressable in the original function and
2025 had its address in PHI nodes, but that became a reference in the
2026 wrapped clone would NOT be updated by update_ssa in PHI nodes.
2027 Alas, if we were to create a default def for it now, update_ssa
2028 would complain that the symbol that needed rewriting already has
2029 SSA names associated with it. OTOH, leaving the PARM_DECL alone,
2030 it eventually causes errors because it remains unchanged in PHI
2031 nodes, but it gets rewritten as expected if it appears in other
2032 stmts. So we cheat a little here, and force the PARM_DECL out of
2033 the PHI node and into an assignment. It's a little expensive,
2034 because we insert it at the edge, which introduces a basic block
2035 that's entirely unnecessary, but it works, and the block will be
2036 removed as the default def gets propagated back into the PHI node,
2037 so the final optimized code looks just as expected. */
2038 || (TREE_CODE (op) == PARM_DECL
2039 && !TREE_ADDRESSABLE (op)))
2041 tree temp = make_ssa_name (TREE_TYPE (op), stmt);
2042 if (TREE_CODE (op) == PARM_DECL)
2043 SET_SSA_NAME_VAR_OR_IDENTIFIER (temp, DECL_NAME (op));
2044 SET_PHI_ARG_DEF (stmt, i, temp);
2046 gimple *assign = gimple_build_assign (temp, op);
2047 if (gimple_phi_arg_has_location (stmt, i))
2048 gimple_set_location (assign, gimple_phi_arg_location (stmt, i));
2049 gsi_insert_on_edge (gimple_phi_arg_edge (stmt, i), assign);
2050 needs_commit = true;
2054 return needs_commit;
2057 /* Create a reference type to use for PARM when turning it into a
2058 reference. */
2060 static tree
2061 build_ref_type_for (tree parm)
2063 gcc_checking_assert (TREE_CODE (parm) == PARM_DECL);
2065 tree ref_type = build_reference_type (TREE_TYPE (parm));
2067 return ref_type;
2070 /* Add cgraph edges from current_function_decl to callees in SEQ with frequency
2071 COUNT, assuming all calls in SEQ are direct. */
2073 static void
2074 add_call_edges_for_seq (gimple_seq seq, profile_count count)
2076 cgraph_node *node = cgraph_node::get_create (current_function_decl);
2078 for (gimple_stmt_iterator gsi = gsi_start (seq);
2079 !gsi_end_p (gsi); gsi_next (&gsi))
2081 gimple *stmt = gsi_stmt (gsi);
2083 gcall *call = dyn_cast <gcall *> (stmt);
2084 if (!call)
2085 continue;
2087 tree callee = gimple_call_fndecl (call);
2088 gcc_checking_assert (callee);
2089 node->create_edge (cgraph_node::get_create (callee), call, count, false);
2093 /* Insert SEQ after the call at GSI, as if the call was in a try block with SEQ
2094 as finally, i.e., SEQ will run after the call whether it returns or
2095 propagates an exception. This handles block splitting, EH edge and block
2096 creation, noreturn and nothrow optimizations, and even throwing calls without
2097 preexisting local handlers. */
2099 static void
2100 gsi_insert_finally_seq_after_call (gimple_stmt_iterator gsi, gimple_seq seq)
2102 if (!seq)
2103 return;
2105 gimple *stmt = gsi_stmt (gsi);
2107 if (gimple_has_location (stmt))
2108 annotate_all_with_location (seq, gimple_location (stmt));
2110 gcall *call = dyn_cast <gcall *> (stmt);
2111 bool noreturn_p = call && gimple_call_noreturn_p (call);
2112 int eh_lp = lookup_stmt_eh_lp (stmt);
2113 bool must_not_throw_p = eh_lp < 0;
2114 bool nothrow_p = (must_not_throw_p
2115 || (call && gimple_call_nothrow_p (call))
2116 || (eh_lp <= 0
2117 && (TREE_NOTHROW (cfun->decl)
2118 || !opt_for_fn (cfun->decl, flag_exceptions))));
2120 if (noreturn_p && nothrow_p)
2121 return;
2123 /* Don't expect an EH edge if we're not to throw, or if we're not in an EH
2124 region yet. */
2125 bool no_eh_edge_p = (nothrow_p || !eh_lp);
2126 bool must_end_bb = stmt_ends_bb_p (stmt);
2128 edge eft = NULL, eeh = NULL;
2129 if (must_end_bb && !(noreturn_p && no_eh_edge_p))
2131 gcc_checking_assert (gsi_one_before_end_p (gsi));
2133 edge e;
2134 edge_iterator ei;
2135 FOR_EACH_EDGE (e, ei, gsi_bb (gsi)->succs)
2137 if ((e->flags & EDGE_EH))
2139 gcc_checking_assert (!eeh);
2140 eeh = e;
2141 #if !CHECKING_P
2142 if (eft || noreturn_p)
2143 break;
2144 #endif
2146 if ((e->flags & EDGE_FALLTHRU))
2148 gcc_checking_assert (!eft);
2149 eft = e;
2150 #if !CHECKING_P
2151 if (eeh || no_eh_edge_p)
2152 break;
2153 #endif
2157 gcc_checking_assert (!(eft && (eft->flags & EDGE_FALLTHRU))
2158 == noreturn_p);
2159 gcc_checking_assert (!(eeh && (eeh->flags & EDGE_EH))
2160 == no_eh_edge_p);
2161 gcc_checking_assert (eft != eeh);
2164 if (!noreturn_p)
2166 gimple_seq nseq = nothrow_p ? seq : gimple_seq_copy (seq);
2168 if (must_end_bb)
2170 gcc_checking_assert (gsi_one_before_end_p (gsi));
2171 add_call_edges_for_seq (nseq, eft->count ());
2172 gsi_insert_seq_on_edge_immediate (eft, nseq);
2174 else
2176 add_call_edges_for_seq (nseq, gsi_bb (gsi)->count);
2177 gsi_insert_seq_after (&gsi, nseq, GSI_SAME_STMT);
2181 if (nothrow_p)
2182 return;
2184 if (eh_lp)
2186 add_call_edges_for_seq (seq, eeh->count ());
2187 gsi_insert_seq_on_edge_immediate (eeh, seq);
2188 return;
2191 /* A throwing call may appear within a basic block in a function that doesn't
2192 have any EH regions. We're going to add a cleanup if so, therefore the
2193 block will have to be split. */
2194 basic_block bb = gsi_bb (gsi);
2195 if (!gsi_one_before_end_p (gsi))
2196 split_block (bb, stmt);
2198 /* Create a new block for the EH cleanup. */
2199 basic_block bb_eh_cleanup = create_empty_bb (bb);
2200 if (dom_info_available_p (CDI_DOMINATORS))
2201 set_immediate_dominator (CDI_DOMINATORS, bb_eh_cleanup, bb);
2202 if (current_loops)
2203 add_bb_to_loop (bb_eh_cleanup, current_loops->tree_root);
2205 /* Make the new block an EH cleanup for the call. */
2206 eh_region new_r = gen_eh_region_cleanup (NULL);
2207 eh_landing_pad lp = gen_eh_landing_pad (new_r);
2208 tree label = gimple_block_label (bb_eh_cleanup);
2209 lp->post_landing_pad = label;
2210 EH_LANDING_PAD_NR (label) = lp->index;
2211 add_stmt_to_eh_lp (stmt, lp->index);
2213 /* Add the cleanup code to the EH cleanup block. */
2214 gsi = gsi_after_labels (bb_eh_cleanup);
2215 gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
2217 /* And then propagate the exception further. */
2218 gresx *resx = gimple_build_resx (new_r->index);
2219 if (gimple_has_location (stmt))
2220 gimple_set_location (resx, gimple_location (stmt));
2221 gsi_insert_before (&gsi, resx, GSI_SAME_STMT);
2223 /* Finally, wire the EH cleanup block into the CFG. */
2224 edge neeh = make_eh_edge (stmt);
2225 neeh->probability = profile_probability::never ();
2226 gcc_checking_assert (neeh->dest == bb_eh_cleanup);
2227 gcc_checking_assert (!neeh->dest->count.initialized_p ());
2228 neeh->dest->count = neeh->count ();
2229 add_call_edges_for_seq (seq, neeh->dest->count);
2232 /* Copy the attribute list at *ATTRS, minus any NAME attributes, leaving
2233 shareable trailing nodes alone. */
2235 static inline void
2236 remove_named_attribute_unsharing (const char *name, tree *attrs)
2238 while (tree found = lookup_attribute (name, *attrs))
2240 /* Copy nodes up to the next NAME attribute. */
2241 while (*attrs != found)
2243 *attrs = tree_cons (TREE_PURPOSE (*attrs),
2244 TREE_VALUE (*attrs),
2245 TREE_CHAIN (*attrs));
2246 attrs = &TREE_CHAIN (*attrs);
2248 /* Then drop it. */
2249 gcc_checking_assert (*attrs == found);
2250 *attrs = TREE_CHAIN (*attrs);
2254 /* Record the order of the last cgraph entry whose mode we've already set, so
2255 that we can perform mode setting incrementally without duplication. */
2256 static int last_cgraph_order;
2258 /* Set strub modes for functions introduced since the last call. */
2260 static void
2261 ipa_strub_set_mode_for_new_functions ()
2263 if (symtab->order == last_cgraph_order)
2264 return;
2266 cgraph_node *node;
2268 /* Go through the functions twice, once over non-aliases, and then over
2269 aliases, so that aliases can reuse the mode computation of their ultimate
2270 targets. */
2271 for (int aliases = 0; aliases <= 1; aliases++)
2272 FOR_EACH_FUNCTION (node)
2274 if (!node->alias != !aliases)
2275 continue;
2277 /* Already done. */
2278 if (node->order < last_cgraph_order)
2279 continue;
2281 set_strub_mode (node);
2284 last_cgraph_order = symtab->order;
2287 /* Return FALSE if NODE is a strub context, and TRUE otherwise. */
2289 bool
2290 strub_splittable_p (cgraph_node *node)
2292 switch (get_strub_mode (node))
2294 case STRUB_WRAPPED:
2295 case STRUB_AT_CALLS:
2296 case STRUB_AT_CALLS_OPT:
2297 case STRUB_INLINABLE:
2298 case STRUB_INTERNAL:
2299 case STRUB_WRAPPER:
2300 return false;
2302 case STRUB_CALLABLE:
2303 case STRUB_DISABLED:
2304 break;
2306 default:
2307 gcc_unreachable ();
2310 return true;
2313 /* Return the PARM_DECL of the incoming watermark pointer, if there is one. */
2315 tree
2316 strub_watermark_parm (tree fndecl)
2318 switch (get_strub_mode_from_fndecl (fndecl))
2320 case STRUB_WRAPPED:
2321 case STRUB_AT_CALLS:
2322 case STRUB_AT_CALLS_OPT:
2323 break;
2325 case STRUB_INTERNAL:
2326 case STRUB_WRAPPER:
2327 case STRUB_CALLABLE:
2328 case STRUB_DISABLED:
2329 case STRUB_INLINABLE:
2330 return NULL_TREE;
2332 default:
2333 gcc_unreachable ();
2336 for (tree parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
2337 /* The type (variant) compare finds the parameter even in a just-created
2338 clone, before we set its name, but the type-based compare doesn't work
2339 during builtin expansion within the lto compiler, because we'll have
2340 created a separate variant in that run. */
2341 if (TREE_TYPE (parm) == pass_ipa_strub::get_qpwmt ()
2342 || DECL_NAME (parm) == pass_ipa_strub::get_watermark_ptr ())
2343 return parm;
2345 gcc_unreachable ();
2348 /* Adjust a STRUB_AT_CALLS function TYPE, adding a watermark pointer if it
2349 hasn't been added yet. Return the named argument count. */
2352 pass_ipa_strub::adjust_at_calls_type (tree type)
2354 int named_args = 0;
2356 gcc_checking_assert (same_strub_mode_in_variants_p (type));
2358 if (!TYPE_ARG_TYPES (type))
2359 return named_args;
2361 tree *tlist = &TYPE_ARG_TYPES (type);
2362 tree qpwmptrt = get_qpwmt ();
2363 while (*tlist && TREE_VALUE (*tlist) != void_type_node)
2365 /* The type has already been adjusted. */
2366 if (TREE_VALUE (*tlist) == qpwmptrt)
2367 return named_args;
2368 named_args++;
2369 *tlist = tree_cons (TREE_PURPOSE (*tlist),
2370 TREE_VALUE (*tlist),
2371 TREE_CHAIN (*tlist));
2372 tlist = &TREE_CHAIN (*tlist);
2375 /* Add the new argument after all named arguments, so as to not mess with
2376 attributes that reference parameters. */
2377 *tlist = tree_cons (NULL_TREE, get_qpwmt (), *tlist);
2379 #if ATTR_FNSPEC_DECONST_WATERMARK
2380 if (!type_already_adjusted)
2382 int flags = flags_from_decl_or_type (type);
2383 tree fnspec = lookup_attribute ("fn spec", type);
2385 if ((flags & (ECF_CONST | ECF_PURE | ECF_NOVOPS)) || fnspec)
2387 size_t xargs = 1;
2388 size_t curlen = 0, tgtlen = 2 + 2 * (named_args + xargs);
2389 auto_vec<char> nspecv (tgtlen);
2390 char *nspec = &nspecv[0]; /* It will *not* be NUL-terminated! */
2391 if (fnspec)
2393 tree fnspecstr = TREE_VALUE (TREE_VALUE (fnspec));
2394 curlen = TREE_STRING_LENGTH (fnspecstr);
2395 memcpy (nspec, TREE_STRING_POINTER (fnspecstr), curlen);
2397 if (!curlen)
2399 nspec[curlen++] = '.';
2400 nspec[curlen++] = ((flags & ECF_CONST)
2401 ? 'c'
2402 : (flags & ECF_PURE)
2403 ? 'p'
2404 : ' ');
2406 while (curlen < tgtlen - 2 * xargs)
2408 nspec[curlen++] = '.';
2409 nspec[curlen++] = ' ';
2411 nspec[curlen++] = 'W';
2412 nspec[curlen++] = 't';
2414 /* The type has already been copied, if needed, before adding
2415 parameters. */
2416 TYPE_ATTRIBUTES (type)
2417 = tree_cons (get_identifier ("fn spec"),
2418 build_tree_list (NULL_TREE,
2419 build_string (tgtlen, nspec)),
2420 TYPE_ATTRIBUTES (type));
2423 #endif
2425 return named_args;
2428 /* Adjust a call to an at-calls call target. Create a watermark local variable
2429 if needed, initialize it before, pass it to the callee according to the
2430 modified at-calls interface, and release the callee's stack space after the
2431 call, if not deferred. If the call is const or pure, arrange for the
2432 watermark to not be assumed unused or unchanged. */
2434 void
2435 pass_ipa_strub::adjust_at_calls_call (cgraph_edge *e, int named_args,
2436 tree callee_fntype)
2438 gcc_checking_assert (e->call_stmt);
2439 gcall *ocall = e->call_stmt;
2440 gimple_stmt_iterator gsi = gsi_for_stmt (ocall);
2442 /* Make sure we haven't modified this call yet. */
2443 gcc_checking_assert (!(int (gimple_call_num_args (ocall)) > named_args
2444 && (TREE_TYPE (gimple_call_arg (ocall, named_args))
2445 == get_pwmt ())));
2447 tree tsup;
2448 if (!(tsup = gimple_call_fndecl (ocall)))
2449 tsup = TREE_TYPE (TREE_TYPE (gimple_call_fn (ocall)));
2450 if (!strub_target_support_p (tsup, true, gimple_location (ocall)))
2451 return;
2453 /* If we're already within a strub context, pass on the incoming watermark
2454 pointer, and omit the enter and leave calls around the modified call, as an
2455 optimization, or as a means to satisfy a tail-call requirement. */
2456 tree swmp = ((opt_for_fn (e->caller->decl, optimize_size)
2457 || opt_for_fn (e->caller->decl, optimize) > 2
2458 || gimple_call_must_tail_p (ocall)
2459 || (opt_for_fn (e->caller->decl, optimize) == 2
2460 && gimple_call_tail_p (ocall)))
2461 ? strub_watermark_parm (e->caller->decl)
2462 : NULL_TREE);
2463 bool omit_own_watermark = swmp;
2464 tree swm = NULL_TREE;
2465 if (!omit_own_watermark)
2467 swm = create_tmp_var (get_wmt (), ".strub.watermark");
2468 TREE_ADDRESSABLE (swm) = true;
2469 swmp = build1 (ADDR_EXPR, get_pwmt (), swm);
2471 /* Initialize the watermark before the call. */
2472 tree enter = get_enter ();
2473 gcall *stptr = gimple_build_call (enter, 1,
2474 unshare_expr (swmp));
2475 if (gimple_has_location (ocall))
2476 gimple_set_location (stptr, gimple_location (ocall));
2477 gsi_insert_before (&gsi, stptr, GSI_SAME_STMT);
2478 e->caller->create_edge (cgraph_node::get_create (enter),
2479 stptr, gsi_bb (gsi)->count, false);
2483 /* Replace the call with one that passes the swmp argument first. */
2484 gcall *wrcall;
2485 { gcall *stmt = ocall;
2486 // Mostly copied from gimple_call_copy_skip_args.
2487 int i = 0;
2488 int nargs = gimple_call_num_args (stmt);
2489 auto_vec<tree> vargs (MAX (nargs, named_args) + 1);
2490 gcall *new_stmt;
2492 /* pr71109.c calls a prototypeless function, then defines it with
2493 additional arguments. It's ill-formed, but after it's inlined,
2494 it somehow works out. */
2495 for (; i < named_args && i < nargs; i++)
2496 vargs.quick_push (gimple_call_arg (stmt, i));
2497 for (; i < named_args; i++)
2498 vargs.quick_push (null_pointer_node);
2500 vargs.quick_push (unshare_expr (swmp));
2502 for (; i < nargs; i++)
2503 vargs.quick_push (gimple_call_arg (stmt, i));
2505 if (gimple_call_internal_p (stmt))
2506 gcc_unreachable ();
2507 else
2508 new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
2509 gimple_call_set_fntype (new_stmt, callee_fntype);
2511 if (gimple_call_lhs (stmt))
2512 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
2514 gimple_move_vops (new_stmt, stmt);
2516 if (gimple_has_location (stmt))
2517 gimple_set_location (new_stmt, gimple_location (stmt));
2518 gimple_call_copy_flags (new_stmt, stmt);
2519 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2521 gimple_set_modified (new_stmt, true);
2523 wrcall = new_stmt;
2526 update_stmt (wrcall);
2527 gsi_replace (&gsi, wrcall, true);
2528 cgraph_edge::set_call_stmt (e, wrcall, false);
2530 /* Insert the strub code after the call. */
2531 gimple_seq seq = NULL;
2533 #if !ATTR_FNSPEC_DECONST_WATERMARK
2534 /* If the call will be assumed to not modify or even read the
2535 watermark, make it read and modified ourselves. */
2536 if ((gimple_call_flags (wrcall)
2537 & (ECF_CONST | ECF_PURE | ECF_NOVOPS)))
2539 if (!swm)
2540 swm = build2 (MEM_REF,
2541 TREE_TYPE (TREE_TYPE (swmp)),
2542 swmp,
2543 build_int_cst (TREE_TYPE (swmp), 0));
2545 vec<tree, va_gc> *inputs = NULL;
2546 vec<tree, va_gc> *outputs = NULL;
2547 vec_safe_push (outputs,
2548 build_tree_list
2549 (build_tree_list
2550 (NULL_TREE, build_string (2, "=m")),
2551 unshare_expr (swm)));
2552 vec_safe_push (inputs,
2553 build_tree_list
2554 (build_tree_list
2555 (NULL_TREE, build_string (1, "m")),
2556 unshare_expr (swm)));
2557 gasm *forcemod = gimple_build_asm_vec ("", inputs, outputs,
2558 NULL, NULL);
2559 gimple_seq_add_stmt (&seq, forcemod);
2561 /* If the call will be assumed to not even read the watermark,
2562 make sure it is already in memory before the call. */
2563 if ((gimple_call_flags (wrcall) & ECF_CONST))
2565 vec<tree, va_gc> *inputs = NULL;
2566 vec_safe_push (inputs,
2567 build_tree_list
2568 (build_tree_list
2569 (NULL_TREE, build_string (1, "m")),
2570 unshare_expr (swm)));
2571 gasm *force_store = gimple_build_asm_vec ("", inputs, NULL,
2572 NULL, NULL);
2573 if (gimple_has_location (wrcall))
2574 gimple_set_location (force_store, gimple_location (wrcall));
2575 gsi_insert_before (&gsi, force_store, GSI_SAME_STMT);
2578 #endif
2580 if (!omit_own_watermark)
2582 gcall *sleave = gimple_build_call (get_leave (), 1,
2583 unshare_expr (swmp));
2584 gimple_seq_add_stmt (&seq, sleave);
2586 gassign *clobber = gimple_build_assign (swm,
2587 build_clobber
2588 (TREE_TYPE (swm)));
2589 gimple_seq_add_stmt (&seq, clobber);
2592 gsi_insert_finally_seq_after_call (gsi, seq);
2595 /* Adjust all at-calls calls in NODE. */
2597 void
2598 pass_ipa_strub::adjust_at_calls_calls (cgraph_node *node)
2600 /* Adjust unknown-callee indirect calls with STRUB_AT_CALLS types within
2601 onode. */
2602 if (node->indirect_calls)
2604 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2605 for (cgraph_edge *e = node->indirect_calls; e; e = e->next_callee)
2607 gcc_checking_assert (e->indirect_unknown_callee);
2609 if (!e->call_stmt)
2610 continue;
2612 tree callee_fntype;
2613 enum strub_mode callee_mode
2614 = effective_strub_mode_for_call (e->call_stmt, &callee_fntype);
2616 if (callee_mode != STRUB_AT_CALLS
2617 && callee_mode != STRUB_AT_CALLS_OPT)
2618 continue;
2620 int named_args = adjust_at_calls_type (callee_fntype);
2622 adjust_at_calls_call (e, named_args, callee_fntype);
2624 pop_cfun ();
2627 if (node->callees)
2629 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
2630 for (cgraph_edge *e = node->callees; e; e = e->next_callee)
2632 gcc_checking_assert (!e->indirect_unknown_callee);
2634 if (!e->call_stmt)
2635 continue;
2637 tree callee_fntype;
2638 enum strub_mode callee_mode
2639 = effective_strub_mode_for_call (e->call_stmt, &callee_fntype);
2641 if (callee_mode != STRUB_AT_CALLS
2642 && callee_mode != STRUB_AT_CALLS_OPT)
2643 continue;
2645 int named_args = adjust_at_calls_type (callee_fntype);
2647 adjust_at_calls_call (e, named_args, callee_fntype);
2649 pop_cfun ();
2653 /* The strubm (strub mode) pass computes a strub mode for each function in the
2654 call graph, and checks, before any inlining, that strub callability
2655 requirements in effect are satisfied. */
2657 unsigned int
2658 pass_ipa_strub_mode::execute (function *)
2660 last_cgraph_order = 0;
2661 ipa_strub_set_mode_for_new_functions ();
2663 /* Verify before any inlining or other transformations. */
2664 verify_strub ();
2666 return 0;
2669 /* Create a strub mode pass. */
2671 simple_ipa_opt_pass *
2672 make_pass_ipa_strub_mode (gcc::context *ctxt)
2674 return new pass_ipa_strub_mode (ctxt);
2677 /* The strub pass proper adjusts types, signatures, and at-calls calls, and
2678 splits internal-strub functions. */
2680 unsigned int
2681 pass_ipa_strub::execute (function *)
2683 cgraph_node *onode;
2685 ipa_strub_set_mode_for_new_functions ();
2687 /* First, adjust the signature of at-calls functions. We adjust types of
2688 at-calls functions first, so that we don't modify types in place unless
2689 strub is explicitly requested. */
2690 FOR_EACH_FUNCTION (onode)
2692 enum strub_mode mode = get_strub_mode (onode);
2694 if (mode == STRUB_AT_CALLS
2695 || mode == STRUB_AT_CALLS_OPT)
2697 /* Create a type variant if strubbing was not explicitly requested in
2698 the function type. */
2699 if (get_strub_mode_from_type (TREE_TYPE (onode->decl)) != mode)
2700 distinctify_node_type (onode);
2702 int named_args = adjust_at_calls_type (TREE_TYPE (onode->decl));
2704 /* An external function explicitly declared with strub won't have a
2705 body. Even with implicit at-calls strub, a function may have had its
2706 body removed after we selected the mode, and then we have nothing
2707 further to do. */
2708 if (!onode->has_gimple_body_p ())
2709 continue;
2711 tree *pargs = &DECL_ARGUMENTS (onode->decl);
2713 /* A noninterposable_alias reuses the same parm decl chain, don't add
2714 the parm twice. */
2715 bool aliased_parms = (onode->alias && *pargs
2716 && DECL_CONTEXT (*pargs) != onode->decl);
2718 if (aliased_parms)
2719 continue;
2721 for (int i = 0; i < named_args; i++)
2722 pargs = &DECL_CHAIN (*pargs);
2724 tree wmptr = build_decl (DECL_SOURCE_LOCATION (onode->decl),
2725 PARM_DECL,
2726 get_watermark_ptr (),
2727 get_qpwmt ());
2728 DECL_ARTIFICIAL (wmptr) = 1;
2729 DECL_ARG_TYPE (wmptr) = get_qpwmt ();
2730 DECL_CONTEXT (wmptr) = onode->decl;
2731 TREE_USED (wmptr) = 1;
2732 DECL_CHAIN (wmptr) = *pargs;
2733 *pargs = wmptr;
2735 if (onode->alias)
2736 continue;
2738 cgraph_node *nnode = onode;
2739 push_cfun (DECL_STRUCT_FUNCTION (nnode->decl));
2742 edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2743 gimple_seq seq = call_update_watermark (wmptr, nnode, e->src->count);
2744 gsi_insert_seq_on_edge_immediate (e, seq);
2747 if (DECL_STRUCT_FUNCTION (nnode->decl)->calls_alloca)
2749 basic_block bb;
2750 FOR_EACH_BB_FN (bb, cfun)
2751 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
2752 !gsi_end_p (gsi); gsi_next (&gsi))
2754 gimple *stmt = gsi_stmt (gsi);
2756 gcall *call = dyn_cast <gcall *> (stmt);
2758 if (!call)
2759 continue;
2761 if (gimple_alloca_call_p (call))
2763 /* Capture stack growth. */
2764 gimple_seq seq = call_update_watermark (wmptr, NULL,
2765 gsi_bb (gsi)
2766 ->count);
2767 gsi_insert_finally_seq_after_call (gsi, seq);
2772 pop_cfun ();
2776 FOR_EACH_FUNCTION (onode)
2778 if (!onode->has_gimple_body_p ())
2779 continue;
2781 enum strub_mode mode = get_strub_mode (onode);
2783 if (mode != STRUB_INTERNAL)
2785 adjust_at_calls_calls (onode);
2786 continue;
2789 bool is_stdarg = calls_builtin_va_start_p (onode);;
2790 bool apply_args = calls_builtin_apply_args_p (onode);
2792 vec<ipa_adjusted_param, va_gc> *nparms = NULL;
2793 unsigned j = 0;
2795 // The following loop copied from ipa-split.c:split_function.
2796 for (tree parm = DECL_ARGUMENTS (onode->decl);
2797 parm; parm = DECL_CHAIN (parm), j++)
2799 ipa_adjusted_param adj = {};
2800 adj.op = IPA_PARAM_OP_COPY;
2801 adj.base_index = j;
2802 adj.prev_clone_index = j;
2803 vec_safe_push (nparms, adj);
2806 if (apply_args)
2808 ipa_adjusted_param aaadj = {};
2809 aaadj.op = IPA_PARAM_OP_NEW;
2810 aaadj.type = get_qptr ();
2811 vec_safe_push (nparms, aaadj);
2814 if (is_stdarg)
2816 ipa_adjusted_param vladj = {};
2817 vladj.op = IPA_PARAM_OP_NEW;
2818 vladj.type = get_qpvalst ();
2819 vec_safe_push (nparms, vladj);
2822 ipa_adjusted_param wmadj = {};
2823 wmadj.op = IPA_PARAM_OP_NEW;
2824 wmadj.type = get_qpwmt ();
2825 vec_safe_push (nparms, wmadj);
2827 ipa_param_adjustments adj (nparms, -1, false);
2829 cgraph_node *nnode = onode->create_version_clone_with_body
2830 (auto_vec<cgraph_edge *> (0),
2831 NULL, &adj, NULL, NULL, "strub", NULL);
2833 if (!nnode)
2835 error_at (DECL_SOURCE_LOCATION (onode->decl),
2836 "failed to split %qD for %<strub%>",
2837 onode->decl);
2838 continue;
2841 onode->split_part = true;
2842 if (onode->calls_comdat_local)
2843 nnode->add_to_same_comdat_group (onode);
2845 set_strub_mode_to (onode, STRUB_WRAPPER);
2846 set_strub_mode_to (nnode, STRUB_WRAPPED);
2848 adjust_at_calls_calls (nnode);
2850 /* Decide which of the wrapped function's parms we want to turn into
2851 references to the argument passed to the wrapper. In general, we want to
2852 copy small arguments, and avoid copying large ones. Variable-sized array
2853 lengths given by other arguments, as in 20020210-1.c, would lead to
2854 problems if passed by value, after resetting the original function and
2855 dropping the length computation; passing them by reference works.
2856 DECL_BY_REFERENCE is *not* a substitute for this: it involves copying
2857 anyway, but performed at the caller. */
2858 indirect_parms_t indirect_nparms (3, false);
2859 unsigned adjust_ftype = 0;
2860 unsigned named_args = 0;
2861 for (tree parm = DECL_ARGUMENTS (onode->decl),
2862 nparm = DECL_ARGUMENTS (nnode->decl),
2863 nparmt = TYPE_ARG_TYPES (TREE_TYPE (nnode->decl));
2864 parm;
2865 named_args++,
2866 parm = DECL_CHAIN (parm),
2867 nparm = DECL_CHAIN (nparm),
2868 nparmt = nparmt ? TREE_CHAIN (nparmt) : NULL_TREE)
2869 if (TREE_THIS_VOLATILE (parm)
2870 || !(0 /* DECL_BY_REFERENCE (narg) */
2871 || is_gimple_reg_type (TREE_TYPE (nparm))
2872 || VECTOR_TYPE_P (TREE_TYPE (nparm))
2873 || TREE_CODE (TREE_TYPE (nparm)) == COMPLEX_TYPE
2874 || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (nparm)))
2875 && (tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (nparm)))
2876 <= 4 * UNITS_PER_WORD))))
2878 /* No point in indirecting pointer types. Presumably they
2879 won't ever pass the size-based test above, but check the
2880 assumption here, because getting this wrong would mess
2881 with attribute access and possibly others. We deal with
2882 fn spec below. */
2883 gcc_checking_assert (!POINTER_TYPE_P (TREE_TYPE (nparm)));
2885 indirect_nparms.add (nparm);
2887 /* ??? Is there any case in which it is not safe to suggest the parms
2888 turned indirect don't alias anything else? They are distinct,
2889 unaliased memory in the wrapper, and the wrapped can't possibly
2890 take pointers into them because none of the pointers passed to the
2891 wrapper can alias other incoming parameters passed by value, even
2892 if with transparent reference, and the wrapper doesn't take any
2893 extra parms that could point into wrapper's parms. So we can
2894 probably drop the TREE_ADDRESSABLE and keep the TRUE. */
2895 tree ref_type = build_ref_type_for (nparm);
2897 DECL_ARG_TYPE (nparm) = TREE_TYPE (nparm) = ref_type;
2898 relayout_decl (nparm);
2899 TREE_ADDRESSABLE (nparm) = 0;
2900 DECL_BY_REFERENCE (nparm) = 0;
2901 DECL_NOT_GIMPLE_REG_P (nparm) = 0;
2902 /* ??? This avoids mismatches in debug info bind stmts in
2903 e.g. a-chahan . */
2904 DECL_ABSTRACT_ORIGIN (nparm) = NULL;
2906 if (nparmt)
2907 adjust_ftype++;
2910 /* Also adjust the wrapped function type, if needed. */
2911 if (adjust_ftype)
2913 tree nftype = TREE_TYPE (nnode->decl);
2915 /* We always add at least one argument at the end of the signature, when
2916 cloning the function, so we don't expect to need to duplicate the
2917 type here. */
2918 gcc_checking_assert (TYPE_ARG_TYPES (nftype)
2919 != TYPE_ARG_TYPES (TREE_TYPE (onode->decl)));
2921 /* Check that fnspec still works for the modified function signature,
2922 and drop it otherwise. */
2923 bool drop_fnspec = false;
2924 tree fnspec = lookup_attribute ("fn spec", TYPE_ATTRIBUTES (nftype));
2925 attr_fnspec spec = fnspec ? attr_fnspec (fnspec) : attr_fnspec ("");
2927 unsigned retcopy;
2928 if (!(fnspec && spec.returns_arg (&retcopy)))
2929 retcopy = (unsigned) -1;
2931 unsigned i = 0;
2932 for (tree nparm = DECL_ARGUMENTS (nnode->decl),
2933 nparmt = TYPE_ARG_TYPES (nftype);
2934 adjust_ftype > 0;
2935 i++, nparm = DECL_CHAIN (nparm), nparmt = TREE_CHAIN (nparmt))
2936 if (indirect_nparms.contains (nparm))
2938 TREE_VALUE (nparmt) = TREE_TYPE (nparm);
2939 adjust_ftype--;
2941 if (fnspec && !drop_fnspec)
2943 if (i == retcopy)
2944 drop_fnspec = true;
2945 else if (spec.arg_specified_p (i))
2947 /* Properties that apply to pointers only must not be
2948 present, because we don't make pointers further
2949 indirect. */
2950 gcc_checking_assert
2951 (!spec.arg_max_access_size_given_by_arg_p (i, NULL));
2952 gcc_checking_assert (!spec.arg_copied_to_arg_p (i, NULL));
2954 /* Any claim of direct access only is invalidated by
2955 adding an indirection level. */
2956 if (spec.arg_direct_p (i))
2957 drop_fnspec = true;
2959 /* If there's a claim the argument is not read from, the
2960 added indirection invalidates it: if the argument is
2961 used at all, then the pointer will necessarily be
2962 read. */
2963 if (!spec.arg_maybe_read_p (i)
2964 && spec.arg_used_p (i))
2965 drop_fnspec = true;
2970 /* ??? Maybe we could adjust it instead. Note we don't need
2971 to mess with attribute access: pointer-typed parameters are
2972 not modified, so they can remain unchanged. */
2973 if (drop_fnspec)
2974 remove_named_attribute_unsharing ("fn spec",
2975 &TYPE_ATTRIBUTES (nftype));
2977 TREE_TYPE (nnode->decl) = nftype;
2980 #if ATTR_FNSPEC_DECONST_WATERMARK
2982 int flags = flags_from_decl_or_type (nnode->decl);
2983 tree fnspec = lookup_attribute ("fn spec", TREE_TYPE (nnode->decl));
2985 if ((flags & (ECF_CONST | ECF_PURE | ECF_NOVOPS)) || fnspec)
2987 size_t xargs = 1 + int (is_stdarg) + int (apply_args);
2988 size_t curlen = 0, tgtlen = 2 + 2 * (named_args + xargs);
2989 auto_vec<char> nspecv (tgtlen);
2990 char *nspec = &nspecv[0]; /* It will *not* be NUL-terminated! */
2991 bool no_writes_p = true;
2992 if (fnspec)
2994 tree fnspecstr = TREE_VALUE (TREE_VALUE (fnspec));
2995 curlen = TREE_STRING_LENGTH (fnspecstr);
2996 memcpy (nspec, TREE_STRING_POINTER (fnspecstr), curlen);
2997 if (!(flags & (ECF_CONST | ECF_PURE | ECF_NOVOPS))
2998 && curlen >= 2
2999 && nspec[1] != 'c' && nspec[1] != 'C'
3000 && nspec[1] != 'p' && nspec[1] != 'P')
3001 no_writes_p = false;
3003 if (!curlen)
3005 nspec[curlen++] = '.';
3006 nspec[curlen++] = ((flags & ECF_CONST)
3007 ? 'c'
3008 : (flags & ECF_PURE)
3009 ? 'p'
3010 : ' ');
3012 while (curlen < tgtlen - 2 * xargs)
3014 nspec[curlen++] = '.';
3015 nspec[curlen++] = ' ';
3018 /* These extra args are unlikely to be present in const or pure
3019 functions. It's conceivable that a function that takes variable
3020 arguments, or that passes its arguments on to another function,
3021 could be const or pure, but it would not modify the arguments, and,
3022 being pure or const, it couldn't possibly modify or even access
3023 memory referenced by them. But it can read from these internal
3024 data structures created by the wrapper, and from any
3025 argument-passing memory referenced by them, so we denote the
3026 possibility of reading from multiple levels of indirection, but
3027 only of reading because const/pure. */
3028 if (apply_args)
3030 nspec[curlen++] = 'r';
3031 nspec[curlen++] = ' ';
3033 if (is_stdarg)
3035 nspec[curlen++] = (no_writes_p ? 'r' : '.');
3036 nspec[curlen++] = (no_writes_p ? 't' : ' ');
3039 nspec[curlen++] = 'W';
3040 nspec[curlen++] = 't';
3042 /* The type has already been copied before adding parameters. */
3043 gcc_checking_assert (TYPE_ARG_TYPES (TREE_TYPE (nnode->decl))
3044 != TYPE_ARG_TYPES (TREE_TYPE (onode->decl)));
3045 TYPE_ATTRIBUTES (TREE_TYPE (nnode->decl))
3046 = tree_cons (get_identifier ("fn spec"),
3047 build_tree_list (NULL_TREE,
3048 build_string (tgtlen, nspec)),
3049 TYPE_ATTRIBUTES (TREE_TYPE (nnode->decl)));
3052 #endif
3055 tree decl = onode->decl;
3056 cgraph_node *target = nnode;
3058 { // copied from create_wrapper
3060 /* Preserve DECL_RESULT so we get right by reference flag. */
3061 tree decl_result = DECL_RESULT (decl);
3063 /* Remove the function's body but keep arguments to be reused
3064 for thunk. */
3065 onode->release_body (true);
3066 onode->reset (/* unlike create_wrapper: preserve_comdat_group = */true);
3068 DECL_UNINLINABLE (decl) = false;
3069 DECL_RESULT (decl) = decl_result;
3070 DECL_INITIAL (decl) = NULL;
3071 allocate_struct_function (decl, false);
3072 set_cfun (NULL);
3074 /* Turn alias into thunk and expand it into GIMPLE representation. */
3075 onode->definition = true;
3077 thunk_info::get_create (onode);
3078 onode->thunk = true;
3079 onode->create_edge (target, NULL, onode->count);
3080 onode->callees->can_throw_external = !TREE_NOTHROW (target->decl);
3082 tree arguments = DECL_ARGUMENTS (decl);
3084 while (arguments)
3086 TREE_ADDRESSABLE (arguments) = false;
3087 arguments = TREE_CHAIN (arguments);
3091 tree alias = onode->callees->callee->decl;
3092 tree thunk_fndecl = decl;
3093 tree a;
3095 int nxargs = 1 + is_stdarg + apply_args;
3097 { // Simplified from expand_thunk.
3098 tree restype;
3099 basic_block bb, then_bb, else_bb, return_bb;
3100 gimple_stmt_iterator bsi;
3101 int nargs = 0;
3102 tree arg;
3103 int i;
3104 tree resdecl;
3105 tree restmp = NULL;
3107 gcall *call;
3108 greturn *ret;
3109 bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);
3111 a = DECL_ARGUMENTS (thunk_fndecl);
3113 current_function_decl = thunk_fndecl;
3115 /* Ensure thunks are emitted in their correct sections. */
3116 resolve_unique_section (thunk_fndecl, 0,
3117 flag_function_sections);
3119 bitmap_obstack_initialize (NULL);
3121 /* Build the return declaration for the function. */
3122 restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
3123 if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
3125 resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
3126 DECL_ARTIFICIAL (resdecl) = 1;
3127 DECL_IGNORED_P (resdecl) = 1;
3128 DECL_CONTEXT (resdecl) = thunk_fndecl;
3129 DECL_RESULT (thunk_fndecl) = resdecl;
3131 else
3132 resdecl = DECL_RESULT (thunk_fndecl);
3134 profile_count cfg_count = onode->count;
3135 if (!cfg_count.initialized_p ())
3136 cfg_count = profile_count::from_gcov_type (BB_FREQ_MAX).guessed_local ();
3138 bb = then_bb = else_bb = return_bb
3139 = init_lowered_empty_function (thunk_fndecl, true, cfg_count);
3141 bsi = gsi_start_bb (bb);
3143 /* Build call to the function being thunked. */
3144 if (!VOID_TYPE_P (restype)
3145 && (!alias_is_noreturn
3146 || TREE_ADDRESSABLE (restype)
3147 || TREE_CODE (TYPE_SIZE_UNIT (restype)) != INTEGER_CST))
3149 if (DECL_BY_REFERENCE (resdecl))
3151 restmp = gimple_fold_indirect_ref (resdecl);
3152 if (!restmp)
3153 restmp = build2 (MEM_REF,
3154 TREE_TYPE (TREE_TYPE (resdecl)),
3155 resdecl,
3156 build_int_cst (TREE_TYPE (resdecl), 0));
3158 else if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
3160 restmp = resdecl;
3162 if (VAR_P (restmp))
3164 add_local_decl (cfun, restmp);
3165 BLOCK_VARS (DECL_INITIAL (current_function_decl))
3166 = restmp;
3169 else
3170 restmp = create_tmp_reg (restype, "retval");
3173 for (arg = a; arg; arg = DECL_CHAIN (arg))
3174 nargs++;
3175 auto_vec<tree> vargs (nargs + nxargs);
3176 i = 0;
3177 arg = a;
3179 if (nargs)
3180 for (tree nparm = DECL_ARGUMENTS (nnode->decl);
3181 i < nargs;
3182 i++, arg = DECL_CHAIN (arg), nparm = DECL_CHAIN (nparm))
3184 tree save_arg = arg;
3186 /* Arrange to pass indirectly the parms, if we decided to do
3187 so, and revert its type in the wrapper. */
3188 if (indirect_nparms.contains (nparm))
3190 tree ref_type = TREE_TYPE (nparm);
3191 TREE_ADDRESSABLE (arg) = true;
3192 arg = build1 (ADDR_EXPR, ref_type, arg);
3194 else if (!TREE_THIS_VOLATILE (arg))
3195 DECL_NOT_GIMPLE_REG_P (arg) = 0;
3197 /* Convert the argument back to the type used by the calling
3198 conventions, e.g. a non-prototyped float type is passed as
3199 double, as in 930603-1.c, and needs to be converted back to
3200 double to be passed on unchanged to the wrapped
3201 function. */
3202 if (TREE_TYPE (nparm) != DECL_ARG_TYPE (nparm))
3204 tree tmp = arg;
3205 /* If ARG is e.g. volatile, we must copy and
3206 convert in separate statements. */
3207 if (!is_gimple_val (arg))
3209 tmp = create_tmp_reg (TYPE_MAIN_VARIANT
3210 (TREE_TYPE (arg)), "arg");
3211 gimple *stmt = gimple_build_assign (tmp, arg);
3212 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
3214 arg = fold_convert (DECL_ARG_TYPE (nparm), tmp);
3217 if (!is_gimple_val (arg))
3219 tree tmp = create_tmp_reg (TYPE_MAIN_VARIANT
3220 (TREE_TYPE (arg)), "arg");
3221 gimple *stmt = gimple_build_assign (tmp, arg);
3222 gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
3223 arg = tmp;
3225 vargs.quick_push (arg);
3226 arg = save_arg;
3228 /* These strub arguments are adjusted later. */
3229 if (apply_args)
3230 vargs.quick_push (null_pointer_node);
3231 if (is_stdarg)
3232 vargs.quick_push (null_pointer_node);
3233 vargs.quick_push (null_pointer_node);
3234 call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias),
3235 vargs);
3236 onode->callees->call_stmt = call;
3237 // gimple_call_set_from_thunk (call, true);
3238 if (DECL_STATIC_CHAIN (alias))
3240 tree p = DECL_STRUCT_FUNCTION (alias)->static_chain_decl;
3241 tree type = TREE_TYPE (p);
3242 tree decl = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
3243 PARM_DECL, create_tmp_var_name ("CHAIN"),
3244 type);
3245 DECL_ARTIFICIAL (decl) = 1;
3246 DECL_IGNORED_P (decl) = 1;
3247 TREE_USED (decl) = 1;
3248 DECL_CONTEXT (decl) = thunk_fndecl;
3249 DECL_ARG_TYPE (decl) = type;
3250 TREE_READONLY (decl) = 1;
3252 struct function *sf = DECL_STRUCT_FUNCTION (thunk_fndecl);
3253 sf->static_chain_decl = decl;
3255 gimple_call_set_chain (call, decl);
3258 /* Return slot optimization is always possible and in fact required to
3259 return values with DECL_BY_REFERENCE. */
3260 if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
3261 && (!is_gimple_reg_type (TREE_TYPE (resdecl))
3262 || DECL_BY_REFERENCE (resdecl)))
3263 gimple_call_set_return_slot_opt (call, true);
3265 if (restmp)
3267 gimple_call_set_lhs (call, restmp);
3268 gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
3269 TREE_TYPE (TREE_TYPE (alias))));
3271 gsi_insert_after (&bsi, call, GSI_NEW_STMT);
3272 if (!alias_is_noreturn)
3274 /* Build return value. */
3275 if (!DECL_BY_REFERENCE (resdecl))
3276 ret = gimple_build_return (restmp);
3277 else
3278 ret = gimple_build_return (resdecl);
3280 gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
3282 else
3284 remove_edge (single_succ_edge (bb));
3287 cfun->gimple_df->in_ssa_p = true;
3288 update_max_bb_count ();
3289 profile_status_for_fn (cfun)
3290 = cfg_count.initialized_p () && cfg_count.ipa_p ()
3291 ? PROFILE_READ : PROFILE_GUESSED;
3292 /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
3293 // TREE_ASM_WRITTEN (thunk_fndecl) = false;
3294 delete_unreachable_blocks ();
3295 update_ssa (TODO_update_ssa);
3296 checking_verify_flow_info ();
3297 free_dominance_info (CDI_DOMINATORS);
3299 /* Since we want to emit the thunk, we explicitly mark its name as
3300 referenced. */
3301 onode->thunk = false;
3302 onode->lowered = true;
3303 bitmap_obstack_release (NULL);
3305 current_function_decl = NULL;
3306 set_cfun (NULL);
3309 thunk_info::remove (onode);
3311 // some more of create_wrapper at the end of the next block.
3316 tree aaval = NULL_TREE;
3317 tree vaptr = NULL_TREE;
3318 tree wmptr = NULL_TREE;
3319 for (tree arg = DECL_ARGUMENTS (nnode->decl); arg; arg = DECL_CHAIN (arg))
3321 aaval = vaptr;
3322 vaptr = wmptr;
3323 wmptr = arg;
3326 if (!apply_args)
3327 aaval = NULL_TREE;
  /* The trailing args are [apply_args], [va_list_ptr], and
     watermark.  If we don't have a va_list_ptr, the penultimate
     argument is apply_args.  */
3332 else if (!is_stdarg)
3333 aaval = vaptr;
3335 if (!is_stdarg)
3336 vaptr = NULL_TREE;
3338 DECL_NAME (wmptr) = get_watermark_ptr ();
3339 DECL_ARTIFICIAL (wmptr) = 1;
3340 DECL_IGNORED_P (wmptr) = 1;
3341 TREE_USED (wmptr) = 1;
3343 if (is_stdarg)
3345 DECL_NAME (vaptr) = get_va_list_ptr ();
3346 DECL_ARTIFICIAL (vaptr) = 1;
3347 DECL_IGNORED_P (vaptr) = 1;
3348 TREE_USED (vaptr) = 1;
3351 if (apply_args)
3353 DECL_NAME (aaval) = get_apply_args ();
3354 DECL_ARTIFICIAL (aaval) = 1;
3355 DECL_IGNORED_P (aaval) = 1;
3356 TREE_USED (aaval) = 1;
3359 push_cfun (DECL_STRUCT_FUNCTION (nnode->decl));
3362 edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3363 gimple_seq seq = call_update_watermark (wmptr, nnode, e->src->count);
3364 gsi_insert_seq_on_edge_immediate (e, seq);
3367 bool any_indirect = !indirect_nparms.is_empty ();
3369 if (any_indirect)
3371 basic_block bb;
3372 bool needs_commit = false;
3373 FOR_EACH_BB_FN (bb, cfun)
3375 for (gphi_iterator gsi = gsi_start_nonvirtual_phis (bb);
3376 !gsi_end_p (gsi);
3377 gsi_next_nonvirtual_phi (&gsi))
3379 gphi *stmt = gsi.phi ();
3381 walk_stmt_info wi = {};
3382 wi.info = &indirect_nparms;
3383 walk_gimple_op (stmt, walk_make_indirect, &wi);
3384 if (wi.changed && !is_gimple_debug (gsi_stmt (gsi)))
3385 if (walk_regimplify_phi (stmt))
3386 needs_commit = true;
3389 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
3390 !gsi_end_p (gsi); gsi_next (&gsi))
3392 gimple *stmt = gsi_stmt (gsi);
3394 walk_stmt_info wi = {};
3395 wi.info = &indirect_nparms;
3396 walk_gimple_op (stmt, walk_make_indirect, &wi);
3397 if (wi.changed)
3399 if (!is_gimple_debug (stmt))
3401 wi.info = &gsi;
3402 walk_gimple_op (stmt, walk_regimplify_addr_expr,
3403 &wi);
3405 update_stmt (stmt);
3409 if (needs_commit)
3410 gsi_commit_edge_inserts ();
3413 if (DECL_STRUCT_FUNCTION (nnode->decl)->calls_alloca
3414 || is_stdarg || apply_args)
3415 for (cgraph_edge *e = nnode->callees, *enext; e; e = enext)
3417 if (!e->call_stmt)
3418 continue;
3420 gcall *call = e->call_stmt;
3421 gimple_stmt_iterator gsi = gsi_for_stmt (call);
3422 tree fndecl = e->callee->decl;
3424 enext = e->next_callee;
3426 if (gimple_alloca_call_p (call))
3428 gimple_seq seq = call_update_watermark (wmptr, NULL,
3429 gsi_bb (gsi)->count);
3430 gsi_insert_finally_seq_after_call (gsi, seq);
3432 else if (fndecl && is_stdarg
3433 && fndecl_built_in_p (fndecl, BUILT_IN_VA_START))
3435 /* Using a non-default stdarg ABI makes the function ineligible
3436 for internal strub. */
3437 gcc_checking_assert (builtin_decl_explicit (BUILT_IN_VA_START)
3438 == fndecl);
3439 tree bvacopy = builtin_decl_explicit (BUILT_IN_VA_COPY);
3440 gimple_call_set_fndecl (call, bvacopy);
3441 tree arg = vaptr;
3442 /* The va_copy source must be dereferenced, unless it's an array
3443 type, that would have decayed to a pointer. */
3444 if (TREE_CODE (TREE_TYPE (TREE_TYPE (vaptr))) != ARRAY_TYPE)
3446 arg = gimple_fold_indirect_ref (vaptr);
3447 if (!arg)
3448 arg = build2 (MEM_REF,
3449 TREE_TYPE (TREE_TYPE (vaptr)),
3450 vaptr,
3451 build_int_cst (TREE_TYPE (vaptr), 0));
3452 if (!is_gimple_val (arg))
3453 arg = force_gimple_operand_gsi (&gsi, arg, true,
3454 NULL_TREE, true, GSI_SAME_STMT);
3456 gimple_call_set_arg (call, 1, arg);
3457 update_stmt (call);
3458 e->redirect_callee (cgraph_node::get_create (bvacopy));
3460 else if (fndecl && apply_args
3461 && fndecl_built_in_p (fndecl, BUILT_IN_APPLY_ARGS))
3463 tree lhs = gimple_call_lhs (call);
3464 gimple *assign = (lhs
3465 ? gimple_build_assign (lhs, aaval)
3466 : gimple_build_nop ());
3467 gsi_replace (&gsi, assign, true);
3468 cgraph_edge::remove (e);
3472 { // a little more copied from create_wrapper
3474 /* Inline summary set-up. */
3475 nnode->analyze ();
3476 // inline_analyze_function (nnode);
3479 pop_cfun ();
3483 push_cfun (DECL_STRUCT_FUNCTION (onode->decl));
3484 gimple_stmt_iterator gsi
3485 = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
3487 gcall *wrcall;
3488 while (!(wrcall = dyn_cast <gcall *> (gsi_stmt (gsi))))
3489 gsi_next (&gsi);
3491 tree swm = create_tmp_var (get_wmt (), ".strub.watermark");
3492 TREE_ADDRESSABLE (swm) = true;
3493 tree swmp = build1 (ADDR_EXPR, get_pwmt (), swm);
3495 tree enter = get_enter ();
3496 gcall *stptr = gimple_build_call (enter, 1, unshare_expr (swmp));
3497 gimple_set_location (stptr, gimple_location (wrcall));
3498 gsi_insert_before (&gsi, stptr, GSI_SAME_STMT);
3499 onode->create_edge (cgraph_node::get_create (enter),
3500 stptr, gsi_bb (gsi)->count, false);
3502 int nargs = gimple_call_num_args (wrcall);
3504 gimple_seq seq = NULL;
3506 if (apply_args)
3508 tree aalst = create_tmp_var (ptr_type_node, ".strub.apply_args");
3509 tree bappargs = builtin_decl_explicit (BUILT_IN_APPLY_ARGS);
3510 gcall *appargs = gimple_build_call (bappargs, 0);
3511 gimple_call_set_lhs (appargs, aalst);
3512 gimple_set_location (appargs, gimple_location (wrcall));
3513 gsi_insert_before (&gsi, appargs, GSI_SAME_STMT);
3514 gimple_call_set_arg (wrcall, nargs - 2 - is_stdarg, aalst);
3515 onode->create_edge (cgraph_node::get_create (bappargs),
3516 appargs, gsi_bb (gsi)->count, false);
3519 if (is_stdarg)
3521 tree valst = create_tmp_var (va_list_type_node, ".strub.va_list");
3522 TREE_ADDRESSABLE (valst) = true;
3523 tree vaptr = build1 (ADDR_EXPR,
3524 build_pointer_type (va_list_type_node),
3525 valst);
3526 gimple_call_set_arg (wrcall, nargs - 2, unshare_expr (vaptr));
3528 tree bvastart = builtin_decl_explicit (BUILT_IN_VA_START);
3529 gcall *vastart = gimple_build_call (bvastart, 2,
3530 unshare_expr (vaptr),
3531 integer_zero_node);
3532 gimple_set_location (vastart, gimple_location (wrcall));
3533 gsi_insert_before (&gsi, vastart, GSI_SAME_STMT);
3534 onode->create_edge (cgraph_node::get_create (bvastart),
3535 vastart, gsi_bb (gsi)->count, false);
3537 tree bvaend = builtin_decl_explicit (BUILT_IN_VA_END);
3538 gcall *vaend = gimple_build_call (bvaend, 1, unshare_expr (vaptr));
3539 gimple_set_location (vaend, gimple_location (wrcall));
3540 gimple_seq_add_stmt (&seq, vaend);
3543 gimple_call_set_arg (wrcall, nargs - 1, unshare_expr (swmp));
3544 // gimple_call_set_tail (wrcall, false);
3545 update_stmt (wrcall);
3548 #if !ATTR_FNSPEC_DECONST_WATERMARK
3549 /* If the call will be assumed to not modify or even read the
3550 watermark, make it read and modified ourselves. */
3551 if ((gimple_call_flags (wrcall)
3552 & (ECF_CONST | ECF_PURE | ECF_NOVOPS)))
3554 vec<tree, va_gc> *inputs = NULL;
3555 vec<tree, va_gc> *outputs = NULL;
3556 vec_safe_push (outputs,
3557 build_tree_list
3558 (build_tree_list
3559 (NULL_TREE, build_string (2, "=m")),
3560 swm));
3561 vec_safe_push (inputs,
3562 build_tree_list
3563 (build_tree_list
3564 (NULL_TREE, build_string (1, "m")),
3565 swm));
3566 gasm *forcemod = gimple_build_asm_vec ("", inputs, outputs,
3567 NULL, NULL);
3568 gimple_seq_add_stmt (&seq, forcemod);
3570 /* If the call will be assumed to not even read the watermark,
3571 make sure it is already in memory before the call. */
3572 if ((gimple_call_flags (wrcall) & ECF_CONST))
3574 vec<tree, va_gc> *inputs = NULL;
3575 vec_safe_push (inputs,
3576 build_tree_list
3577 (build_tree_list
3578 (NULL_TREE, build_string (1, "m")),
3579 swm));
3580 gasm *force_store = gimple_build_asm_vec ("", inputs, NULL,
3581 NULL, NULL);
3582 gimple_set_location (force_store, gimple_location (wrcall));
3583 gsi_insert_before (&gsi, force_store, GSI_SAME_STMT);
3586 #endif
3588 gcall *sleave = gimple_build_call (get_leave (), 1,
3589 unshare_expr (swmp));
3590 gimple_seq_add_stmt (&seq, sleave);
3592 gassign *clobber = gimple_build_assign (swm,
3593 build_clobber
3594 (TREE_TYPE (swm)));
3595 gimple_seq_add_stmt (&seq, clobber);
3598 gsi_insert_finally_seq_after_call (gsi, seq);
3600 /* For nnode, we don't rebuild edges because we wish to retain
3601 any redirections copied to it from earlier passes, so we add
3602 call graph edges explicitly there, but for onode, we create a
3603 fresh function, so we may as well just issue the calls and
3604 then rebuild all cgraph edges. */
3605 // cgraph_edge::rebuild_edges ();
3606 onode->analyze ();
3607 // inline_analyze_function (onode);
3609 pop_cfun ();
3613 return 0;
3616 simple_ipa_opt_pass *
3617 make_pass_ipa_strub (gcc::context *ctxt)
3619 return new pass_ipa_strub (ctxt);
3622 #include "gt-ipa-strub.h"