2 +----------------------------------------------------------------------+
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
16 #include "hphp/hhbbc/interp.h"
23 #include <folly/gen/Base.h>
24 #include <folly/gen/String.h>
26 #include "hphp/util/configs/eval.h"
27 #include "hphp/util/hash-set.h"
28 #include "hphp/util/trace.h"
29 #include "hphp/runtime/base/array-init.h"
30 #include "hphp/runtime/base/array-iterator.h"
31 #include "hphp/runtime/base/collections.h"
32 #include "hphp/runtime/base/implicit-context.h"
33 #include "hphp/runtime/base/static-string-table.h"
34 #include "hphp/runtime/base/tv-arith.h"
35 #include "hphp/runtime/base/tv-comparisons.h"
36 #include "hphp/runtime/base/tv-conversions.h"
37 #include "hphp/runtime/base/type-structure.h"
38 #include "hphp/runtime/base/type-structure-helpers.h"
39 #include "hphp/runtime/base/type-structure-helpers-defs.h"
40 #include "hphp/runtime/vm/runtime.h"
41 #include "hphp/runtime/vm/unit-util.h"
43 #include "hphp/runtime/ext/hh/ext_hh.h"
45 #include "hphp/hhbbc/analyze.h"
46 #include "hphp/hhbbc/bc.h"
47 #include "hphp/hhbbc/cfg.h"
48 #include "hphp/hhbbc/class-util.h"
49 #include "hphp/hhbbc/eval-cell.h"
50 #include "hphp/hhbbc/index.h"
51 #include "hphp/hhbbc/interp-state.h"
52 #include "hphp/hhbbc/optimize.h"
53 #include "hphp/hhbbc/representation.h"
54 #include "hphp/hhbbc/type-builtins.h"
55 #include "hphp/hhbbc/type-ops.h"
56 #include "hphp/hhbbc/type-structure.h"
57 #include "hphp/hhbbc/type-system.h"
58 #include "hphp/hhbbc/unit-util.h"
59 #include "hphp/hhbbc/wide-func.h"
61 #include "hphp/hhbbc/stats.h"
63 #include "hphp/hhbbc/interp-internal.h"
65 namespace HPHP::HHBBC
{
67 //////////////////////////////////////////////////////////////////////
71 const StaticString
s_MethCallerHelper("__SystemLib\\MethCallerHelper");
72 const StaticString
s_PHP_Incomplete_Class("__PHP_Incomplete_Class");
73 const StaticString
s_ConstMap("ConstMap");
74 const StaticString
s_ConstSet("ConstSet");
75 const StaticString
s_ConstVector("ConstVector");
76 const StaticString
s_Iterator("HH\\Iterator");
77 const StaticString
s_IMemoizeParam("HH\\IMemoizeParam");
78 const StaticString
s_getInstanceKey("getInstanceKey");
79 const StaticString
s_Closure("Closure");
80 const StaticString
s_this(annotTypeName(AnnotType::This
));
82 bool poppable(Op op
) {
94 case Op::NewDictArray
:
97 case Op::EnumClassLabel
:
104 bool pushes_immediate(Op op
) {
116 case Op::EnumClassLabel
:
123 void interpStep(ISS
& env
, const Bytecode
& bc
);
125 void record(ISS
& env
, const Bytecode
& bc
) {
126 if (bc
.srcLoc
!= env
.srcLoc
) {
128 tmp
.srcLoc
= env
.srcLoc
;
129 return record(env
, tmp
);
132 if (env
.replacedBcs
.empty() &&
133 env
.unchangedBcs
< env
.blk
.hhbcs
.size() &&
134 bc
== env
.blk
.hhbcs
[env
.unchangedBcs
]) {
139 ITRACE(2, " => {}\n", show(*env
.ctx
.func
, bc
));
140 env
.replacedBcs
.push_back(bc
);
143 // The number of pops as seen by interp.
144 uint32_t numPop(const Bytecode
& bc
) {
145 if (bc
.op
== Op::CGetL2
) return 1;
149 // The number of pushes as seen by interp.
150 uint32_t numPush(const Bytecode
& bc
) {
151 if (bc
.op
== Op::CGetL2
) return 2;
155 void reprocess(ISS
& env
) {
156 FTRACE(2, " reprocess\n");
157 env
.reprocess
= true;
160 ArrayData
** add_elem_array(ISS
& env
) {
161 auto const idx
= env
.trackedElems
.back().idx
;
162 if (idx
< env
.unchangedBcs
) {
163 auto const DEBUG_ONLY
& bc
= env
.blk
.hhbcs
[idx
];
164 assertx(bc
.op
== Op::Concat
);
167 assertx(idx
>= env
.unchangedBcs
);
168 auto& bc
= env
.replacedBcs
[idx
- env
.unchangedBcs
];
169 auto arr
= [&] () -> const ArrayData
** {
171 case Op::Vec
: return &bc
.Vec
.arr1
;
172 case Op::Dict
: return &bc
.Dict
.arr1
;
173 case Op::Keyset
: return &bc
.Keyset
.arr1
;
174 case Op::Concat
: return nullptr;
175 default: not_reached();
178 return const_cast<ArrayData
**>(arr
);
181 bool start_add_elem(ISS
& env
, Type
& ty
, Op op
) {
182 auto value
= tvNonStatic(ty
);
183 if (!value
|| !isArrayLikeType(value
->m_type
)) return false;
185 if (op
== Op::AddElemC
) {
186 reduce(env
, bc::PopC
{}, bc::PopC
{}, bc::PopC
{});
188 reduce(env
, bc::PopC
{}, bc::PopC
{});
190 env
.trackedElems
.emplace_back(
191 env
.state
.stack
.size(),
192 env
.unchangedBcs
+ env
.replacedBcs
.size()
195 auto const arr
= value
->m_data
.parr
;
196 env
.replacedBcs
.push_back(
198 if (arr
->isVecType()) return bc::Vec
{ arr
};
199 if (arr
->isDictType()) return bc::Dict
{ arr
};
200 if (arr
->isKeysetType()) return bc::Keyset
{ arr
};
201 always_assert(false);
204 env
.replacedBcs
.back().srcLoc
= env
.srcLoc
;
205 ITRACE(2, "(addelem* -> {}\n",
206 show(*env
.ctx
.func
, env
.replacedBcs
.back()));
207 push(env
, std::move(ty
));
213 * Alter the saved add_elem array in a way that preserves its provenance tag
214 * or adds a new one if applicable (i.e. the array is a vec or dict)
216 * The `mutate` parameter should be callable with an ArrayData** pointing to the
217 * add_elem array cached in the interp state and should write to it directly.
219 template <typename Fn
>
220 bool mutate_add_elem_array(ISS
& env
, Fn
&& mutate
) {
221 auto const arr
= add_elem_array(env
);
222 if (!arr
) return false;
227 void finish_tracked_elem(ISS
& env
) {
228 auto const arr
= add_elem_array(env
);
229 env
.trackedElems
.pop_back();
231 ArrayData::GetScalarArray(arr
);
236 void finish_tracked_elems(ISS
& env
, size_t depth
) {
237 while (!env
.trackedElems
.empty() && env
.trackedElems
.back().depth
>= depth
) {
238 finish_tracked_elem(env
);
242 uint32_t id_from_slot(ISS
& env
, int slot
) {
243 auto const id
= (env
.state
.stack
.end() - (slot
+ 1))->id
;
244 assertx(id
== StackElem::NoId
||
245 id
< env
.unchangedBcs
+ env
.replacedBcs
.size());
249 const Bytecode
* op_from_id(ISS
& env
, uint32_t id
) {
250 if (id
== StackElem::NoId
) return nullptr;
251 if (id
< env
.unchangedBcs
) return &env
.blk
.hhbcs
[id
];
252 auto const off
= id
- env
.unchangedBcs
;
253 assertx(off
< env
.replacedBcs
.size());
254 return &env
.replacedBcs
[off
];
257 void ensure_mutable(ISS
& env
, uint32_t id
) {
258 if (id
< env
.unchangedBcs
) {
259 auto const delta
= env
.unchangedBcs
- id
;
260 env
.replacedBcs
.resize(env
.replacedBcs
.size() + delta
);
261 for (auto i
= env
.replacedBcs
.size(); i
-- > delta
; ) {
262 env
.replacedBcs
[i
] = std::move(env
.replacedBcs
[i
- delta
]);
264 for (auto i
= 0; i
< delta
; i
++) {
265 env
.replacedBcs
[i
] = env
.blk
.hhbcs
[id
+ i
];
267 env
.unchangedBcs
= id
;
272 * Turn the instruction that wrote the slot'th element from the top of
273 * the stack into a Nop, adjusting the stack appropriately. If its the
274 * previous instruction, just rewind.
276 int kill_by_slot(ISS
& env
, int slot
) {
278 auto const id
= id_from_slot(env
, slot
);
279 assertx(id
!= StackElem::NoId
);
280 auto const sz
= env
.state
.stack
.size();
281 // if its the last bytecode we processed, we can rewind and avoid
282 // the reprocess overhead.
283 if (id
== env
.unchangedBcs
+ env
.replacedBcs
.size() - 1) {
285 return env
.state
.stack
.size() - sz
;
287 ensure_mutable(env
, id
);
288 auto& bc
= env
.replacedBcs
[id
- env
.unchangedBcs
];
289 auto const pop
= numPop(bc
);
290 auto const push
= numPush(bc
);
291 ITRACE(2, "kill_by_slot: slot={}, id={}, was {}\n",
292 slot
, id
, show(*env
.ctx
.func
, bc
));
293 bc
= bc_with_loc(bc
.srcLoc
, bc::Nop
{});
294 env
.state
.stack
.kill(pop
, push
, id
);
296 return env
.state
.stack
.size() - sz
;
300 * Check whether an instruction can be inserted immediately after the
301 * slot'th stack entry was written. This is only possible if slot was
302 * the last thing written by the instruction that wrote it (ie some
303 * bytecodes push more than one value - there's no way to insert a
304 * bytecode that will write *between* those values on the stack).
306 bool can_insert_after_slot(ISS
& env
, int slot
) {
307 auto const it
= env
.state
.stack
.end() - (slot
+ 1);
308 if (it
->id
== StackElem::NoId
) return false;
309 if (auto const next
= it
.next_elem(1)) {
310 return next
->id
!= it
->id
;
316 * Insert a sequence of bytecodes after the instruction that wrote the
317 * slot'th element from the top of the stack.
319 * The entire sequence pops numPop, and pushes numPush stack
320 * elements. Only the last bytecode can push anything onto the stack,
321 * and the types it pushes are pointed to by types (if you have more
322 * than one bytecode that pushes, call this more than once).
324 void insert_after_slot(ISS
& env
, int slot
,
325 int numPop
, int numPush
, const Type
* types
,
326 const BytecodeVec
& bcs
) {
327 assertx(can_insert_after_slot(env
, slot
));
329 auto const id
= id_from_slot(env
, slot
);
330 assertx(id
!= StackElem::NoId
);
331 ensure_mutable(env
, id
+ 1);
332 env
.state
.stack
.insert_after(numPop
, numPush
, types
, bcs
.size(), id
);
333 env
.replacedBcs
.insert(env
.replacedBcs
.begin() + (id
+ 1 - env
.unchangedBcs
),
334 bcs
.begin(), bcs
.end());
335 using namespace folly::gen
;
336 ITRACE(2, "insert_after_slot: slot={}, id={} [{}]\n",
339 map([&] (const Bytecode
& bc
) { return show(*env
.ctx
.func
, bc
); }) |
340 unsplit
<std::string
>(", "));
343 Bytecode
& mutate_last_op(ISS
& env
) {
344 assertx(will_reduce(env
));
346 if (!env
.replacedBcs
.size()) {
347 assertx(env
.unchangedBcs
);
348 env
.replacedBcs
.push_back(env
.blk
.hhbcs
[--env
.unchangedBcs
]);
350 return env
.replacedBcs
.back();
354 * Can be used to replace one op with another when rewind/reduce isn't
355 * safe (eg to change a SetL to a PopL - its not safe to rewind/reduce
356 * because the SetL changed both the Type and the equiv of its local).
358 void replace_last_op(ISS
& env
, Bytecode
&& bc
) {
359 auto& last
= mutate_last_op(env
);
360 auto const newPush
= numPush(bc
);
361 auto const oldPush
= numPush(last
);
362 auto const newPops
= numPop(bc
);
363 auto const oldPops
= numPop(last
);
365 assertx(newPush
<= oldPush
);
366 assertx(newPops
<= oldPops
);
368 if (newPush
!= oldPush
|| newPops
!= oldPops
) {
370 env
.state
.stack
.rewind(oldPops
- newPops
, oldPush
- newPush
);
372 ITRACE(2, "(replace: {}->{}\n",
373 show(*env
.ctx
.func
, last
), show(*env
.ctx
.func
, bc
));
374 last
= bc_with_loc(last
.srcLoc
, bc
);
379 //////////////////////////////////////////////////////////////////////
381 const Bytecode
* op_from_slot(ISS
& env
, int slot
, int prev
/* = 0 */) {
382 if (!will_reduce(env
)) return nullptr;
383 auto const id
= id_from_slot(env
, slot
);
384 if (id
== StackElem::NoId
) return nullptr;
385 if (id
< prev
) return nullptr;
386 return op_from_id(env
, id
- prev
);
389 const Bytecode
* last_op(ISS
& env
, int idx
/* = 0 */) {
390 if (!will_reduce(env
)) return nullptr;
392 if (env
.replacedBcs
.size() > idx
) {
393 return &env
.replacedBcs
[env
.replacedBcs
.size() - idx
- 1];
396 idx
-= env
.replacedBcs
.size();
397 if (env
.unchangedBcs
> idx
) {
398 return &env
.blk
.hhbcs
[env
.unchangedBcs
- idx
- 1];
404 * Assuming bc was just interped, rewind to the state immediately
405 * before it was interped.
407 * This is rarely what you want. Its used for constprop, where the
408 * bytecode has been interped, but not yet committed to the bytecode
409 * stream. We want to undo its effects, the spit out pops for its
410 * inputs, and commit a constant-generating bytecode.
412 void rewind(ISS
& env
, const Bytecode
& bc
) {
414 ITRACE(2, "(rewind: {}\n", show(*env
.ctx
.func
, bc
));
415 env
.state
.stack
.rewind(numPop(bc
), numPush(bc
));
416 env
.flags
.usedParams
.reset();
420 * Used for peephole opts. Will undo the *stack* effects of the last n
421 * committed byte codes, and remove them from the bytecode stream, in
422 * preparation for writing out an optimized replacement sequence.
424 * WARNING: Does not undo other changes to state, such as local types,
425 * local equivalency, and thisType. Take care when rewinding such
428 void rewind(ISS
& env
, int n
) {
431 while (env
.replacedBcs
.size()) {
432 rewind(env
, env
.replacedBcs
.back());
433 env
.replacedBcs
.pop_back();
437 rewind(env
, env
.blk
.hhbcs
[--env
.unchangedBcs
]);
441 void impl_vec(ISS
& env
, bool reduce
, BytecodeVec
&& bcs
) {
442 if (!will_reduce(env
)) reduce
= false;
445 using namespace folly::gen
;
446 ITRACE(2, "(reduce: {}\n",
448 map([&] (const Bytecode
& bc
) { return show(*env
.ctx
.func
, bc
); }) |
449 unsplit
<std::string
>(", "));
451 auto ef
= !env
.flags
.reduced
|| env
.flags
.effectFree
;
452 auto usedParams
= env
.flags
.usedParams
;
454 for (auto const& bc
: bcs
) {
456 env
.flags
.jmpDest
== NoBlockId
&&
457 "you can't use impl with branching opcodes before last position"
460 usedParams
|= env
.flags
.usedParams
;
461 if (!env
.flags
.effectFree
) ef
= false;
462 if (env
.state
.unreachable
|| env
.flags
.jmpDest
!= NoBlockId
) break;
464 env
.flags
.effectFree
= ef
;
465 env
.flags
.usedParams
= usedParams
;
466 } else if (!env
.flags
.reduced
) {
469 env
.flags
.reduced
= true;
474 SCOPE_EXIT
{ env
.analyzeDepth
--; };
476 // We should be at the start of a bytecode.
477 assertx(env
.flags
.wasPEI
&&
478 !env
.flags
.canConstProp
&&
479 !env
.flags
.effectFree
&&
480 env
.flags
.usedParams
.none());
482 env
.flags
.wasPEI
= false;
483 env
.flags
.canConstProp
= true;
484 env
.flags
.effectFree
= true;
485 env
.flags
.usedParams
.reset();
487 for (auto const& bc
: bcs
) {
488 assertx(env
.flags
.jmpDest
== NoBlockId
&&
489 "you can't use impl with branching opcodes before last position");
491 auto const wasPEI
= env
.flags
.wasPEI
;
492 auto const canConstProp
= env
.flags
.canConstProp
;
493 auto const effectFree
= env
.flags
.effectFree
;
495 ITRACE(3, " (impl {}\n", show(*env
.ctx
.func
, bc
));
496 env
.flags
.wasPEI
= true;
497 env
.flags
.canConstProp
= false;
498 env
.flags
.effectFree
= false;
499 default_dispatch(env
, bc
);
501 if (env
.flags
.canConstProp
) {
503 if (env
.flags
.effectFree
&& !env
.flags
.wasPEI
) return;
504 auto stk
= env
.state
.stack
.end();
505 for (auto i
= bc
.numPush(); i
--; ) {
507 if (!is_scalar(stk
->type
)) return;
509 env
.flags
.effectFree
= true;
510 env
.flags
.wasPEI
= false;
514 // If any of the opcodes in the impl list said they could throw,
515 // then the whole thing could throw.
516 env
.flags
.wasPEI
= env
.flags
.wasPEI
|| wasPEI
;
517 env
.flags
.canConstProp
= env
.flags
.canConstProp
&& canConstProp
;
518 env
.flags
.effectFree
= env
.flags
.effectFree
&& effectFree
;
519 if (env
.state
.unreachable
|| env
.flags
.jmpDest
!= NoBlockId
) break;
523 LocalId
equivLocalRange(ISS
& env
, const LocalRange
& range
) {
524 auto bestRange
= range
.first
;
525 auto equivFirst
= findLocEquiv(env
, range
.first
);
526 if (equivFirst
== NoLocalId
) return bestRange
;
528 if (equivFirst
< bestRange
) {
529 auto equivRange
= [&] {
530 // local equivalency includes differing by Uninit, so we need
531 // to check the types.
532 if (peekLocRaw(env
, equivFirst
) != peekLocRaw(env
, range
.first
)) {
536 for (uint32_t i
= 1; i
< range
.count
; ++i
) {
537 if (!locsAreEquiv(env
, equivFirst
+ i
, range
.first
+ i
) ||
538 peekLocRaw(env
, equivFirst
+ i
) !=
539 peekLocRaw(env
, range
.first
+ i
)) {
548 bestRange
= equivFirst
;
551 equivFirst
= findLocEquiv(env
, equivFirst
);
552 assertx(equivFirst
!= NoLocalId
);
553 } while (equivFirst
!= range
.first
);
558 SString
getNameFromType(const Type
& t
) {
559 if (!t
.subtypeOf(BStr
) && !t
.subtypeOf(BLazyCls
)) return nullptr;
560 if (is_specialized_string(t
)) return sval_of(t
);
561 if (is_specialized_lazycls(t
)) return lazyclsval_of(t
);
565 //////////////////////////////////////////////////////////////////////
570 * Very simple check to see if the top level class is reified or not
571 * If not we can reduce a VerifyTypeTS to a regular VerifyType
573 bool shouldReduceToNonReifiedVerifyType(ISS
& env
, SArray ts
) {
574 if (get_ts_kind(ts
) != TypeStructure::Kind::T_unresolved
) return false;
575 auto const clsName
= get_ts_classname(ts
);
576 auto const lookup
= env
.index
.lookup_class_or_type_alias(clsName
);
578 return !env
.index
.resolve_class(*lookup
.cls
)->couldHaveReifiedGenerics();
580 // Type aliases cannot have reified generics
581 return lookup
.typeAlias
;
586 //////////////////////////////////////////////////////////////////////
588 namespace interp_step
{
590 void in(ISS
& env
, const bc::Nop
&) { reduce(env
); }
592 void in(ISS
& env
, const bc::PopC
&) {
593 if (auto const last
= last_op(env
)) {
594 if (poppable(last
->op
)) {
598 if (last
->op
== Op::This
) {
599 // can't rewind This because it removed null from thisType (so
600 // CheckThis at this point is a no-op) - and note that it must
601 // have *been* nullable, or we'd have turned it into a
602 // `BareThis NeverNull`
603 replace_last_op(env
, bc::CheckThis
{});
606 if (last
->op
== Op::SetL
) {
607 // can't rewind a SetL because it changes local state
608 replace_last_op(env
, bc::PopL
{ last
->SetL
.loc1
});
611 if (last
->op
== Op::CGetL2
) {
612 auto loc
= last
->CGetL2
.nloc1
;
614 return reduce(env
, bc::PopC
{}, bc::CGetL
{ loc
});
622 void in(ISS
& env
, const bc::PopU
&) {
623 if (auto const last
= last_op(env
)) {
624 if (last
->op
== Op::NullUninit
) {
629 effect_free(env
); popU(env
);
632 void in(ISS
& env
, const bc::PopU2
&) {
634 auto equiv
= topStkEquiv(env
);
635 auto val
= popC(env
);
637 push(env
, std::move(val
), equiv
!= StackDupId
? equiv
: NoLocalId
);
640 void in(ISS
& env
, const bc::Dup
& /*op*/) {
642 auto equiv
= topStkEquiv(env
);
643 auto val
= popC(env
);
644 push(env
, val
, equiv
);
645 push(env
, std::move(val
), StackDupId
);
648 void in(ISS
& env
, const bc::AssertRATL
& op
) {
649 mayReadLocal(env
, op
.loc1
);
653 void in(ISS
& env
, const bc::AssertRATStk
&) {
657 void in(ISS
& env
, const bc::BreakTraceHint
&) { effect_free(env
); }
659 void in(ISS
& env
, const bc::CGetCUNop
&) {
661 auto const t
= popCU(env
);
662 push(env
, remove_uninit(t
));
665 void in(ISS
& env
, const bc::UGetCUNop
&) {
671 void in(ISS
& env
, const bc::Null
&) {
673 push(env
, TInitNull
);
676 void in(ISS
& env
, const bc::NullUninit
&) {
681 void in(ISS
& env
, const bc::True
&) {
686 void in(ISS
& env
, const bc::False
&) {
691 void in(ISS
& env
, const bc::Int
& op
) {
693 push(env
, ival(op
.arg1
));
696 void in(ISS
& env
, const bc::Double
& op
) {
698 push(env
, dval(op
.dbl1
));
701 void in(ISS
& env
, const bc::String
& op
) {
703 push(env
, sval(op
.str1
));
706 void in(ISS
& env
, const bc::Vec
& op
) {
707 assertx(op
.arr1
->isVecType());
709 push(env
, vec_val(op
.arr1
));
712 void in(ISS
& env
, const bc::Dict
& op
) {
713 assertx(op
.arr1
->isDictType());
715 push(env
, dict_val(op
.arr1
));
718 void in(ISS
& env
, const bc::Keyset
& op
) {
719 assertx(op
.arr1
->isKeysetType());
721 push(env
, keyset_val(op
.arr1
));
724 void in(ISS
& env
, const bc::NewDictArray
& op
) {
726 push(env
, op
.arg1
== 0 ? dict_empty() : some_dict_empty());
729 void in(ISS
& env
, const bc::NewStructDict
& op
) {
730 auto map
= MapElems
{};
731 for (auto it
= op
.keys
.end(); it
!= op
.keys
.begin(); ) {
733 make_tv
<KindOfPersistentString
>(*--it
),
734 MapElem::SStrKey(popC(env
))
737 push(env
, dict_map(std::move(map
)));
742 void in(ISS
& env
, const bc::NewVec
& op
) {
743 auto elems
= std::vector
<Type
>{};
744 elems
.reserve(op
.arg1
);
745 for (auto i
= uint32_t{0}; i
< op
.arg1
; ++i
) {
746 elems
.push_back(std::move(topC(env
, op
.arg1
- i
- 1)));
748 discard(env
, op
.arg1
);
751 push(env
, vec(std::move(elems
)));
754 void in(ISS
& env
, const bc::NewKeysetArray
& op
) {
755 assertx(op
.arg1
> 0);
756 auto map
= MapElems
{};
760 auto effectful
= false;
761 for (auto i
= uint32_t{0}; i
< op
.arg1
; ++i
) {
762 auto [key
, promotion
] = promote_classlike_to_key(popC(env
));
764 auto const keyValid
= key
.subtypeOf(BArrKey
);
765 if (!keyValid
) key
= intersection_of(std::move(key
), TArrKey
);
766 if (key
.is(BBottom
)) {
773 if (auto const v
= tv(key
)) {
774 map
.emplace_front(*v
, MapElem::KeyFromType(key
, key
));
780 ty
|= std::move(key
);
781 effectful
|= !keyValid
|| (promotion
== Promotion::YesMightThrow
);
790 push(env
, keyset_map(std::move(map
)));
792 push(env
, keyset_n(ty
));
800 void in(ISS
& env
, const bc::AddElemC
&) {
801 auto const v
= topC(env
, 0);
802 auto const [k
, promotion
] = promote_classlike_to_key(topC(env
, 1));
803 auto const promoteMayThrow
= (promotion
== Promotion::YesMightThrow
);
805 auto inTy
= (env
.state
.stack
.end() - 3).unspecialize();
806 // Unspecialize modifies the stack location
807 if (env
.undo
) env
.undo
->onStackWrite(env
.state
.stack
.size() - 3, inTy
);
809 auto outTy
= [&] (const Type
& key
) -> Optional
<Type
> {
810 if (!key
.subtypeOf(BArrKey
)) return std::nullopt
;
811 if (inTy
.subtypeOf(BDict
)) {
812 auto const r
= array_like_set(std::move(inTy
), key
, v
);
813 if (!r
.second
) return r
.first
;
818 if (outTy
&& !promoteMayThrow
&& will_reduce(env
)) {
819 if (!env
.trackedElems
.empty() &&
820 env
.trackedElems
.back().depth
+ 3 == env
.state
.stack
.size()) {
821 auto const handled
= [&] (const Type
& key
) {
822 if (!key
.subtypeOf(BArrKey
)) return false;
824 if (!ktv
) return false;
826 if (!vtv
) return false;
827 return mutate_add_elem_array(env
, [&](ArrayData
** arr
) {
828 *arr
= (*arr
)->setMove(*ktv
, *vtv
);
832 (env
.state
.stack
.end() - 3)->type
= std::move(*outTy
);
833 reduce(env
, bc::PopC
{}, bc::PopC
{});
834 ITRACE(2, "(addelem* -> {}\n",
836 env
.replacedBcs
[env
.trackedElems
.back().idx
- env
.unchangedBcs
]));
840 if (start_add_elem(env
, *outTy
, Op::AddElemC
)) return;
845 finish_tracked_elems(env
, env
.state
.stack
.size());
847 if (!outTy
) return push(env
, TInitCell
);
849 if (outTy
->subtypeOf(BBottom
)) {
851 } else if (!promoteMayThrow
) {
855 push(env
, std::move(*outTy
));
858 void in(ISS
& env
, const bc::AddNewElemC
&) {
860 auto inTy
= (env
.state
.stack
.end() - 2).unspecialize();
861 // Unspecialize modifies the stack location
862 if (env
.undo
) env
.undo
->onStackWrite(env
.state
.stack
.size() - 2, inTy
);
864 auto outTy
= [&] () -> Optional
<Type
> {
865 if (inTy
.subtypeOf(BVec
| BKeyset
)) {
866 auto const r
= array_like_newelem(std::move(inTy
), v
);
867 if (!r
.second
) return r
.first
;
872 if (outTy
&& will_reduce(env
)) {
873 if (!env
.trackedElems
.empty() &&
874 env
.trackedElems
.back().depth
+ 2 == env
.state
.stack
.size()) {
875 auto const handled
= [&] {
877 if (!vtv
) return false;
878 return mutate_add_elem_array(env
, [&](ArrayData
** arr
) {
879 *arr
= (*arr
)->appendMove(*vtv
);
883 (env
.state
.stack
.end() - 2)->type
= std::move(*outTy
);
884 reduce(env
, bc::PopC
{});
885 ITRACE(2, "(addelem* -> {}\n",
887 env
.replacedBcs
[env
.trackedElems
.back().idx
- env
.unchangedBcs
]));
891 if (start_add_elem(env
, *outTy
, Op::AddNewElemC
)) {
898 finish_tracked_elems(env
, env
.state
.stack
.size());
900 if (!outTy
) return push(env
, TInitCell
);
902 if (outTy
->is(BBottom
)) {
907 push(env
, std::move(*outTy
));
910 void in(ISS
& env
, const bc::NewCol
& op
) {
911 auto const type
= static_cast<CollectionType
>(op
.subop1
);
912 auto const name
= collections::typeToString(type
);
913 push(env
, objExact(builtin_class(env
.index
, name
)));
917 void in(ISS
& env
, const bc::NewPair
& /*op*/) {
918 popC(env
); popC(env
);
919 auto const name
= collections::typeToString(CollectionType::Pair
);
920 push(env
, objExact(builtin_class(env
.index
, name
)));
924 void in(ISS
& env
, const bc::ColFromArray
& op
) {
925 auto const src
= popC(env
);
926 auto const type
= static_cast<CollectionType
>(op
.subop1
);
927 assertx(type
!= CollectionType::Pair
);
928 if (type
== CollectionType::Vector
|| type
== CollectionType::ImmVector
) {
929 if (src
.subtypeOf(TVec
)) effect_free(env
);
931 assertx(type
== CollectionType::Map
||
932 type
== CollectionType::ImmMap
||
933 type
== CollectionType::Set
||
934 type
== CollectionType::ImmSet
);
935 if (src
.subtypeOf(TDict
)) effect_free(env
);
937 auto const name
= collections::typeToString(type
);
938 push(env
, objExact(builtin_class(env
.index
, name
)));
941 void in(ISS
& env
, const bc::CnsE
& op
) {
942 auto t
= env
.index
.lookup_constant(env
.ctx
, op
.str1
);
943 if (t
.subtypeOf(BBottom
)) unreachable(env
);
945 push(env
, std::move(t
));
950 void clsCnsImpl(ISS
& env
, const Type
& cls
, const Type
& name
) {
951 if (!cls
.couldBe(BCls
) || !name
.couldBe(BStr
)) {
957 auto lookup
= lookupClsConstant(env
, cls
, name
);
958 if (lookup
.found
== TriBool::No
|| lookup
.ty
.is(BBottom
)) {
964 if (cls
.subtypeOf(BCls
) &&
965 name
.subtypeOf(BStr
) &&
966 lookup
.found
== TriBool::Yes
&&
967 !lookup
.mightThrow
) {
972 push(env
, std::move(lookup
.ty
));
977 void in(ISS
& env
, const bc::ClsCns
& op
) {
978 auto const cls
= topC(env
);
980 if (cls
.subtypeOf(BCls
) && is_specialized_cls(cls
)) {
981 auto const& dcls
= dcls_of(cls
);
982 if (dcls
.isExact()) {
984 env
, bc::PopC
{}, bc::ClsCnsD
{ op
.str1
, dcls
.cls().name() }
990 clsCnsImpl(env
, cls
, sval(op
.str1
));
993 void in(ISS
& env
, const bc::ClsCnsL
& op
) {
994 auto const cls
= topC(env
);
995 auto const name
= locRaw(env
, op
.loc1
);
997 if (name
.subtypeOf(BStr
) && is_specialized_string(name
)) {
998 return reduce(env
, bc::ClsCns
{ sval_of(name
) });
1002 clsCnsImpl(env
, cls
, name
);
1005 void in(ISS
& env
, const bc::ClsCnsD
& op
) {
1006 auto const rcls
= env
.index
.resolve_class(op
.str2
);
1012 clsCnsImpl(env
, clsExact(*rcls
, true), sval(op
.str1
));
1015 void in(ISS
& env
, const bc::File
&) {
1016 if (!options
.SourceRootForFileBC
) {
1018 return push(env
, TSStr
);
1021 auto filename
= env
.ctx
.func
->originalFilename
1022 ? env
.ctx
.func
->originalFilename
1023 : env
.ctx
.func
->unit
;
1024 if (!FileUtil::isAbsolutePath(filename
->slice())) {
1025 filename
= makeStaticString(
1026 *options
.SourceRootForFileBC
+ filename
->toCppString()
1030 push(env
, sval(filename
));
1033 void in(ISS
& env
, const bc::Dir
&) {
1034 if (!options
.SourceRootForFileBC
) {
1036 return push(env
, TSStr
);
1039 auto filename
= env
.ctx
.func
->originalFilename
1040 ? env
.ctx
.func
->originalFilename
1041 : env
.ctx
.func
->unit
;
1042 if (!FileUtil::isAbsolutePath(filename
->slice())) {
1043 filename
= makeStaticString(
1044 *options
.SourceRootForFileBC
+ filename
->toCppString()
1048 push(env
, sval(makeStaticString(FileUtil::dirname(StrNR
{filename
}))));
1051 void in(ISS
& env
, const bc::Method
&) {
1052 auto const fullName
= [&] () -> const StringData
* {
1053 if (!env
.ctx
.func
->cls
) return env
.ctx
.func
->name
;
1054 return makeStaticString(
1055 folly::sformat("{}::{}", env
.ctx
.func
->cls
->name
, env
.ctx
.func
->name
)
1059 push(env
, sval(fullName
));
1062 void in(ISS
& env
, const bc::FuncCred
&) { effect_free(env
); push(env
, TObj
); }
1064 void in(ISS
& env
, const bc::ClassName
& op
) {
1065 auto const ty
= topC(env
);
1066 if (ty
.subtypeOf(BCls
) && is_specialized_cls(ty
)) {
1067 auto const& dcls
= dcls_of(ty
);
1068 if (dcls
.isExact()) {
1071 bc::String
{ dcls
.cls().name() });
1079 void in(ISS
& env
, const bc::LazyClassFromClass
&) {
1080 auto const ty
= topC(env
);
1081 if (ty
.subtypeOf(BCls
) && is_specialized_cls(ty
)) {
1082 auto const& dcls
= dcls_of(ty
);
1083 if (dcls
.isExact()) {
1086 bc::LazyClass
{ dcls
.cls().name() });
1091 push(env
, TLazyCls
);
1094 void in(ISS
& env
, const bc::EnumClassLabelName
& op
) {
1095 auto const ty
= topC(env
);
1096 if (ty
.subtypeOf(BEnumClassLabel
) && is_specialized_ecl(ty
)) {
1097 auto const& label
= eclval_of(ty
);
1100 bc::String
{ label
});
1102 if (ty
.subtypeOf(BEnumClassLabel
)) effect_free(env
);
1107 void concatHelper(ISS
& env
, uint32_t n
) {
1108 auto changed
= false;
1109 auto side_effects
= false;
1110 if (will_reduce(env
)) {
1111 auto litstr
= [&] (SString next
, uint32_t i
) -> SString
{
1112 auto const t
= topC(env
, i
);
1113 auto const v
= tv(t
);
1114 if (!v
) return nullptr;
1115 if (!isStringType(v
->m_type
) && !isIntType(v
->m_type
)) return nullptr;
1116 auto const cell
= eval_cell_value(
1118 auto const s
= makeStaticString(
1120 StringData::Make(tvAsCVarRef(&*v
).toString().get(), next
) :
1121 tvAsCVarRef(&*v
).toString().get());
1122 return make_tv
<KindOfString
>(s
);
1125 if (!cell
) return nullptr;
1126 return cell
->m_data
.pstr
;
1129 auto fold
= [&] (uint32_t slot
, uint32_t num
, SString result
) {
1130 auto const cell
= make_tv
<KindOfPersistentString
>(result
);
1131 auto const ty
= from_cell(cell
);
1132 BytecodeVec bcs
{num
, bc::PopC
{}};
1133 if (num
> 1) bcs
.push_back(gen_constant(cell
));
1135 reduce(env
, std::move(bcs
));
1137 insert_after_slot(env
, slot
, num
, num
> 1 ? 1 : 0, &ty
, bcs
);
1144 for (auto i
= 0; i
< n
; i
++) {
1145 if (!topC(env
, i
).subtypeOf(BArrKey
)) {
1146 side_effects
= true;
1151 if (!side_effects
) {
1152 for (auto i
= 0; i
< n
; i
++) {
1153 auto const prev
= op_from_slot(env
, i
);
1154 if (!prev
) continue;
1155 if (prev
->op
== Op::Concat
|| prev
->op
== Op::ConcatN
) {
1156 auto const extra
= kill_by_slot(env
, i
);
1164 SString result
= nullptr;
1168 // In order to collapse literals, we need to be able to insert
1169 // pops, and a constant after the sequence that generated the
1170 // literals. We can always insert after the last instruction
1171 // though, and we only need to check the first slot of a
1173 auto const next
= !i
|| result
|| can_insert_after_slot(env
, i
) ?
1174 litstr(result
, i
) : nullptr;
1175 if (next
== staticEmptyString()) {
1177 // don't fold away empty strings if the concat could trigger exceptions
1178 if (i
== 0 && !topC(env
, 1).subtypeOf(BArrKey
)) break;
1179 if (n
== 2 && i
== 1 && !topC(env
, 0).subtypeOf(BArrKey
)) break;
1187 fold(i
- nlit
, nlit
, result
);
1197 if (nlit
> 1) fold(i
- nlit
, nlit
, result
);
1207 if (!topC(env
).subtypeOf(BStr
)) {
1208 return reduce(env
, bc::CastString
{});
1214 // We can't reduce the emitted concats, or we'll end up with
1215 // infinite recursion.
1216 env
.flags
.wasPEI
= true;
1217 env
.flags
.effectFree
= false;
1218 env
.flags
.canConstProp
= false;
1220 auto concat
= [&] (uint32_t num
) {
1224 record(env
, bc::Concat
{});
1226 record(env
, bc::ConcatN
{ num
});
1234 if (n
> 1) concat(n
);
1237 void in(ISS
& env
, const bc::Concat
& /*op*/) {
1238 concatHelper(env
, 2);
1241 void in(ISS
& env
, const bc::ConcatN
& op
) {
1242 if (op
.arg1
== 2) return reduce(env
, bc::Concat
{});
1243 concatHelper(env
, op
.arg1
);
1246 template <class Op
, class Fun
>
1247 void arithImpl(ISS
& env
, const Op
& /*op*/, Fun fun
) {
1249 auto const t1
= popC(env
);
1250 auto const t2
= popC(env
);
1251 auto r
= fun(t2
, t1
);
1252 if (r
.is(BBottom
)) unreachable(env
);
1253 push(env
, std::move(r
));
1256 void in(ISS
& env
, const bc::Add
& op
) { arithImpl(env
, op
, typeAdd
); }
1257 void in(ISS
& env
, const bc::Sub
& op
) { arithImpl(env
, op
, typeSub
); }
1258 void in(ISS
& env
, const bc::Mul
& op
) { arithImpl(env
, op
, typeMul
); }
1259 void in(ISS
& env
, const bc::Div
& op
) { arithImpl(env
, op
, typeDiv
); }
1260 void in(ISS
& env
, const bc::Mod
& op
) { arithImpl(env
, op
, typeMod
); }
1261 void in(ISS
& env
, const bc::Pow
& op
) { arithImpl(env
, op
, typePow
); }
1262 void in(ISS
& env
, const bc::BitAnd
& op
) { arithImpl(env
, op
, typeBitAnd
); }
1263 void in(ISS
& env
, const bc::BitOr
& op
) { arithImpl(env
, op
, typeBitOr
); }
1264 void in(ISS
& env
, const bc::BitXor
& op
) { arithImpl(env
, op
, typeBitXor
); }
1265 void in(ISS
& env
, const bc::Shl
& op
) { arithImpl(env
, op
, typeShl
); }
1266 void in(ISS
& env
, const bc::Shr
& op
) { arithImpl(env
, op
, typeShr
); }
1268 void in(ISS
& env
, const bc::BitNot
& /*op*/) {
1269 auto const t
= popC(env
);
1270 auto const v
= tv(t
);
1271 if (!t
.couldBe(BInt
| BStr
| BSStr
| BLazyCls
| BCls
)) {
1273 return push(env
, TBottom
);
1278 auto cell
= eval_cell([&] {
1283 if (cell
) return push(env
, std::move(*cell
));
1285 push(env
, TInitCell
);
1290 template<bool NSame
>
1291 std::pair
<Type
,bool> resolveSame(ISS
& env
) {
1292 auto const l1
= topStkEquiv(env
, 0);
1293 auto const t1
= topC(env
, 0);
1294 auto const l2
= topStkEquiv(env
, 1);
1295 auto const t2
= topC(env
, 1);
1297 auto warningsEnabled
=
1298 (Cfg::Eval::EmitClsMethPointers
||
1299 Cfg::Eval::RaiseClassConversionNoticeSampleRate
> 0);
1301 auto const result
= [&] {
1302 auto const v1
= tv(t1
);
1303 auto const v2
= tv(t2
);
1305 if (l1
== StackDupId
||
1306 (l1
== l2
&& l1
!= NoLocalId
) ||
1307 (l1
<= MaxLocalId
&& l2
<= MaxLocalId
&& locsAreEquiv(env
, l1
, l2
))) {
1308 if (!t1
.couldBe(BDbl
) || !t2
.couldBe(BDbl
) ||
1309 (v1
&& (v1
->m_type
!= KindOfDouble
|| !std::isnan(v1
->m_data
.dbl
))) ||
1310 (v2
&& (v2
->m_type
!= KindOfDouble
|| !std::isnan(v2
->m_data
.dbl
)))) {
1311 return NSame
? TFalse
: TTrue
;
1316 if (auto r
= eval_cell_value([&]{ return tvSame(*v2
, *v1
); })) {
1317 // we wouldn't get here if cellSame raised a warning
1318 warningsEnabled
= false;
1319 return r
!= NSame
? TTrue
: TFalse
;
1323 return NSame
? typeNSame(t1
, t2
) : typeSame(t1
, t2
);
1326 if (warningsEnabled
&& result
== (NSame
? TFalse
: TTrue
)) {
1327 warningsEnabled
= false;
1329 return { result
, warningsEnabled
&& compare_might_raise(t1
, t2
) };
1332 template<bool Negate
>
1333 void sameImpl(ISS
& env
) {
1334 if (auto const last
= last_op(env
)) {
1335 if (last
->op
== Op::Null
) {
1337 reduce(env
, bc::IsTypeC
{ IsTypeOp::Null
});
1338 if (Negate
) reduce(env
, bc::Not
{});
1341 if (auto const prev
= last_op(env
, 1)) {
1342 if (prev
->op
== Op::Null
&&
1343 (last
->op
== Op::CGetL
|| last
->op
== Op::CGetL2
||
1344 last
->op
== Op::CGetQuietL
)) {
1345 auto const loc
= [&]() {
1346 if (last
->op
== Op::CGetL
) {
1347 return last
->CGetL
.nloc1
;
1348 } else if (last
->op
== Op::CGetL2
) {
1349 return last
->CGetL2
.nloc1
;
1350 } else if (last
->op
== Op::CGetQuietL
) {
1351 return NamedLocal
{kInvalidLocalName
, last
->CGetQuietL
.loc1
};
1353 always_assert(false);
1356 reduce(env
, bc::IsTypeL
{ loc
, IsTypeOp::Null
});
1357 if (Negate
) reduce(env
, bc::Not
{});
1363 auto pair
= resolveSame
<Negate
>(env
);
1371 push(env
, std::move(pair
.first
));
1374 template<class JmpOp
>
1375 bool sameJmpImpl(ISS
& env
, Op sameOp
, const JmpOp
& jmp
) {
1376 const StackElem
* elems
[2];
1377 env
.state
.stack
.peek(2, elems
, 1);
1379 auto const loc0
= elems
[1]->equivLoc
;
1380 auto const loc1
= elems
[0]->equivLoc
;
1381 // If loc0 == loc1, either they're both NoLocalId, so there's
1382 // nothing for us to deduce, or both stack elements are the same
1383 // value, so the only thing we could deduce is that they are or are
1384 // not NaN. But we don't track that, so just bail.
1385 if (loc0
== loc1
|| loc0
== StackDupId
) return false;
1387 auto const ty0
= elems
[1]->type
;
1388 auto const ty1
= elems
[0]->type
;
1389 auto const val0
= tv(ty0
);
1390 auto const val1
= tv(ty1
);
1392 assertx(!val0
|| !val1
);
1393 if ((loc0
== NoLocalId
&& !val0
&& ty1
.subtypeOf(ty0
)) ||
1394 (loc1
== NoLocalId
&& !val1
&& ty0
.subtypeOf(ty1
))) {
1398 // Same currently lies about the distinction between Func/Cls/Str
1399 if (ty0
.couldBe(BCls
) && ty1
.couldBe(BStr
)) return false;
1400 if (ty1
.couldBe(BCls
) && ty0
.couldBe(BStr
)) return false;
1401 if (ty0
.couldBe(BLazyCls
) && ty1
.couldBe(BStr
)) return false;
1402 if (ty1
.couldBe(BLazyCls
) && ty0
.couldBe(BStr
)) return false;
1404 auto isect
= intersection_of(ty0
, ty1
);
// Unfortunately, floating point negative zero and positive zero are
// different values, but are identical as far as Same is concerned. We
// should avoid refining a value to 0.0 because -0.0 compares identical
// to 0.0.
1409 if (isect
.couldBe(dval(0.0)) || isect
.couldBe(dval(-0.0))) {
1410 isect
= union_of(isect
, TDbl
);
1415 auto handle_same
= [&] {
1416 // Currently dce uses equivalency to prove that something isn't
1417 // the last reference - so we can only assert equivalency here if
1418 // we know that won't be affected. Its irrelevant for uncounted
1419 // things, and for TObj and TRes, $x === $y iff $x and $y refer to
1421 if (loc0
<= MaxLocalId
&&
1422 (ty0
.subtypeOf(BObj
| BRes
| BPrim
) ||
1423 ty1
.subtypeOf(BObj
| BRes
| BPrim
) ||
1424 (ty0
.subtypeOf(BUnc
) && ty1
.subtypeOf(BUnc
)))) {
1425 if (loc1
== StackDupId
) {
1426 setStkLocal(env
, loc0
, 0);
1427 } else if (loc1
<= MaxLocalId
&& !locsAreEquiv(env
, loc0
, loc1
)) {
1430 auto const other
= findLocEquiv(env
, loc
);
1431 if (other
== NoLocalId
) break;
1432 killLocEquiv(env
, loc
);
1433 addLocEquiv(env
, loc
, loc1
);
1436 addLocEquiv(env
, loc
, loc1
);
1439 return refineLocation(env
, loc1
!= NoLocalId
? loc1
: loc0
, [&] (Type ty
) {
1440 auto const needsUninit
=
1441 ty
.couldBe(BUninit
) &&
1442 !isect
.couldBe(BUninit
) &&
1443 isect
.couldBe(BInitNull
);
1444 auto ret
= ty
.subtypeOf(BUnc
) ? isect
: loosen_staticness(isect
);
1445 if (needsUninit
) ret
= union_of(std::move(ret
), TUninit
);
1451 auto handle_differ_side
= [&] (LocalId location
, const Type
& ty
) {
1452 if (!ty
.subtypeOf(BInitNull
) && !ty
.strictSubtypeOf(TBool
)) return true;
1453 return refineLocation(env
, location
, [&] (Type t
) {
1454 if (ty
.subtypeOf(BNull
)) {
1455 t
= remove_uninit(std::move(t
));
1456 if (t
.couldBe(BInitNull
) && !t
.subtypeOf(BInitNull
)) {
1457 t
= unopt(std::move(t
));
1460 } else if (ty
.strictSubtypeOf(TBool
) && t
.subtypeOf(BBool
)) {
1461 return ty
== TFalse
? TTrue
: TFalse
;
1467 auto handle_differ
= [&] {
1469 (loc0
== NoLocalId
|| handle_differ_side(loc0
, ty1
)) &&
1470 (loc1
== NoLocalId
|| handle_differ_side(loc1
, ty0
));
1473 auto const sameIsJmpTarget
=
1474 (sameOp
== Op::Same
) == (JmpOp::op
== Op::JmpNZ
);
1476 auto save
= env
.state
;
1477 auto const target_reachable
= sameIsJmpTarget
?
1478 handle_same() : handle_differ();
1479 if (!target_reachable
) jmp_nevertaken(env
);
1480 // swap, so we can restore this state if the branch is always taken.
1481 env
.state
.swap(save
);
1482 if (!(sameIsJmpTarget
? handle_differ() : handle_same())) {
1483 jmp_setdest(env
, jmp
.target1
);
1484 env
.state
.copy_from(std::move(save
));
1485 } else if (target_reachable
) {
1486 env
.propagate(jmp
.target1
, &save
);
1492 bc::JmpNZ
invertJmp(const bc::JmpZ
& jmp
) { return bc::JmpNZ
{ jmp
.target1
}; }
1493 bc::JmpZ
invertJmp(const bc::JmpNZ
& jmp
) { return bc::JmpZ
{ jmp
.target1
}; }
1497 void in(ISS
& env
, const bc::Same
&) { sameImpl
<false>(env
); }
1498 void in(ISS
& env
, const bc::NSame
&) { sameImpl
<true>(env
); }
1501 void cmpImpl(ISS
& env
, Fun fun
) {
1502 auto const t1
= popC(env
);
1503 auto const t2
= popC(env
);
1504 auto const v1
= tv(t1
);
1505 auto const v2
= tv(t2
);
1507 if (auto r
= eval_cell_value([&]{ return fun(*v2
, *v1
); })) {
1509 return push(env
, *r
? TTrue
: TFalse
);
1512 // TODO_4: evaluate when these can throw, non-constant type stuff.
1518 bool couldBeStringish(const Type
& t
) {
1519 return t
.couldBe(BCls
| BLazyCls
| BStr
);
1522 bool everEq(const Type
& t1
, const Type
& t2
) {
1523 // for comparison purposes we need to be careful about these coercions
1524 if (couldBeStringish(t1
) && couldBeStringish(t2
)) return true;
1525 return loosen_all(t1
).couldBe(loosen_all(t2
));
1528 bool cmpWillThrow(const Type
& t1
, const Type
& t2
) {
1529 // for comparison purposes we need to be careful about these coercions
1530 if (couldBeStringish(t1
) && couldBeStringish(t2
)) return false;
1532 auto couldBeIntAndDbl
= [](const Type
& t1
, const Type
& t2
) {
1533 return t1
.couldBe(BInt
) && t2
.couldBe(BDbl
);
1535 // relational comparisons allow for int v dbl
1536 if (couldBeIntAndDbl(t1
, t2
) || couldBeIntAndDbl(t2
, t1
)) return false;
1538 return !loosen_to_datatype(t1
).couldBe(loosen_to_datatype(t2
));
1541 void eqImpl(ISS
& env
, bool eq
) {
1542 auto rs
= resolveSame
<false>(env
);
1543 if (rs
.first
== TTrue
) {
1544 if (!rs
.second
) constprop(env
);
1546 return push(env
, eq
? TTrue
: TFalse
);
1549 if (!everEq(topC(env
, 0), topC(env
, 1))) {
1551 return push(env
, eq
? TFalse
: TTrue
);
1554 cmpImpl(env
, [&] (TypedValue c1
, TypedValue c2
) {
1555 return tvEqual(c1
, c2
) == eq
;
1559 bool cmpThrowCheck(ISS
& env
, const Type
& t1
, const Type
& t2
) {
1560 if (!cmpWillThrow(t1
, t2
)) return false;
1569 void in(ISS
& env
, const bc::Eq
&) { eqImpl(env
, true); }
1570 void in(ISS
& env
, const bc::Neq
&) { eqImpl(env
, false); }
1572 void in(ISS
& env
, const bc::Lt
&) {
1573 if (cmpThrowCheck(env
, topC(env
, 0), topC(env
, 1))) return;
1574 cmpImpl(env
, static_cast<bool (*)(TypedValue
, TypedValue
)>(tvLess
));
1576 void in(ISS
& env
, const bc::Gt
&) {
1577 if (cmpThrowCheck(env
, topC(env
, 0), topC(env
, 1))) return;
1578 cmpImpl(env
, static_cast<bool (*)(TypedValue
, TypedValue
)>(tvGreater
));
1580 void in(ISS
& env
, const bc::Lte
&) {
1581 if (cmpThrowCheck(env
, topC(env
, 0), topC(env
, 1))) return;
1582 cmpImpl(env
, tvLessOrEqual
);
1584 void in(ISS
& env
, const bc::Gte
&) {
1585 if (cmpThrowCheck(env
, topC(env
, 0), topC(env
, 1))) return;
1586 cmpImpl(env
, tvGreaterOrEqual
);
1589 void in(ISS
& env
, const bc::Cmp
&) {
1590 auto const t1
= topC(env
, 0);
1591 auto const t2
= topC(env
, 1);
1592 if (cmpThrowCheck(env
, t1
, t2
)) return;
1595 auto const v1
= tv(t1
);
1596 auto const v2
= tv(t2
);
1598 if (auto r
= eval_cell_value([&]{ return ival(tvCompare(*v2
, *v1
)); })) {
1600 return push(env
, std::move(*r
));
1604 // TODO_4: evaluate when these can throw, non-constant type stuff.
1608 void castBoolImpl(ISS
& env
, const Type
& t
, bool negate
) {
1609 auto const [e
, effectFree
] = emptiness(t
);
1616 case Emptiness::Empty
:
1617 case Emptiness::NonEmpty
:
1618 return push(env
, (e
== Emptiness::Empty
) == negate
? TTrue
: TFalse
);
1619 case Emptiness::Maybe
:
1626 void in(ISS
& env
, const bc::Not
&) {
1627 castBoolImpl(env
, popC(env
), true);
1630 void in(ISS
& env
, const bc::CastBool
&) {
1631 auto const t
= topC(env
);
1632 if (t
.subtypeOf(BBool
)) return reduce(env
);
1633 castBoolImpl(env
, popC(env
), false);
1636 void in(ISS
& env
, const bc::CastInt
&) {
1637 auto const t
= topC(env
);
1638 if (t
.subtypeOf(BInt
)) return reduce(env
);
1641 // Objects can raise a warning about converting to int.
1642 if (!t
.couldBe(BObj
)) nothrow(env
);
1643 if (auto const v
= tv(t
)) {
1644 auto cell
= eval_cell([&] {
1645 return make_tv
<KindOfInt64
>(tvToInt(*v
));
1647 if (cell
) return push(env
, std::move(*cell
));
1652 // Handle a casting operation, where "target" is the type being casted to. If
1653 // "fn" is provided, it will be called to cast any constant inputs. If "elide"
1654 // is set to true, if the source type is the same as the destination, the cast
1655 // will be optimized away.
1656 void castImpl(ISS
& env
, Type target
, void(*fn
)(TypedValue
*)) {
1657 auto const t
= topC(env
);
1658 if (t
.subtypeOf(target
)) return reduce(env
);
1662 if (auto val
= tv(t
)) {
1663 if (auto result
= eval_cell([&] { fn(&*val
); return *val
; })) {
1669 push(env
, std::move(target
));
1672 void in(ISS
& env
, const bc::CastDouble
&) {
1673 castImpl(env
, TDbl
, tvCastToDoubleInPlace
);
1676 void in(ISS
& env
, const bc::CastString
&) {
1677 castImpl(env
, TStr
, tvCastToStringInPlace
);
1680 void in(ISS
& env
, const bc::CastDict
&) {
1681 castImpl(env
, TDict
, tvCastToDictInPlace
);
1684 void in(ISS
& env
, const bc::CastVec
&) {
1685 castImpl(env
, TVec
, tvCastToVecInPlace
);
1688 void in(ISS
& env
, const bc::CastKeyset
&) {
1689 castImpl(env
, TKeyset
, tvCastToKeysetInPlace
);
1692 void in(ISS
& env
, const bc::DblAsBits
&) {
1696 auto const ty
= popC(env
);
1697 if (!ty
.couldBe(BDbl
)) return push(env
, ival(0));
1699 if (auto val
= tv(ty
)) {
1700 assertx(isDoubleType(val
->m_type
));
1701 val
->m_type
= KindOfInt64
;
1702 push(env
, from_cell(*val
));
1709 void in(ISS
& env
, const bc::Print
& /*op*/) {
1714 void in(ISS
& env
, const bc::Clone
& /*op*/) {
1715 auto val
= popC(env
);
1716 if (!val
.subtypeOf(BObj
)) {
1718 if (val
.is(BBottom
)) unreachable(env
);
1720 push(env
, std::move(val
));
1723 void in(ISS
& env
, const bc::Exit
&) { popC(env
); push(env
, TInitNull
); }
1724 void in(ISS
& env
, const bc::Fatal
&) { popC(env
); }
1725 void in(ISS
& env
, const bc::StaticAnalysisError
&) {}
1727 void in(ISS
& env
, const bc::Enter
& op
) {
1728 always_assert(op
.target1
== env
.ctx
.func
->mainEntry
);
1729 env
.propagate(env
.ctx
.func
->mainEntry
, &env
.state
);
1732 void in(ISS
& /*env*/, const bc::Jmp
&) {
1733 always_assert(0 && "blocks should not contain Jmp instructions");
1736 void in(ISS
& env
, const bc::Select
& op
) {
1737 auto const cond
= topC(env
);
1738 auto const t
= topC(env
, 1);
1739 auto const f
= topC(env
, 2);
1741 auto const [e
, effectFree
] = emptiness(cond
);
1748 case Emptiness::Maybe
:
1750 push(env
, union_of(t
, f
));
1752 case Emptiness::NonEmpty
:
1756 case Emptiness::Empty
:
1757 return reduce(env
, bc::PopC
{}, bc::PopC
{});
1764 template<class JmpOp
>
1765 bool isTypeHelper(ISS
& env
,
1770 if (typeOp
== IsTypeOp::Scalar
|| typeOp
== IsTypeOp::LegacyArrLike
||
1771 typeOp
== IsTypeOp::Func
) {
1775 auto const val
= [&] {
1776 if (op
!= Op::IsTypeC
) return locRaw(env
, location
);
1777 const StackElem
* elem
;
1778 env
.state
.stack
.peek(1, &elem
, 1);
1779 location
= elem
->equivLoc
;
1783 if (location
== NoLocalId
|| !val
.subtypeOf(BCell
)) return false;
1785 // If the type could be ClsMeth and Arr/Vec, skip location refining.
1786 // Otherwise, refine location based on the testType.
1787 auto testTy
= type_of_istype(typeOp
);
1789 assertx(val
.couldBe(testTy
) &&
1790 (!val
.subtypeOf(testTy
) || val
.subtypeOf(BObj
)));
1794 if (op
== Op::IsTypeC
) {
1795 if (!is_type_might_raise(testTy
, val
)) nothrow(env
);
1796 } else if (op
== Op::IssetL
) {
1798 } else if (!locCouldBeUninit(env
, location
) &&
1799 !is_type_might_raise(testTy
, val
)) {
1803 auto const negate
= (jmp
.op
== Op::JmpNZ
) == (op
!= Op::IssetL
);
1804 auto const was_true
= [&] (Type t
) {
1805 if (testTy
.subtypeOf(BNull
)) return intersection_of(t
, TNull
);
1806 assertx(!testTy
.couldBe(BNull
));
1807 return intersection_of(t
, testTy
);
1809 auto const was_false
= [&] (Type t
) {
1810 auto tinit
= remove_uninit(t
);
1811 if (testTy
.subtypeOf(BNull
)) {
1812 return (tinit
.couldBe(BInitNull
) && !tinit
.subtypeOf(BInitNull
))
1813 ? unopt(std::move(tinit
)) : tinit
;
1815 if (t
.couldBe(BInitNull
) && !t
.subtypeOf(BInitNull
)) {
1816 assertx(!testTy
.couldBe(BNull
));
1817 if (unopt(tinit
).subtypeOf(testTy
)) return TNull
;
1822 auto const taken
= [&] (Type t
) {
1823 return negate
? was_true(std::move(t
)) : was_false(std::move(t
));
1826 auto const fallthrough
= [&] (Type t
) {
1827 return negate
? was_false(std::move(t
)) : was_true(std::move(t
));
1830 refineLocation(env
, location
, taken
, jmp
.target1
, fallthrough
);
1834 template<class JmpOp
>
1835 bool instanceOfJmpImpl(ISS
& env
,
1836 const bc::InstanceOfD
& inst
,
1839 const StackElem
* elem
;
1840 env
.state
.stack
.peek(1, &elem
, 1);
1842 auto const locId
= elem
->equivLoc
;
1843 if (locId
== NoLocalId
|| interface_supports_non_objects(inst
.str1
)) {
1846 auto const rcls
= env
.index
.resolve_class(inst
.str1
);
1847 if (!rcls
) return false;
1849 auto const val
= elem
->type
;
1850 auto const instTy
= subObj(*rcls
);
1851 assertx(!val
.subtypeOf(instTy
) && val
.couldBe(instTy
));
1853 // If we have an optional type, whose unopt is guaranteed to pass
1854 // the instanceof check, then failing to pass implies it was null.
1855 auto const fail_implies_null
=
1856 val
.couldBe(BInitNull
) &&
1857 !val
.subtypeOf(BInitNull
) &&
1858 unopt(val
).subtypeOf(instTy
);
1861 auto const negate
= jmp
.op
== Op::JmpNZ
;
1862 auto const result
= [&] (Type t
, bool pass
) {
1863 return pass
? instTy
: fail_implies_null
? TNull
: t
;
1865 auto const taken
= [&] (Type t
) { return result(t
, negate
); };
1866 auto const fallthrough
= [&] (Type t
) { return result(t
, !negate
); };
1867 refineLocation(env
, locId
, taken
, jmp
.target1
, fallthrough
);
1871 template<class JmpOp
>
1872 bool isTypeStructCJmpImpl(ISS
& env
,
1873 const bc::IsTypeStructC
& inst
,
1876 const StackElem
* elems
[2];
1877 env
.state
.stack
.peek(2, elems
, 1);
1879 auto const locId
= elems
[0]->equivLoc
;
1880 if (locId
== NoLocalId
) return false;
1882 auto const a
= tv(elems
[1]->type
);
1883 if (!a
) return false;
1884 // if it wasn't valid, the JmpOp wouldn't be reachable
1885 assertx(isValidTSType(*a
, false));
1887 auto const is_nullable_ts
= is_ts_nullable(a
->m_data
.parr
);
1888 auto const ts_kind
= get_ts_kind(a
->m_data
.parr
);
1889 // type_of_type_structure does not resolve these types. It is important we
1890 // do resolve them here, or we may have issues when we reduce the checks to
1891 // InstanceOfD checks. This logic performs the same exact refinement as
1892 // instanceOfD will.
1893 if (is_nullable_ts
||
1894 (ts_kind
!= TypeStructure::Kind::T_class
&&
1895 ts_kind
!= TypeStructure::Kind::T_interface
&&
1896 ts_kind
!= TypeStructure::Kind::T_xhp
&&
1897 ts_kind
!= TypeStructure::Kind::T_unresolved
)) {
1901 auto const clsName
= get_ts_classname(a
->m_data
.parr
);
1903 if (interface_supports_non_objects(clsName
)) return false;
1905 auto const rcls
= env
.index
.resolve_class(clsName
);
1906 if (!rcls
) return false;
1908 auto const val
= elems
[0]->type
;
1909 auto const instTy
= subObj(*rcls
);
1910 if (val
.subtypeOf(instTy
) || !val
.couldBe(instTy
)) {
1914 auto const cls
= rcls
->cls();
1915 if (!cls
|| cls
->attrs
& AttrEnum
) return false;
1917 // If we have an optional type, whose unopt is guaranteed to pass
1918 // the instanceof check, then failing to pass implies it was null.
1919 auto const fail_implies_null
=
1920 val
.couldBe(BInitNull
) &&
1921 !val
.subtypeOf(BInitNull
) &&
1922 unopt(val
).subtypeOf(instTy
);
1926 auto const negate
= jmp
.op
== Op::JmpNZ
;
1927 auto const result
= [&] (Type t
, bool pass
) {
1928 return pass
? instTy
: fail_implies_null
? TNull
: t
;
1930 auto const taken
= [&] (Type t
) { return result(t
, negate
); };
1931 auto const fallthrough
= [&] (Type t
) { return result(t
, !negate
); };
1932 refineLocation(env
, locId
, taken
, jmp
.target1
, fallthrough
);
1936 template<class JmpOp
>
1937 void jmpImpl(ISS
& env
, const JmpOp
& op
) {
1938 auto const Negate
= std::is_same
<JmpOp
, bc::JmpNZ
>::value
;
1939 auto const location
= topStkEquiv(env
);
1940 auto const t
= topC(env
);
1941 auto const [e
, effectFree
] = emptiness(t
);
1942 if (e
== (Negate
? Emptiness::NonEmpty
: Emptiness::Empty
)) {
1943 reduce(env
, bc::PopC
{});
1944 return jmp_setdest(env
, op
.target1
);
1947 if (e
== (Negate
? Emptiness::Empty
: Emptiness::NonEmpty
) ||
1948 (next_real_block(env
.ctx
.func
, env
.blk
.fallthrough
) ==
1949 next_real_block(env
.ctx
.func
, op
.target1
))) {
1950 return reduce(env
, bc::PopC
{});
1954 if (env
.flags
.jmpDest
== NoBlockId
) return;
1955 auto const jmpDest
= env
.flags
.jmpDest
;
1956 env
.flags
.jmpDest
= NoBlockId
;
1958 reduce(env
, bc::PopC
{});
1959 env
.flags
.jmpDest
= jmpDest
;
1962 if (auto const last
= last_op(env
)) {
1963 if (last
->op
== Op::Not
) {
1965 return reduce(env
, invertJmp(op
));
1967 if (last
->op
== Op::Same
|| last
->op
== Op::NSame
) {
1968 if (sameJmpImpl(env
, last
->op
, op
)) return fix();
1969 } else if (last
->op
== Op::IssetL
) {
1970 if (isTypeHelper(env
,
1977 } else if (last
->op
== Op::IsTypeL
) {
1978 if (isTypeHelper(env
,
1979 last
->IsTypeL
.subop2
,
1980 last
->IsTypeL
.nloc1
.id
,
1985 } else if (last
->op
== Op::IsTypeC
) {
1986 if (isTypeHelper(env
,
1987 last
->IsTypeC
.subop1
,
1993 } else if (last
->op
== Op::InstanceOfD
) {
1994 if (instanceOfJmpImpl(env
, last
->InstanceOfD
, op
)) return fix();
1995 } else if (last
->op
== Op::IsTypeStructC
) {
1996 if (isTypeStructCJmpImpl(env
, last
->IsTypeStructC
, op
)) return fix();
2001 if (effectFree
) effect_free(env
);
2003 if (location
== NoLocalId
) return env
.propagate(op
.target1
, &env
.state
);
2005 refineLocation(env
, location
,
2006 Negate
? assert_nonemptiness
: assert_emptiness
,
2008 Negate
? assert_emptiness
: assert_nonemptiness
);
2014 void in(ISS
& env
, const bc::JmpNZ
& op
) { jmpImpl(env
, op
); }
2015 void in(ISS
& env
, const bc::JmpZ
& op
) { jmpImpl(env
, op
); }
2017 void in(ISS
& env
, const bc::Switch
& op
) {
2018 const auto t
= topC(env
);
2019 const auto v
= tv(t
);
2023 forEachTakenEdge(op
, [&] (BlockId id
) {
2024 env
.propagate(id
, &env
.state
);
2028 auto go
= [&] (BlockId blk
) {
2029 reduce(env
, bc::PopC
{});
2030 return jmp_setdest(env
, blk
);
2033 if (!t
.couldBe(BInt
)) {
2034 if (op
.subop1
== SwitchKind::Unbounded
) return bail();
2035 return go(op
.targets
.back());
2038 if (!v
) return bail();
2040 auto num_elems
= op
.targets
.size();
2041 if (op
.subop1
== SwitchKind::Unbounded
) {
2042 if (v
->m_data
.num
< 0 || v
->m_data
.num
>= num_elems
) return bail();
2043 return go(op
.targets
[v
->m_data
.num
]);
2046 assertx(num_elems
> 2);
2048 auto const i
= v
->m_data
.num
- op
.arg2
;
2049 return i
>= 0 && i
< num_elems
? go(op
.targets
[i
]) : go(op
.targets
.back());
2052 void in(ISS
& env
, const bc::SSwitch
& op
) {
2053 const auto t
= topC(env
);
2054 const auto v
= tv(t
);
2056 if (!couldBeStringish(t
)) {
2057 reduce(env
, bc::PopC
{});
2058 return jmp_setdest(env
, op
.targets
.back().second
);
2062 for (auto& kv
: op
.targets
) {
2063 auto match
= eval_cell_value([&] {
2064 if (!kv
.first
) return true;
2065 return v
->m_data
.pstr
->equal(kv
.first
);
2070 reduce(env
, bc::PopC
{});
2071 return jmp_setdest(env
, kv
.second
);
2077 forEachTakenEdge(op
, [&] (BlockId id
) {
2078 env
.propagate(id
, &env
.state
);
2082 void in(ISS
& env
, const bc::RetC
& /*op*/) {
2083 auto const locEquiv
= topStkLocal(env
);
2084 doRet(env
, popC(env
), false);
2085 if (locEquiv
!= NoLocalId
&& locEquiv
< env
.ctx
.func
->params
.size()) {
2086 env
.flags
.retParam
= locEquiv
;
2089 void in(ISS
& env
, const bc::RetM
& op
) {
2090 std::vector
<Type
> ret(op
.arg1
);
2091 for (int i
= 0; i
< op
.arg1
; i
++) {
2092 ret
[op
.arg1
- i
- 1] = popC(env
);
2094 doRet(env
, vec(std::move(ret
)), false);
2097 void in(ISS
& env
, const bc::RetCSuspended
&) {
2098 always_assert(env
.ctx
.func
->isAsync
&& !env
.ctx
.func
->isGenerator
);
2100 auto const t
= popC(env
);
2103 is_specialized_wait_handle(t
) ? wait_handle_inner(t
) : TInitCell
,
2108 void in(ISS
& env
, const bc::Throw
& /*op*/) {
2112 void in(ISS
& env
, const bc::ThrowNonExhaustiveSwitch
& /*op*/) {}
2114 void in(ISS
& env
, const bc::RaiseClassStringConversionNotice
& /*op*/) {}
2116 void in(ISS
& env
, const bc::ChainFaults
&) {
2120 void in(ISS
& env
, const bc::NativeImpl
&) {
2123 if (env
.ctx
.func
->isNative
) {
2124 return doRet(env
, native_function_return_type(env
.ctx
.func
), true);
2126 doRet(env
, TInitCell
, true);
2129 void in(ISS
& env
, const bc::CGetL
& op
) {
2130 if (locIsThis(env
, op
.nloc1
.id
)) {
2131 auto const& ty
= peekLocRaw(env
, op
.nloc1
.id
);
2132 if (!ty
.subtypeOf(BInitNull
)) {
2133 auto const subop
= ty
.couldBe(BUninit
) ?
2134 BareThisOp::Notice
: ty
.couldBe(BNull
) ?
2135 BareThisOp::NoNotice
: BareThisOp::NeverNull
;
2136 return reduce(env
, bc::BareThis
{ subop
});
2139 if (auto const last
= last_op(env
)) {
2140 if (last
->op
== Op::PopL
&&
2141 op
.nloc1
.id
== last
->PopL
.loc1
) {
2144 setLocRaw(env
, op
.nloc1
.id
, TCell
);
2145 return reduce(env
, bc::SetL
{ op
.nloc1
.id
});
2148 if (!peekLocCouldBeUninit(env
, op
.nloc1
.id
)) {
2149 auto const minLocEquiv
= findMinLocEquiv(env
, op
.nloc1
.id
, false);
2150 auto const loc
= minLocEquiv
!= NoLocalId
? minLocEquiv
: op
.nloc1
.id
;
2151 return reduce(env
, bc::CGetQuietL
{ loc
});
2153 mayReadLocal(env
, op
.nloc1
.id
);
2154 push(env
, locAsCell(env
, op
.nloc1
.id
), op
.nloc1
.id
);
2157 void in(ISS
& env
, const bc::CGetQuietL
& op
) {
2158 if (locIsThis(env
, op
.loc1
)) {
2159 return reduce(env
, bc::BareThis
{ BareThisOp::NoNotice
});
2161 if (auto const last
= last_op(env
)) {
2162 if (last
->op
== Op::PopL
&&
2163 op
.loc1
== last
->PopL
.loc1
) {
2166 setLocRaw(env
, op
.loc1
, TCell
);
2167 return reduce(env
, bc::SetL
{ op
.loc1
});
2170 auto const minLocEquiv
= findMinLocEquiv(env
, op
.loc1
, true);
2171 if (minLocEquiv
!= NoLocalId
) {
2172 return reduce(env
, bc::CGetQuietL
{ minLocEquiv
});
2177 mayReadLocal(env
, op
.loc1
);
2178 push(env
, locAsCell(env
, op
.loc1
), op
.loc1
);
2181 void in(ISS
& env
, const bc::CUGetL
& op
) {
2182 auto ty
= locRaw(env
, op
.loc1
);
2185 push(env
, std::move(ty
), op
.loc1
);
2188 void in(ISS
& env
, const bc::PushL
& op
) {
2189 auto const minLocEquiv
= findMinLocEquiv(env
, op
.loc1
, false);
2190 if (minLocEquiv
!= NoLocalId
) {
2191 return reduce(env
, bc::CGetQuietL
{ minLocEquiv
}, bc::UnsetL
{ op
.loc1
});
2194 if (auto const last
= last_op(env
)) {
2195 if (last
->op
== Op::PopL
&&
2196 last
->PopL
.loc1
== op
.loc1
) {
2197 // rewind is ok, because we're just going to unset the local
2198 // (and note the unset can't be a no-op because the PopL set it
2199 // to an InitCell). But its possible that before the PopL, the
2200 // local *was* unset, so maybe would have killed the no-op. The
2201 // only way to fix that is to reprocess the block with the new
2202 // instruction sequence and see what happens.
2205 return reduce(env
, bc::UnsetL
{ op
.loc1
});
2209 auto const& ty
= peekLocRaw(env
, op
.loc1
);
2210 if (ty
.subtypeOf(BUninit
)) {
2211 // It's unsafe to ever perform a PushL on an uninit location, but we may
2212 // have generated a PushL in the HackC if we determined that a CGetL
2213 // could only be reached if the local it referenced was initialized, and
2214 // now that we know the local is uninitialized we know it must be
2217 // This can happen because the liveness analysis in HackC is much more
2218 // primitive than HHBBC. If we see a CGetL instruction HackC will assume the
2219 // local must be initialized afterwards (or it would have thrown) but make
2220 // no attempt to detect cases where we would unconditionally throw.
2223 if (auto val
= tv(peekLocRaw(env
, op
.loc1
))) {
2224 return reduce(env
, bc::UnsetL
{ op
.loc1
}, gen_constant(*val
));
2228 impl(env
, bc::CGetQuietL
{ op
.loc1
}, bc::UnsetL
{ op
.loc1
});
2231 void in(ISS
& env
, const bc::CGetL2
& op
) {
2232 if (auto const last
= last_op(env
)) {
2233 if ((poppable(last
->op
) && !numPop(*last
)) ||
2234 ((last
->op
== Op::CGetL
|| last
->op
== Op::CGetQuietL
) &&
2235 !peekLocCouldBeUninit(env
, op
.nloc1
.id
))) {
2236 auto const other
= *last
;
2238 return reduce(env
, bc::CGetL
{ op
.nloc1
}, other
);
2242 if (!peekLocCouldBeUninit(env
, op
.nloc1
.id
)) {
2243 auto const minLocEquiv
= findMinLocEquiv(env
, op
.nloc1
.id
, false);
2244 if (minLocEquiv
!= NoLocalId
) {
2245 return reduce(env
, bc::CGetL2
{ { kInvalidLocalName
, minLocEquiv
} });
2249 mayReadLocal(env
, op
.nloc1
.id
);
2250 auto loc
= locAsCell(env
, op
.nloc1
.id
);
2251 auto topEquiv
= topStkLocal(env
);
2252 auto top
= popT(env
);
2253 push(env
, std::move(loc
), op
.nloc1
.id
);
2254 push(env
, std::move(top
), topEquiv
);
2257 void in(ISS
& env
, const bc::CGetG
&) { popC(env
); push(env
, TInitCell
); }
2259 void in(ISS
& env
, const bc::CGetS
& op
) {
2260 auto const tcls
= popC(env
);
2261 auto const tname
= popC(env
);
2263 auto const throws
= [&] {
2265 return push(env
, TBottom
);
2268 if (!tcls
.couldBe(BCls
)) return throws();
2270 auto lookup
= env
.index
.lookup_static(
2277 if (lookup
.found
== TriBool::No
|| lookup
.ty
.subtypeOf(BBottom
)) {
2281 auto const mustBeMutable
= ReadonlyOp::Mutable
== op
.subop1
;
2282 if (mustBeMutable
&& lookup
.readOnly
== TriBool::Yes
) {
2285 auto const mightReadOnlyThrow
=
2287 lookup
.readOnly
== TriBool::Maybe
;
2289 if (lookup
.found
== TriBool::Yes
&&
2290 lookup
.lateInit
== TriBool::No
&&
2291 lookup
.internal
== TriBool::No
&&
2292 !lookup
.classInitMightRaise
&&
2293 !mightReadOnlyThrow
&&
2294 tcls
.subtypeOf(BCls
) &&
2295 tname
.subtypeOf(BStr
)) {
2300 push(env
, std::move(lookup
.ty
));
2305 bool is_module_outside_active_deployment(const php::Unit
& unit
) {
2306 auto const moduleName
= unit
.moduleName
;
2307 auto const& packageInfo
= unit
.packageInfo
;
2308 if (auto const activeDeployment
= packageInfo
.getActiveDeployment()) {
2309 return !packageInfo
.moduleInDeployment(
2310 moduleName
, *activeDeployment
, DeployKind::Hard
);
2315 bool module_check_always_passes(ISS
& env
, const php::Class
& cls
) {
2316 auto const unit
= env
.index
.lookup_class_unit(cls
);
2317 if (is_module_outside_active_deployment(*unit
)) return false;
2318 if (!(cls
.attrs
& AttrInternal
)) return true;
2319 return unit
->moduleName
== env
.index
.lookup_func_unit(*env
.ctx
.func
)->moduleName
;
2322 bool module_check_always_passes(ISS
& env
, const res::Class
& rcls
) {
2323 if (auto const cls
= rcls
.cls()) {
2324 return module_check_always_passes(env
, *cls
);
2329 bool module_check_always_passes(ISS
& env
, const DCls
& dcls
) {
2330 if (dcls
.isExact() || dcls
.isSub()) {
2331 return module_check_always_passes(env
, dcls
.cls());
2332 } else if (dcls
.isIsect()) {
2333 for (auto const cls
: dcls
.isect()) {
2334 if (module_check_always_passes(env
, cls
)) return true;
2338 assertx(dcls
.isIsectAndExact());
2339 auto const [e
, i
] = dcls
.isectAndExact();
2340 if (module_check_always_passes(env
, e
)) return true;
2341 for (auto const cls
: *i
) {
2342 if (module_check_always_passes(env
, cls
)) return true;
2350 void in(ISS
& env
, const bc::ClassGetC
& op
) {
2351 auto const kind
= static_cast<ClassGetCMode
>(op
.subop1
);
2352 auto const t
= topC(env
);
2354 if (t
.subtypeOf(BCls
)) return reduce(env
);
2357 if (!t
.couldBe(BObj
| BCls
| BStr
| BLazyCls
)) {
2363 if (t
.subtypeOf(BObj
)) {
2365 case ClassGetCMode::Normal
:
2367 push(env
, objcls(t
));
2369 case ClassGetCMode::ExplicitConversion
:
2376 if (auto const clsname
= getNameFromType(t
)) {
2377 if (auto const rcls
= env
.index
.resolve_class(clsname
)) {
2378 auto const may_raise
= t
.subtypeOf(BStr
) && [&] {
2380 case ClassGetCMode::Normal
:
2381 return Cfg::Eval::RaiseStrToClsConversionNoticeSampleRate
> 0;
2382 case ClassGetCMode::ExplicitConversion
:
2383 return rcls
->mightCareAboutDynamicallyReferenced();
2388 module_check_always_passes(env
, *rcls
)) {
2391 push(env
, clsExact(*rcls
, true));
2402 void in(ISS
& env
, const bc::ClassGetTS
& op
) {
2403 // TODO(T31677864): implement real optimizations
2404 auto const ts
= popC(env
);
2405 if (!ts
.couldBe(BDict
)) {
// AKExists: infer array_key_exists($key, $base). Pops the base and the key
// (class-likes are promoted to string keys; that promotion may throw, which
// disables effect-freedom). The result type is accumulated into `result`
// from the object and array-like branches and pushed at the end.
2415 void in(ISS
& env
, const bc::AKExists
&) {
2416 auto const base
= popC(env
);
2417 auto const [key
, promotion
] = promote_classlike_to_key(popC(env
));
2419 auto result
= TBottom
;
2420 auto effectFree
= promotion
!= Promotion::YesMightThrow
;
2422 if (!base
.subtypeOf(BObj
| BArrLike
)) {
2427 if (base
.couldBe(BObj
)) {
2431 if (base
.couldBe(BArrLike
)) {
// Keys outside ArrKey are invalid and cost us effect-freedom; the
// element lookup below intersects the key down to ArrKey first.
2432 auto const validKey
= key
.subtypeOf(BArrKey
);
2433 if (!validKey
) effectFree
= false;
2434 if (key
.couldBe(BArrKey
)) {
2436 array_like_elem(base
, validKey
? key
: intersection_of(key
, TArrKey
));
2437 if (elem
.first
.is(BBottom
)) {
2439 } else if (elem
.second
) {
// Bottom result means no branch could produce a value; only possible
// when something above already ruled out effect-freedom.
2447 if (result
.is(BBottom
)) {
2448 assertx(!effectFree
);
2455 push(env
, std::move(result
));
2458 void in(ISS
& env
, const bc::GetMemoKeyL
& op
) {
2459 auto const& func
= env
.ctx
.func
;
2460 always_assert(func
->isMemoizeWrapper
);
2462 auto const tyIMemoizeParam
=
2463 subObj(builtin_class(env
.index
, s_IMemoizeParam
.get()));
2465 auto const inTy
= locAsCell(env
, op
.nloc1
.id
);
2467 // If the local could be uninit, we might raise a warning (as
2468 // usual). Converting an object to a memo key might invoke PHP code if it has
2469 // the IMemoizeParam interface, and if it doesn't, we'll throw.
2470 if (!locCouldBeUninit(env
, op
.nloc1
.id
) &&
2471 !inTy
.couldBe(BObj
| BVec
| BDict
)) {
2476 // If type constraints are being enforced and the local being turned into a
2477 // memo key is a parameter, then we can possibly using the type constraint to
2478 // infer a more efficient memo key mode.
2479 using MK
= MemoKeyConstraint
;
2480 Optional
<Type
> resolvedClsTy
;
2481 auto const mkc
= [&] {
2482 if (op
.nloc1
.id
>= env
.ctx
.func
->params
.size()) return MK::None
;
2483 auto const& tc
= env
.ctx
.func
->params
[op
.nloc1
.id
].typeConstraint
;
2484 if (tc
.isSubObject()) {
2485 auto const rcls
= env
.index
.resolve_class(tc
.clsName());
2486 assertx(rcls
.has_value());
2487 resolvedClsTy
= subObj(*rcls
);
2489 return memoKeyConstraintFromTC(tc
);
2492 // Use the type-constraint to reduce this operation to a more efficient memo
2493 // mode. Some of the modes can be reduced to simple bytecode operations
2494 // inline. Even with the type-constraints, we still need to check the inferred
2495 // type of the local. Something may have possibly clobbered the local between
2496 // the type-check and this op.
2499 // Always an int, so the key is always an identity mapping
2500 if (inTy
.subtypeOf(BInt
)) return reduce(env
, bc::CGetL
{ op
.nloc1
});
2503 // Always a bool, so the key is the bool cast to an int
2504 if (inTy
.subtypeOf(BBool
)) {
2505 return reduce(env
, bc::CGetL
{ op
.nloc1
}, bc::CastInt
{});
2509 // Always a string, so the key is always an identity mapping
2510 if (inTy
.subtypeOf(BStr
)) return reduce(env
, bc::CGetL
{ op
.nloc1
});
2513 // Either an int or string, so the key can be an identity mapping
2514 if (inTy
.subtypeOf(BArrKey
)) return reduce(env
, bc::CGetL
{ op
.nloc1
});
2517 // A nullable string. The key will either be the string or the integer
2519 if (inTy
.subtypeOf(BOptStr
)) {
2522 bc::CGetL
{ op
.nloc1
},
2524 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2530 // A nullable int. The key will either be the integer, or the static empty
2532 if (inTy
.subtypeOf(BOptInt
)) {
2535 bc::CGetL
{ op
.nloc1
},
2536 bc::String
{ staticEmptyString() },
2537 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2542 case MK::BoolOrNull
:
2543 // A nullable bool. The key will either be 0, 1, or 2.
2544 if (inTy
.subtypeOf(BOptBool
)) {
2547 bc::CGetL
{ op
.nloc1
},
2550 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2556 // The double will be converted (losslessly) to an integer.
2557 if (inTy
.subtypeOf(BDbl
)) {
2558 return reduce(env
, bc::CGetL
{ op
.nloc1
}, bc::DblAsBits
{});
2562 // A nullable double. The key will be an integer, or the static empty
2564 if (inTy
.subtypeOf(BOptDbl
)) {
2567 bc::CGetL
{ op
.nloc1
},
2569 bc::String
{ staticEmptyString() },
2570 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2576 // An object. If the object is definitely known to implement IMemoizeParam
2577 // we can simply call that method, casting the output to ensure its always
2578 // a string (which is what the generic mode does). If not, it will use the
2579 // generic mode, which can handle collections or classes which don't
2580 // implement getInstanceKey.
2581 if (resolvedClsTy
&&
2582 resolvedClsTy
->subtypeOf(tyIMemoizeParam
) &&
2583 inTy
.subtypeOf(tyIMemoizeParam
)) {
2586 bc::CGetL
{ op
.nloc1
},
2588 bc::FCallObjMethodD
{
2590 staticEmptyString(),
2591 ObjMethodOp::NullThrows
,
2592 s_getInstanceKey
.get()
2598 case MK::ObjectOrNull
:
2599 // An object or null. We can use the null safe version of a function call
2600 // when invoking getInstanceKey and then select from the result of that,
2601 // or the integer 0. This might seem wasteful, but the JIT does a good job
2602 // inlining away the call in the null case.
2603 if (resolvedClsTy
&&
2604 resolvedClsTy
->subtypeOf(tyIMemoizeParam
) &&
2605 inTy
.subtypeOf(opt(tyIMemoizeParam
))) {
2608 bc::CGetL
{ op
.nloc1
},
2610 bc::FCallObjMethodD
{
2612 staticEmptyString(),
2613 ObjMethodOp::NullSafe
,
2614 s_getInstanceKey
.get()
2618 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2627 // No type constraint, or one that isn't usuable. Use the generic memoization
2628 // scheme which can handle any type:
2630 if (auto const val
= tv(inTy
)) {
2631 auto const key
= eval_cell(
2632 [&]{ return HHVM_FN(serialize_memoize_param
)(*val
); }
2634 if (key
) return push(env
, *key
);
2637 // Integer keys are always mapped to themselves
2638 if (inTy
.subtypeOf(BInt
)) return reduce(env
, bc::CGetL
{ op
.nloc1
});
2639 if (inTy
.subtypeOf(BOptInt
)) {
2642 bc::CGetL
{ op
.nloc1
},
2643 bc::String
{ s_nullMemoKey
.get() },
2644 bc::IsTypeL
{ op
.nloc1
, IsTypeOp::Null
},
2648 if (inTy
.subtypeOf(BBool
)) {
2651 bc::String
{ s_falseMemoKey
.get() },
2652 bc::String
{ s_trueMemoKey
.get() },
2653 bc::CGetL
{ op
.nloc1
},
2658 // A memo key can be an integer if the input might be an integer, and is a
2659 // string otherwise. Booleans and nulls are always static strings.
2661 if (inTy
.subtypeOf(BOptBool
)) return TSStr
;
2662 if (inTy
.couldBe(BInt
)) return union_of(TInt
, TStr
);
2665 push(env
, std::move(keyTy
));
// IssetL: isset() on a local. A local known to alias $this is rewritten to a
// BareThis + null-check sequence; otherwise, when nullness is statically
// decided, fold directly to TFalse/TTrue.
2668 void in(ISS
& env
, const bc::IssetL
& op
) {
2669 if (locIsThis(env
, op
.loc1
)) {
2671 bc::BareThis
{ BareThisOp::NoNotice
},
2672 bc::IsTypeC
{ IsTypeOp::Null
},
2677 auto const loc
= locAsCell(env
, op
.loc1
);
2678 if (loc
.subtypeOf(BNull
)) return push(env
, TFalse
);
2679 if (!loc
.couldBe(BNull
)) return push(env
, TTrue
);
// IsUnsetL: true iff the local is Uninit; folds to TTrue/TFalse when the
// inferred local type decides it either way.
2683 void in(ISS
& env
, const bc::IsUnsetL
& op
) {
2686 auto const loc
= locAsCell(env
, op
.loc1
);
2687 if (loc
.subtypeOf(BUninit
)) return push(env
, TTrue
);
2688 if (!loc
.couldBe(BUninit
)) return push(env
, TFalse
);
// IssetS: isset() on a static property. Pops the class and property name;
// a non-class "class" operand can only throw. Uses the index's static
// property lookup; when class-init can't raise, the prop isn't internal, and
// both operands are well-typed, the nullness of the looked-up type (plus
// late-init status) folds the result to TFalse/TTrue.
2692 void in(ISS
& env
, const bc::IssetS
& op
) {
2693 auto const tcls
= popC(env
);
2694 auto const tname
= popC(env
);
2696 if (!tcls
.couldBe(BCls
)) {
2698 return push(env
, TBottom
);
2701 auto lookup
= env
.index
.lookup_static(
2708 if (!lookup
.classInitMightRaise
&&
2709 lookup
.internal
== TriBool::No
&&
2710 tcls
.subtypeOf(BCls
) &&
2711 tname
.subtypeOf(BStr
)) {
2716 if (lookup
.ty
.subtypeOf(BNull
)) return push(env
, TFalse
);
2717 if (!lookup
.ty
.couldBe(BNull
) && lookup
.lateInit
== TriBool::No
) {
2718 return push(env
, TTrue
);
2723 void in(ISS
& env
, const bc::IssetG
&) { popC(env
); push(env
, TBool
); }
// isTypeObj: fold an is_object-style check on `ty`. Special cases:
// __PHP_Incomplete_Class instances report false, and (under the
// BuildMayNoticeOnMethCallerHelperIsObject config) a possible
// MethCallerHelper keeps the result at TBool.
2725 void isTypeObj(ISS
& env
, const Type
& ty
) {
2726 if (!ty
.couldBe(BObj
)) return push(env
, TFalse
);
2727 if (ty
.subtypeOf(BObj
)) {
2728 auto const incompl
= objExact(
2729 builtin_class(env
.index
, s_PHP_Incomplete_Class
.get()));
2730 if (Cfg::Eval::BuildMayNoticeOnMethCallerHelperIsObject
) {
2732 objExact(builtin_class(env
.index
, s_MethCallerHelper
.get()));
2733 if (ty
.couldBe(c
)) return push(env
, TBool
);
2735 if (!ty
.couldBe(incompl
)) return push(env
, TTrue
);
2736 if (ty
.subtypeOf(incompl
)) return push(env
, TFalse
);
// isTypeImpl: core is-type folding. Scalar/LegacyArrLike stay TBool; Obj
// delegates to isTypeObj; Func stays TBool when the value could be TFunc
// (it may be a meth_caller). Otherwise, compare against the canonical type
// for the op and fold to TTrue/TFalse when decided.
2741 void isTypeImpl(ISS
& env
, const Type
& locOrCell
, IsTypeOp subop
) {
2743 case IsTypeOp::Scalar
: return push(env
, TBool
);
2744 case IsTypeOp::LegacyArrLike
: return push(env
, TBool
);
2745 case IsTypeOp::Obj
: return isTypeObj(env
, locOrCell
);
2746 case IsTypeOp::Func
:
2747 // If it is TFunc, it may still be meth_caller.
2748 if (locOrCell
.couldBe(TFunc
)) return push(env
, TBool
);
2753 auto const test
= type_of_istype(subop
);
2754 if (locOrCell
.subtypeOf(test
)) return push(env
, TTrue
);
2755 if (!locOrCell
.couldBe(test
)) return push(env
, TFalse
);
// isTypeLImpl: is-type check on a local. When the local cannot be uninit and
// the check itself cannot raise, the op qualifies for the (elided)
// effect-free/constprop treatment before delegating to isTypeImpl.
2760 void isTypeLImpl(ISS
& env
, const Op
& op
) {
2761 auto const loc
= locAsCell(env
, op
.nloc1
.id
);
2762 if (!locCouldBeUninit(env
, op
.nloc1
.id
) &&
2763 !is_type_might_raise(op
.subop2
, loc
)) {
2768 isTypeImpl(env
, loc
, op
.subop2
);
// isTypeCImpl: is-type check on the popped stack cell; mirrors isTypeLImpl
// (the no-raise fast path body is elided in this fragment).
2772 void isTypeCImpl(ISS
& env
, const Op
& op
) {
2773 auto const t1
= popC(env
);
2774 if (!is_type_might_raise(op
.subop1
, t1
)) {
2779 isTypeImpl(env
, t1
, op
.subop1
);
2782 void in(ISS
& env
, const bc::IsTypeC
& op
) { isTypeCImpl(env
, op
); }
2783 void in(ISS
& env
, const bc::IsTypeL
& op
) { isTypeLImpl(env
, op
); }
// InstanceOfD: `$x instanceof <literal class name>`. If the class resolves
// and the interface can't be satisfied by non-objects, fold to TTrue/TFalse
// when the subtype relation is decided; a nullable value whose non-null part
// passes the test is rewritten into a null check + Not.
2785 void in(ISS
& env
, const bc::InstanceOfD
& op
) {
2786 auto t1
= topC(env
);
2787 // Note: InstanceOfD can do autoload if the type might be a type
2788 // alias, so it's not nothrow unless we know it's an object type.
2789 if (auto const rcls
= env
.index
.resolve_class(op
.str1
)) {
2790 auto result
= [&] (const Type
& r
) {
2792 if (r
!= TBool
) constprop(env
);
2796 if (!interface_supports_non_objects(rcls
->name())) {
2797 auto const testTy
= subObj(*rcls
);
2798 if (t1
.subtypeOf(testTy
)) return result(TTrue
);
2799 if (!t1
.couldBe(testTy
)) return result(TFalse
);
2800 if (t1
.couldBe(BInitNull
) && !t1
.subtypeOf(BInitNull
)) {
2801 t1
= unopt(std::move(t1
));
2802 if (t1
.subtypeOf(testTy
)) {
2803 return reduce(env
, bc::IsTypeC
{ IsTypeOp::Null
}, bc::Not
{});
2806 return result(TBool
);
2809 // The class doesn't exist, so we can never have an instance of
// InstanceOf: dynamic instanceof. When the class operand is a constant
// string, or the subject is an exactly-known object type, reduce to the
// cheaper PopC + InstanceOfD form.
2819 void in(ISS
& env
, const bc::InstanceOf
& /*op*/) {
2820 auto const t1
= topC(env
);
2821 auto const v1
= tv(t1
);
2822 if (v1
&& v1
->m_type
== KindOfPersistentString
) {
2823 return reduce(env
, bc::PopC
{},
2824 bc::InstanceOfD
{ v1
->m_data
.pstr
});
2827 if (t1
.subtypeOf(BObj
) && is_specialized_obj(t1
)) {
2828 auto const& dobj
= dobj_of(t1
);
2829 if (dobj
.isExact()) {
2830 return reduce(env
, bc::PopC
{},
2831 bc::InstanceOfD
{ dobj
.cls().name() });
// IsLateBoundCls: always produces a boolean. Marked effect-free when the
// current context has a class that is not a trait (presumably because the
// late-bound class is then well-defined — elided lines may refine this).
2840 void in(ISS
& env
, const bc::IsLateBoundCls
& op
) {
2841 auto const cls
= env
.ctx
.cls
;
2842 if (cls
&& !(cls
->attrs
& AttrTrait
)) effect_free(env
);
2844 return push(env
, TBool
);
// isValidTypeOpForIsAs: whitelist of IsTypeOps that an is/as type-structure
// test may be reduced to. (The switch's return arms are elided in this
// fragment; the listed cases are the accepted ops.)
2849 bool isValidTypeOpForIsAs(const IsTypeOp
& op
) {
2851 case IsTypeOp::Null
:
2852 case IsTypeOp::Bool
:
2860 case IsTypeOp::Dict
:
2861 case IsTypeOp::Keyset
:
2862 case IsTypeOp::ArrLike
:
2863 case IsTypeOp::LegacyArrLike
:
2864 case IsTypeOp::Scalar
:
2865 case IsTypeOp::ClsMeth
:
2866 case IsTypeOp::Func
:
2867 case IsTypeOp::Class
:
2873 void isTypeStructImpl(ISS
& env
, SArray inputTS
) {
2874 auto const ts
= inputTS
;
2875 auto const t
= loosen_likeness(topC(env
, 1)); // operand to is/as
2877 bool may_raise
= true;
2878 auto result
= [&] (const Type
& out
) {
2879 popC(env
); // type structure
2880 popC(env
); // operand to is/as
2882 if (!may_raise
) nothrow(env
);
2883 return push(env
, out
);
2887 const Optional
<Type
> type
,
2888 const Optional
<Type
> deopt
= std::nullopt
2890 if (!type
|| is_type_might_raise(*type
, t
)) return result(TBool
);
2891 auto test
= type
.value();
2892 if (t
.subtypeOf(test
)) return result(TTrue
);
2893 if (!t
.couldBe(test
) && (!deopt
|| !t
.couldBe(deopt
.value()))) {
2894 return result(TFalse
);
2896 auto const op
= type_to_istypeop(test
);
2897 if (!op
|| !isValidTypeOpForIsAs(op
.value())) return result(TBool
);
2898 return reduce(env
, bc::PopC
{}, bc::IsTypeC
{ *op
});
2901 auto const is_nullable_ts
= is_ts_nullable(ts
);
2902 auto const is_definitely_null
= t
.subtypeOf(BNull
);
2903 auto const is_definitely_not_null
= !t
.couldBe(BNull
);
2905 if (is_nullable_ts
&& is_definitely_null
) return result(TTrue
);
2907 auto const ts_type
= type_of_type_structure(env
.index
, env
.ctx
, ts
);
2909 if (is_nullable_ts
&& !is_definitely_not_null
&& ts_type
== std::nullopt
) {
2910 // Ts is nullable and we know that t could be null but we dont know for sure
2911 // Also we didn't get a type out of the type structure
2912 return result(TBool
);
2915 if (ts_type
&& !is_type_might_raise(*ts_type
, t
)) may_raise
= false;
2916 switch (get_ts_kind(ts
)) {
2917 case TypeStructure::Kind::T_int
:
2918 case TypeStructure::Kind::T_bool
:
2919 case TypeStructure::Kind::T_float
:
2920 case TypeStructure::Kind::T_string
:
2921 case TypeStructure::Kind::T_num
:
2922 case TypeStructure::Kind::T_arraykey
:
2923 case TypeStructure::Kind::T_keyset
:
2924 case TypeStructure::Kind::T_void
:
2925 case TypeStructure::Kind::T_null
:
2926 return check(ts_type
);
2927 case TypeStructure::Kind::T_tuple
:
2928 return check(ts_type
, TVec
);
2929 case TypeStructure::Kind::T_shape
:
2930 return check(ts_type
, TDict
);
2931 case TypeStructure::Kind::T_dict
:
2932 return check(ts_type
);
2933 case TypeStructure::Kind::T_vec
:
2934 return check(ts_type
);
2935 case TypeStructure::Kind::T_nothing
:
2936 case TypeStructure::Kind::T_noreturn
:
2937 return result(TFalse
);
2938 case TypeStructure::Kind::T_mixed
:
2939 case TypeStructure::Kind::T_dynamic
:
2940 return result(TTrue
);
2941 case TypeStructure::Kind::T_nonnull
:
2942 if (is_definitely_null
) return result(TFalse
);
2943 if (is_definitely_not_null
) return result(TTrue
);
2946 bc::IsTypeC
{ IsTypeOp::Null
},
2948 case TypeStructure::Kind::T_class
:
2949 case TypeStructure::Kind::T_interface
:
2950 case TypeStructure::Kind::T_xhp
: {
2951 auto clsname
= get_ts_classname(ts
);
2952 auto const rcls
= env
.index
.resolve_class(clsname
);
2953 if (!rcls
) return result(TBool
);
2954 if (ts
->exists(s_generic_types
)) {
2955 if (!isTSAllWildcards(ts
)) return result(TBool
);
2956 if (rcls
->couldHaveReifiedGenerics()) return result(TBool
);
2958 return reduce(env
, bc::PopC
{}, bc::InstanceOfD
{ clsname
});
2960 case TypeStructure::Kind::T_unresolved
: {
2961 auto classname
= get_ts_classname(ts
);
2962 auto const has_generics
= ts
->exists(s_generic_types
);
2963 if (!has_generics
&& classname
->tsame(s_this
.get())) {
2964 return reduce(env
, bc::PopC
{}, bc::IsLateBoundCls
{});
2966 auto const rcls
= env
.index
.resolve_class(classname
);
2967 // We can only reduce to instance of if we know for sure that this class
2968 // can be resolved since instanceof undefined class does not throw
2969 if (!rcls
) return result(TBool
);
2970 auto const cls
= rcls
->cls();
2971 if (!cls
|| cls
->attrs
& AttrEnum
) return result(TBool
);
2972 if (has_generics
&& (cls
->hasReifiedGenerics
|| !isTSAllWildcards(ts
))) {
2973 // If it is a reified class or has non wildcard generics, we
2975 return result(TBool
);
2977 return reduce(env
, bc::PopC
{}, bc::InstanceOfD
{ rcls
->name() });
2979 case TypeStructure::Kind::T_enum
:
2980 case TypeStructure::Kind::T_resource
:
2981 case TypeStructure::Kind::T_vec_or_dict
:
2982 case TypeStructure::Kind::T_any_array
:
2983 case TypeStructure::Kind::T_union
:
2984 case TypeStructure::Kind::T_recursiveUnion
:
2985 // TODO(T29232862): implement
2986 return result(TBool
);
2987 case TypeStructure::Kind::T_typeaccess
:
2988 case TypeStructure::Kind::T_darray
:
2989 case TypeStructure::Kind::T_varray
:
2990 case TypeStructure::Kind::T_varray_or_darray
:
2991 case TypeStructure::Kind::T_reifiedtype
:
2992 return result(TBool
);
2993 case TypeStructure::Kind::T_fun
:
2994 case TypeStructure::Kind::T_typevar
:
2995 case TypeStructure::Kind::T_trait
:
2996 // We will error on these at the JIT
2997 return result(TBool
);
3003 const StaticString
s_hh_type_structure_no_throw("HH\\type_structure_no_throw");
3007 void in(ISS
& env
, const bc::IsTypeStructC
& op
) {
3008 if (!topC(env
).couldBe(BDict
)) {
3011 return unreachable(env
);
3013 auto const a
= tv(topC(env
));
3014 if (!a
|| !isValidTSType(*a
, false)) {
3017 return push(env
, TBool
);
3019 if (op
.subop1
== TypeStructResolveOp::Resolve
) {
3020 if (auto const ts
= resolve_type_structure(env
, a
->m_data
.parr
).sarray()) {
3025 bc::IsTypeStructC
{ TypeStructResolveOp::DontResolve
, op
.subop2
}
3028 if (auto const val
= get_ts_this_type_access(a
->m_data
.parr
)) {
3029 // Convert `$x is this::T` into
3030 // `$x is type_structure_no_throw(static::class, 'T')`
3031 // to take advantage of the caching that comes with the type_structure
3037 bc::LateBoundCls
{},
3039 bc::FCallFuncD
{FCallArgs(2), s_hh_type_structure_no_throw
.get()},
3040 bc::IsTypeStructC
{ TypeStructResolveOp::DontResolve
, op
.subop2
}
3044 isTypeStructImpl(env
, a
->m_data
.parr
);
3047 void in(ISS
& env
, const bc::ThrowAsTypeStructException
& op
) {
3053 void in(ISS
& env
, const bc::CombineAndResolveTypeStruct
& op
) {
3054 assertx(op
.arg1
> 0);
3056 auto const first
= tv(topC(env
));
3057 if (first
&& isValidTSType(*first
, false)) {
3058 auto const ts
= first
->m_data
.parr
;
3059 // Optimize single input that does not need any combination
3061 if (auto const r
= resolve_type_structure(env
, ts
).sarray()) {
3070 // Optimize double input that needs a single combination and looks of the
3072 if (op
.arg1
== 2 && get_ts_kind(ts
) == TypeStructure::Kind::T_reifiedtype
) {
3073 BytecodeVec instrs
{ bc::PopC
{} };
3074 auto const tv_true
= gen_constant(make_tv
<KindOfBoolean
>(true));
3075 if (ts
->exists(s_nullable
.get())) {
3076 instrs
.push_back(gen_constant(make_tv
<KindOfString
>(s_nullable
.get())));
3077 instrs
.push_back(tv_true
);
3078 instrs
.push_back(bc::AddElemC
{});
3080 if (ts
->exists(s_soft
.get())) {
3081 instrs
.push_back(gen_constant(make_tv
<KindOfString
>(s_soft
.get())));
3082 instrs
.push_back(tv_true
);
3083 instrs
.push_back(bc::AddElemC
{});
3085 return reduce(env
, std::move(instrs
));
3089 for (int i
= 0; i
< op
.arg1
; ++i
) {
3090 auto const t
= popC(env
);
3091 valid
&= t
.couldBe(BDict
);
3093 if (!valid
) return unreachable(env
);
// RecordReifiedGeneric: pops the reified-generics list; input that cannot be
// a vec makes this point unreachable, and a definite vec makes it nothrow.
3098 void in(ISS
& env
, const bc::RecordReifiedGeneric
& op
) {
3099 // TODO(T31677864): implement real optimizations
3100 auto const t
= popC(env
);
3101 if (!t
.couldBe(BVec
)) return unreachable(env
);
3102 if (t
.subtypeOf(BVec
)) nothrow(env
);
// CheckClsReifiedGenericMismatch: refine the stack slot (when it tracks a
// location) with the type derived from the context class's reified-generics
// user attributes; a failed refinement means this point is unreachable.
3106 void in(ISS
& env
, const bc::CheckClsReifiedGenericMismatch
& op
) {
3107 auto const location
= topStkEquiv(env
, 0);
3110 if (location
== NoLocalId
) return;
3111 auto const ok
= refineLocation(
3114 return get_type_of_reified_list(env
.ctx
.cls
->userAttributes
);
3117 if (!ok
) unreachable(env
);
3120 void in(ISS
& env
, const bc::ClassHasReifiedGenerics
& op
) {
3121 // TODO(T121050961) Optimize for lazy classes too
3122 auto const cls
= popC(env
);
3123 if (!cls
.couldBe(BCls
| BLazyCls
)) {
3125 return push(env
, TBottom
);
3127 if (!cls
.subtypeOf(BCls
)) {
3133 auto const t
= [&] {
3134 if (!is_specialized_cls(cls
) || !dcls_of(cls
).isExact()) {
3137 auto const& dcls
= dcls_of(cls
);
3138 if (!dcls
.cls().couldHaveReifiedGenerics()) {
3141 if (dcls
.cls().mustHaveReifiedGenerics()) {
3149 void in(ISS
& env
, const bc::GetClsRGProp
& op
) {
3150 // TODO(T121050961) Optimize for lazy classes too
3151 auto const cls
= popC(env
);
3152 if (!thisAvailable(env
) || !cls
.couldBe(BCls
| BLazyCls
)) {
3154 return push(env
, TBottom
);
3156 if (!cls
.subtypeOf(BCls
) ||
3157 !is_specialized_cls(cls
) ||
3158 !dcls_of(cls
).isExact()) {
3162 auto const &dcls
= dcls_of(cls
);
3163 if (!dcls
.cls().couldHaveReifiedGenerics()) {
3164 push(env
, TInitNull
);
3170 void in(ISS
& env
, const bc::HasReifiedParent
& op
) {
3171 // TODO(T121050961) Optimize for lazy classes too
3172 auto const cls
= popC(env
);
3173 if (!cls
.couldBe(BCls
| BLazyCls
)) {
3175 return push(env
, TBottom
);
3177 if (!cls
.subtypeOf(BCls
)) {
3183 auto const t
= [&] {
3184 if (!is_specialized_cls(cls
) || !dcls_of(cls
).isExact()) {
3187 auto const& dcls
= dcls_of(cls
);
3188 if (!dcls
.cls().couldHaveReifiedParent()) {
3191 if (dcls
.cls().mustHaveReifiedParent()) {
// CheckClsRGSoft: soft reified-generics check on the popped class. Only
// folds when the class is an exactly-known Cls; a class that cannot have
// reified generics makes the check trivial (elided tail).
3199 void in(ISS
& env
, const bc::CheckClsRGSoft
& op
) {
3200 // TODO(T121050961) Optimize for lazy classes too
3201 auto const cls
= popC(env
);
3202 if (!cls
.couldBe(BCls
| BLazyCls
)) {
3206 if (!cls
.subtypeOf(BCls
) ||
3207 !is_specialized_cls(cls
) ||
3208 !dcls_of(cls
).isExact()) {
3211 auto const &dcls
= dcls_of(cls
);
3212 if (!dcls
.cls().couldHaveReifiedGenerics()) {
3220 * If the value on the top of the stack is known to be equivalent to the local
3221 * its being moved/copied to, return std::nullopt without modifying any
3222 * state. Otherwise, pop the stack value, perform the set, and return a pair
3223 * giving the value's type, and any other local its known to be equivalent to.
3225 template <typename Set
>
3226 Optional
<std::pair
<Type
, LocalId
>> moveToLocImpl(ISS
& env
,
3228 if (auto const prev
= last_op(env
, 1)) {
3229 if (prev
->op
== Op::CGetL2
&&
3230 prev
->CGetL2
.nloc1
.id
== op
.loc1
&&
3231 last_op(env
)->op
== Op::Concat
) {
3233 reduce(env
, bc::SetOpL
{ op
.loc1
, SetOpOp::ConcatEqual
});
3234 return std::nullopt
;
3238 auto equivLoc
= topStkEquiv(env
);
3239 // If the local could be a Ref, don't record equality because the stack
3240 // element and the local won't actually have the same type.
3241 if (equivLoc
== StackThisId
&& env
.state
.thisLoc
!= NoLocalId
) {
3242 if (env
.state
.thisLoc
== op
.loc1
||
3243 locsAreEquiv(env
, env
.state
.thisLoc
, op
.loc1
)) {
3244 return std::nullopt
;
3246 equivLoc
= env
.state
.thisLoc
;
3249 if (!is_volatile_local(env
.ctx
.func
, op
.loc1
)) {
3250 if (equivLoc
<= MaxLocalId
) {
3251 if (equivLoc
== op
.loc1
||
3252 locsAreEquiv(env
, equivLoc
, op
.loc1
)) {
3253 // We allow equivalency to ignore Uninit, so we need to check
3255 if (peekLocRaw(env
, op
.loc1
) == topC(env
)) {
3256 return std::nullopt
;
3259 } else if (equivLoc
== NoLocalId
) {
3262 if (!any(env
.collect
.opts
& CollectionOpts::Speculating
)) {
3266 equivLoc
= NoLocalId
;
3269 auto val
= popC(env
);
3270 setLoc(env
, op
.loc1
, val
);
3271 if (equivLoc
== StackThisId
) {
3272 assertx(env
.state
.thisLoc
== NoLocalId
);
3273 equivLoc
= env
.state
.thisLoc
= op
.loc1
;
3275 if (equivLoc
== StackDupId
) {
3276 setStkLocal(env
, op
.loc1
);
3277 } else if (equivLoc
!= op
.loc1
&& equivLoc
!= NoLocalId
) {
3278 addLocEquiv(env
, op
.loc1
, equivLoc
);
3280 return { std::make_pair(std::move(val
), equivLoc
) };
// PopL: store the stack top into the local, then discard it; the no-op case
// (value already equivalent to the local) reduces to a plain PopC.
3285 void in(ISS
& env
, const bc::PopL
& op
) {
3286 // If the same value is already in the local, do nothing but pop
3287 // it. Otherwise, the set has been done by moveToLocImpl.
3288 if (!moveToLocImpl(env
, op
)) return reduce(env
, bc::PopC
{});
// SetL: like PopL but keeps the assigned value on the stack, re-pushing it
// (with its equivalence info) after moveToLocImpl consumed it.
3291 void in(ISS
& env
, const bc::SetL
& op
) {
3292 // If the same value is already in the local, do nothing because SetL keeps
3293 // the value on the stack. If it isn't, we need to push it back onto the stack
3294 // because moveToLocImpl popped it.
3295 if (auto p
= moveToLocImpl(env
, op
)) {
3296 push(env
, std::move(p
->first
), p
->second
);
// SetG: assign to a global; the assigned value is the expression result and
// is pushed back. (The pop of the global's name appears elided in this
// fragment — confirm against the full source.)
3302 void in(ISS
& env
, const bc::SetG
&) {
3303 auto t1
= popC(env
);
3305 push(env
, std::move(t1
));
3308 void in(ISS
& env
, const bc::SetS
& op
) {
3309 auto const val
= popC(env
);
3310 auto const tcls
= popC(env
);
3311 auto const tname
= popC(env
);
3313 auto const throws
= [&] {
3315 return push(env
, TBottom
);
3318 if (!tcls
.couldBe(BCls
)) return throws();
3320 auto merge
= mergeStaticProp(
3321 env
, tcls
, tname
, val
, true, false,
3322 op
.subop1
== ReadonlyOp::Readonly
3324 if (merge
.throws
== TriBool::Yes
|| merge
.adjusted
.subtypeOf(BBottom
)) {
3328 if (merge
.throws
== TriBool::No
&&
3329 tcls
.subtypeOf(BCls
) &&
3330 tname
.subtypeOf(BStr
)) {
3334 push(env
, std::move(merge
.adjusted
));
// SetOpL: compound assignment ($l op= $rhs) on a local. Computes the result
// type from the local's cell and the popped RHS, stores it back, marks this
// point unreachable when the result is Bottom, and pushes the result.
3337 void in(ISS
& env
, const bc::SetOpL
& op
) {
3338 auto const t1
= popC(env
);
3339 auto const loc
= locAsCell(env
, op
.loc1
);
3341 auto resultTy
= typeSetOp(op
.subop2
, loc
, t1
);
3342 setLoc(env
, op
.loc1
, resultTy
);
3343 if (resultTy
.is(BBottom
)) unreachable(env
);
3344 push(env
, std::move(resultTy
));
// SetOpG: compound assignment to a global — both operands are consumed and
// nothing is known about the result, so push TInitCell.
3347 void in(ISS
& env
, const bc::SetOpG
&) {
3348 popC(env
); popC(env
);
3349 push(env
, TInitCell
);
3352 void in(ISS
& env
, const bc::SetOpS
& op
) {
3353 auto const rhs
= popC(env
);
3354 auto const tcls
= popC(env
);
3355 auto const tname
= popC(env
);
3357 auto const throws
= [&] {
3359 return push(env
, TBottom
);
3362 if (!tcls
.couldBe(BCls
)) return throws();
3364 auto const lookup
= env
.index
.lookup_static(
3371 if (lookup
.found
== TriBool::No
|| lookup
.ty
.subtypeOf(BBottom
)) {
3375 auto const newTy
= typeSetOp(op
.subop1
, lookup
.ty
, rhs
);
3376 if (newTy
.subtypeOf(BBottom
)) return throws();
3378 auto merge
= mergeStaticProp(env
, tcls
, tname
, newTy
);
3379 if (merge
.throws
== TriBool::Yes
|| merge
.adjusted
.subtypeOf(BBottom
)) {
3383 // NB: Unlike IncDecS, SetOpS pushes the post-TypeConstraint
3384 // adjustment value.
3385 push(env
, std::move(merge
.adjusted
));
// IncDecL: ++/-- on a local. A Bottom result type means the op always
// fatals; otherwise it is nothrow for an initialized numeric local. Pre ops
// push the new value's type, post ops push the old one.
3388 void in(ISS
& env
, const bc::IncDecL
& op
) {
3389 auto loc
= locAsCell(env
, op
.nloc1
.id
);
3390 auto newT
= typeIncDec(op
.subop2
, loc
);
3392 if (newT
.subtypeOf(BBottom
)) {
3394 return push(env
, TBottom
);
3397 if (!locCouldBeUninit(env
, op
.nloc1
.id
) && loc
.subtypeOf(BNum
)) nothrow(env
);
3399 auto const pre
= isPre(op
.subop2
);
3400 if (!pre
) push(env
, std::move(loc
));
3401 setLoc(env
, op
.nloc1
.id
, newT
);
3402 if (pre
) push(env
, std::move(newT
));
3405 void in(ISS
& env
, const bc::IncDecG
&) { popC(env
); push(env
, TInitCell
); }
3407 void in(ISS
& env
, const bc::IncDecS
& op
) {
3408 auto const tcls
= popC(env
);
3409 auto const tname
= popC(env
);
3410 auto const pre
= isPre(op
.subop1
);
3412 auto const throws
= [&] {
3414 return push(env
, TBottom
);
3417 if (!tcls
.couldBe(BCls
)) return throws();
3419 auto lookup
= env
.index
.lookup_static(
3426 if (lookup
.found
== TriBool::No
|| lookup
.ty
.subtypeOf(BBottom
)) {
3430 auto newTy
= typeIncDec(op
.subop1
, lookup
.ty
);
3431 if (newTy
.subtypeOf(BBottom
)) return throws();
3433 auto const merge
= mergeStaticProp(env
, tcls
, tname
, newTy
);
3434 if (merge
.throws
== TriBool::Yes
|| merge
.adjusted
.subtypeOf(BBottom
)) {
3438 if (lookup
.found
== TriBool::Yes
&&
3439 lookup
.lateInit
== TriBool::No
&&
3440 lookup
.internal
== TriBool::No
&&
3441 !lookup
.classInitMightRaise
&&
3442 merge
.throws
== TriBool::No
&&
3443 tcls
.subtypeOf(BCls
) &&
3444 tname
.subtypeOf(BStr
) &&
3445 lookup
.ty
.subtypeOf(BNum
)) {
3449 // NB: IncDecS pushes the value pre-TypeConstraint modification
3450 push(env
, pre
? std::move(newTy
) : std::move(lookup
.ty
));
// UnsetL: set the local back to Uninit. Folds away when the local is already
// Uninit, and rewrites a PopL-into-the-same-local immediately followed by
// UnsetL into PopC + UnsetL (no point storing a value we unset right away).
3453 void in(ISS
& env
, const bc::UnsetL
& op
) {
3454 // Peek so that we don't register a read on the local if we're
3455 // going to optimize this away.
3456 if (peekLocRaw(env
, op
.loc1
).subtypeOf(TUninit
)) {
3460 if (auto const last
= last_op(env
)) {
3461 // No point in popping into the local if we're just going to
3462 // immediately unset it.
3463 if (last
->op
== Op::PopL
&&
3464 last
->PopL
.loc1
== op
.loc1
) {
3467 setLocRaw(env
, op
.loc1
, TCell
);
3468 return reduce(env
, bc::PopC
{}, bc::UnsetL
{ op
.loc1
});
3472 if (any(env
.collect
.opts
& CollectionOpts::Speculating
)) {
3477 setLocRaw(env
, op
.loc1
, TUninit
);
// UnsetG: pops the global's name; nothrow unless the popped value could be
// an object or resource (presumably because destruction could run user
// code — confirm against the full source).
3480 void in(ISS
& env
, const bc::UnsetG
& /*op*/) {
3481 auto const t1
= popC(env
);
3482 if (!t1
.couldBe(BObj
| BRes
)) nothrow(env
);
// fcallCanSkipRepack: true when the FCall's argument layout is guaranteed to
// match the callee's parameter layout, so the runtime repack step can be
// elided: no arg-count overflow, and any unpack sits exactly at the callee's
// variadic position with a statically-known vec.
3485 bool fcallCanSkipRepack(ISS
& env
, const FCallArgs
& fca
, const res::Func
& func
) {
3486 // Can't skip repack if potentially calling a function with too many args.
3487 if (fca
.numArgs() > func
.minNonVariadicParams()) return false;
3488 // Repack not needed if not unpacking and not having too many arguments.
3489 if (!fca
.hasUnpack()) return true;
3490 // Can't skip repack if unpack args are in a wrong position.
3491 if (fca
.numArgs() != func
.maxNonVariadicParams()) return false;
3493 // Repack not needed if unpack args have the correct type.
3494 auto const unpackArgs
= topC(env
, fca
.hasGenerics() ? 1 : 0);
3495 return unpackArgs
.subtypeOf(BVec
);
3498 bool coeffectRulesMatch(ISS
& env
,
3499 const FCallArgs
& fca
,
3500 const res::Func
& func
,
3501 uint32_t numExtraInputs
,
3502 const CoeffectRule
& caller
,
3503 const CoeffectRule
& callee
) {
3504 if (caller
.m_type
!= callee
.m_type
) return false;
3505 switch (caller
.m_type
) {
3506 case CoeffectRule::Type::CCThis
: {
3507 if (caller
.m_name
!= callee
.m_name
||
3508 caller
.m_types
!= callee
.m_types
) {
3511 if (!thisAvailable(env
)) return false;
3512 auto const loc
= topStkEquiv(env
, fca
.numInputs() + numExtraInputs
+ 1);
3513 return loc
== StackThisId
|| (loc
<= MaxLocalId
&& locIsThis(env
, loc
));
3515 case CoeffectRule::Type::CCParam
:
3516 if (caller
.m_name
!= callee
.m_name
) return false;
3518 case CoeffectRule::Type::FunParam
: {
3519 if (fca
.hasUnpack()) return false;
3520 if (fca
.numArgs() <= callee
.m_index
) return false;
3521 auto const l1
= caller
.m_index
;
3522 auto const l2
= topStkEquiv(env
, fca
.numInputs() - callee
.m_index
- 1);
3524 (l1
<= MaxLocalId
&&
3526 locsAreEquiv(env
, l1
, l2
));
3528 case CoeffectRule::Type::CCReified
:
3529 // TODO: optimize these
3531 case CoeffectRule::Type::ClosureParentScope
:
3532 case CoeffectRule::Type::GeneratorThis
:
3533 case CoeffectRule::Type::Caller
:
3534 case CoeffectRule::Type::Invalid
:
3540 bool fcallCanSkipCoeffectsCheck(ISS
& env
,
3541 const FCallArgs
& fca
,
3542 const res::Func
& func
,
3543 uint32_t numExtraInputs
) {
3544 auto const requiredCoeffectsOpt
= func
.requiredCoeffects();
3545 if (!requiredCoeffectsOpt
) return false;
3546 auto const required
= *requiredCoeffectsOpt
;
3547 auto const provided
=
3548 RuntimeCoeffects::fromValue(env
.ctx
.func
->requiredCoeffects
.value() |
3549 env
.ctx
.func
->coeffectEscapes
.value());
3550 if (!provided
.canCall(required
)) return false;
3551 auto const calleeRules
= func
.coeffectRules();
3552 // If we couldn't tell whether callee has rules or not, punt.
3553 if (!calleeRules
) return false;
3554 if (calleeRules
->empty()) return true;
3555 if (calleeRules
->size() == 1 && (*calleeRules
)[0].isCaller()) return true;
3556 auto const callerRules
= env
.ctx
.func
->coeffectRules
;
3557 return std::is_permutation(callerRules
.begin(), callerRules
.end(),
3558 calleeRules
->begin(), calleeRules
->end(),
3559 [&] (const CoeffectRule
& a
,
3560 const CoeffectRule
& b
) {
3561 return coeffectRulesMatch(env
, fca
, func
,
3567 template<typename FCallWithFCA
>
3568 bool fcallOptimizeChecks(
3570 const FCallArgs
& fca
,
3571 const res::Func
& func
,
3572 FCallWithFCA fcallWithFCA
,
3573 Optional
<uint32_t> inOutNum
,
3575 uint32_t numExtraInputs
3577 // Don't optimize away in-out checks if we might use the null safe
3578 // operator. If we do so, we need the in-out bits to shuffle the
3580 if (!maybeNullsafe
&& fca
.enforceInOut()) {
3581 if (inOutNum
== fca
.numRets() - 1) {
3583 for (auto i
= 0; i
< fca
.numArgs(); ++i
) {
3584 auto const kind
= func
.lookupParamPrep(i
);
3585 if (kind
.inOut
== TriBool::Maybe
) {
3590 if (yesOrNo(fca
.isInOut(i
)) != kind
.inOut
) {
3591 // The function/method may not exist, in which case we should raise a
3592 // different error. Just defer the checks to the runtime.
3593 auto const exact
= func
.exactFunc();
3594 if (!exact
) return false;
3597 auto const exCls
= makeStaticString("InvalidArgumentException");
3598 auto const err
= makeStaticString(
3599 formatParamInOutMismatch(
3600 exact
->name
->data(),
3608 bc::NewObjD
{ exCls
},
3612 bc::FCallCtor
{ FCallArgs(1), staticEmptyString() },
3622 // Optimize away the runtime inout-ness check.
3623 reduce(env
, fcallWithFCA(fca
.withoutInOut()));
3629 if (fca
.enforceReadonly()) {
3631 for (auto i
= 0; i
< fca
.numArgs(); ++i
) {
3632 if (!fca
.isReadonly(i
)) continue;
3633 auto const kind
= func
.lookupParamPrep(i
);
3634 if (kind
.readonly
== TriBool::Maybe
) {
3639 if (kind
.readonly
!= TriBool::Yes
) {
3640 // The function/method may not exist, in which case we should raise a
3641 // different error. Just defer the checks to the runtime.
3642 if (!func
.exactFunc()) return false;
3649 // Optimize away the runtime readonly-ness check.
3650 reduce(env
, fcallWithFCA(fca
.withoutReadonly()));
3655 if (fca
.enforceMutableReturn()) {
3656 if (func
.lookupReturnReadonly() == TriBool::No
) {
3657 reduce(env
, fcallWithFCA(fca
.withoutEnforceMutableReturn()));
3662 if (fca
.enforceReadonlyThis()) {
3663 if (func
.lookupReadonlyThis() == TriBool::Yes
) {
3664 reduce(env
, fcallWithFCA(fca
.withoutEnforceReadonlyThis()));
3669 // Infer whether the callee supports async eager return.
3670 if (fca
.asyncEagerTarget() != NoBlockId
) {
3671 if (func
.supportsAsyncEagerReturn() == TriBool::No
) {
3672 reduce(env
, fcallWithFCA(fca
.withoutAsyncEagerTarget()));
3677 if (!fca
.skipRepack() && fcallCanSkipRepack(env
, fca
, func
)) {
3678 reduce(env
, fcallWithFCA(fca
.withoutRepack()));
3682 if (!fca
.skipCoeffectsCheck() &&
3683 fcallCanSkipCoeffectsCheck(env
, fca
, func
, numExtraInputs
)) {
3684 reduce(env
, fcallWithFCA(fca
.withoutCoeffectsCheck()));
3693 const FCallArgs
& fca
,
3694 const res::Func
& func
,
3697 uint32_t numExtraInputs
3699 auto const foldableFunc
= func
.exactFunc();
3700 if (!foldableFunc
) return false;
3701 if (!shouldAttemptToFold(env
, foldableFunc
, fca
, context
, maybeDynamic
)) {
3705 assertx(!fca
.hasUnpack() && !fca
.hasGenerics() && fca
.numRets() == 1);
3707 auto const finish
= [&] (Type ty
) {
3708 auto const v
= tv(ty
);
3709 if (!v
) return false;
3711 for (uint32_t i
= 0; i
< numExtraInputs
; ++i
) repl
.push_back(bc::PopC
{});
3712 for (uint32_t i
= 0; i
< fca
.numArgs(); ++i
) repl
.push_back(bc::PopC
{});
3713 repl
.push_back(bc::PopU
{});
3714 if (topT(env
, fca
.numArgs() + 1 + numExtraInputs
).subtypeOf(TInitCell
)) {
3715 repl
.push_back(bc::PopC
{});
3717 assertx(topT(env
, fca
.numArgs() + 1 + numExtraInputs
).subtypeOf(TUninit
));
3718 repl
.push_back(bc::PopU
{});
3720 repl
.push_back(gen_constant(*v
));
3721 reduce(env
, std::move(repl
));
3725 if (foldableFunc
->attrs
& AttrBuiltin
&&
3726 foldableFunc
->attrs
& AttrIsFoldable
) {
3727 auto ret
= const_fold(env
, fca
.numArgs(), numExtraInputs
, *foldableFunc
,
3729 if (!ret
) return false;
3730 return finish(std::move(*ret
));
3733 CompactVector
<Type
> args(fca
.numArgs());
3734 auto const firstArgPos
= numExtraInputs
+ fca
.numInputs() - 1;
3735 for (auto i
= uint32_t{0}; i
< fca
.numArgs(); ++i
) {
3736 auto const& arg
= topT(env
, firstArgPos
- i
);
3737 auto const isScalar
= is_scalar(arg
);
3739 (env
.index
.func_depends_on_arg(foldableFunc
, i
) ||
3740 !arg
.subtypeOf(BInitCell
))) {
3743 args
[i
] = isScalar
? scalarize(arg
) : arg
;
3746 auto calleeCtx
= CallContext
{
3751 if (env
.collect
.unfoldableFuncs
.count(calleeCtx
)) return false;
3753 auto [foldableReturnType
, _
] = env
.index
.lookup_foldable_return_type(
3757 if (finish(std::move(foldableReturnType
))) return true;
3759 env
.collect
.unfoldableFuncs
.emplace(std::move(calleeCtx
));
3763 Type
typeFromWH(Type t
) {
3764 if (!t
.couldBe(BObj
)) {
3765 // Exceptions will be thrown if a non-object is awaited.
3769 // Throw away non-obj component.
3772 // If we aren't even sure this is a wait handle, there's nothing we can
3774 if (!is_specialized_wait_handle(t
)) {
3778 return wait_handle_inner(t
);
3781 void pushCallReturnType(ISS
& env
,
3783 const FCallArgs
& fca
,
3785 std::vector
<Type
> inOuts
) {
3786 auto const numRets
= fca
.numRets();
3788 assertx(fca
.asyncEagerTarget() == NoBlockId
);
3789 assertx(IMPLIES(nullsafe
, inOuts
.size() == numRets
- 1));
3791 for (auto i
= uint32_t{0}; i
< numRets
- 1; ++i
) popU(env
);
3792 if (!ty
.couldBe(BVecN
)) {
3793 // Function cannot have an in-out args match, so call will
3796 for (int32_t i
= 0; i
< numRets
; i
++) push(env
, TBottom
);
3797 return unreachable(env
);
3799 // We'll only hit the nullsafe null case, so the outputs are the
3801 for (auto& t
: inOuts
) push(env
, std::move(t
));
3802 push(env
, TInitNull
);
3806 // If we might use the nullsafe operator, we need to union in the
3807 // null case (which means the inout args are unchanged).
3808 if (is_specialized_array_like(ty
)) {
3809 for (int32_t i
= 1; i
< numRets
; i
++) {
3810 auto elem
= array_like_elem(ty
, ival(i
)).first
;
3811 if (nullsafe
) elem
|= inOuts
[i
-1];
3812 push(env
, std::move(elem
));
3817 ? opt(array_like_elem(ty
, ival(0)).first
)
3818 : array_like_elem(ty
, ival(0)).first
3821 for (int32_t i
= 0; i
< numRets
; ++i
) push(env
, TInitCell
);
3825 if (fca
.asyncEagerTarget() != NoBlockId
) {
3826 assertx(!ty
.is(BBottom
));
3827 push(env
, typeFromWH(ty
));
3828 assertx(!topC(env
).subtypeOf(BBottom
));
3829 env
.propagate(fca
.asyncEagerTarget(), &env
.state
);
3832 if (nullsafe
) ty
= opt(std::move(ty
));
3833 if (ty
.is(BBottom
)) {
3834 // The callee function never returns. It might throw, or loop
3837 return unreachable(env
);
3839 return push(env
, std::move(ty
));
3842 const StaticString s_defined
{ "defined" };
3843 const StaticString s_function_exists
{ "function_exists" };
3845 template<typename FCallWithFCA
>
3846 void fcallKnownImpl(
3848 const FCallArgs
& fca
,
3849 const res::Func
& func
,
3852 uint32_t numExtraInputs
,
3853 FCallWithFCA fcallWithFCA
,
3854 Optional
<uint32_t> inOutNum
3856 auto const numArgs
= fca
.numArgs();
3857 auto [returnType
, _
] = [&] {
3858 CompactVector
<Type
> args(numArgs
);
3859 auto const firstArgPos
= numExtraInputs
+ fca
.numInputs() - 1;
3860 for (auto i
= uint32_t{0}; i
< numArgs
; ++i
) {
3861 args
[i
] = topCV(env
, firstArgPos
- i
);
3864 return fca
.hasUnpack()
3865 ? env
.index
.lookup_return_type(env
.ctx
, &env
.collect
.methods
, func
)
3866 : env
.index
.lookup_return_type(
3867 env
.ctx
, &env
.collect
.methods
, args
, context
, func
3871 // If there's a caller/callee inout mismatch, then the call will
3873 if (fca
.enforceInOut()) {
3874 if (inOutNum
&& (*inOutNum
+ 1 != fca
.numRets())) {
3875 returnType
= TBottom
;
3879 if (fca
.asyncEagerTarget() != NoBlockId
&& typeFromWH(returnType
) == TBottom
) {
3880 // Kill the async eager target if the function never returns.
3881 reduce(env
, fcallWithFCA(std::move(fca
.withoutAsyncEagerTarget())));
3885 for (auto i
= uint32_t{0}; i
< numExtraInputs
; ++i
) popC(env
);
3886 if (fca
.hasGenerics()) popC(env
);
3887 if (fca
.hasUnpack()) popC(env
);
3888 std::vector
<Type
> inOuts
;
3889 for (auto i
= uint32_t{0}; i
< numArgs
; ++i
) {
3890 if (nullsafe
&& fca
.isInOut(numArgs
- i
- 1)) {
3891 inOuts
.emplace_back(popCV(env
));
3898 pushCallReturnType(env
, std::move(returnType
),
3899 fca
, nullsafe
, std::move(inOuts
));
3902 void fcallUnknownImpl(ISS
& env
,
3903 const FCallArgs
& fca
,
3904 const Type
& retTy
= TInitCell
) {
3905 if (fca
.hasGenerics()) popC(env
);
3906 if (fca
.hasUnpack()) popC(env
);
3907 auto const numArgs
= fca
.numArgs();
3908 auto const numRets
= fca
.numRets();
3909 for (auto i
= uint32_t{0}; i
< numArgs
; ++i
) popCV(env
);
3912 if (fca
.asyncEagerTarget() != NoBlockId
) {
3913 assertx(numRets
== 1);
3914 assertx(!retTy
.is(BBottom
));
3916 env
.propagate(fca
.asyncEagerTarget(), &env
.state
);
3919 for (auto i
= uint32_t{0}; i
< numRets
- 1; ++i
) popU(env
);
3920 for (auto i
= uint32_t{0}; i
< numRets
; ++i
) push(env
, retTy
);
3923 void in(ISS
& env
, const bc::FCallFuncD
& op
) {
3924 auto const rfunc
= env
.index
.resolve_func(op
.str2
);
3926 if (auto const wrapped
= rfunc
.triviallyWrappedFunc()) {
3927 return reduce(env
, bc::FCallFuncD
{ op
.fca
, *wrapped
});
3930 if (op
.fca
.hasGenerics()) {
3931 auto const tsList
= topC(env
);
3932 if (!tsList
.couldBe(BVec
)) {
3933 return unreachable(env
);
3936 if (!rfunc
.couldHaveReifiedGenerics()) {
3940 bc::FCallFuncD
{ op
.fca
.withoutGenerics(), op
.str2
}
3945 auto const updateBC
= [&] (FCallArgs fca
) {
3946 return bc::FCallFuncD
{ std::move(fca
), op
.str2
};
3949 auto const numInOut
= op
.fca
.enforceInOut()
3950 ? rfunc
.lookupNumInoutParams()
3953 if (fcallOptimizeChecks(env
, op
.fca
, rfunc
, updateBC
, numInOut
, false, 0) ||
3954 fcallTryFold(env
, op
.fca
, rfunc
, TBottom
, false, 0)) {
3958 if (auto const func
= rfunc
.exactFunc()) {
3959 if (optimize_builtin(env
, func
, op
.fca
)) return;
3962 fcallKnownImpl(env
, op
.fca
, rfunc
, TBottom
, false, 0, updateBC
, numInOut
);
3967 const StaticString
s_invoke("__invoke");
3969 s_DynamicContextOverrideUnsafe("__SystemLib\\DynamicContextOverrideUnsafe");
3971 bool isBadContext(const FCallArgs
& fca
) {
3972 return fca
.context() &&
3973 fca
.context()->tsame(s_DynamicContextOverrideUnsafe
.get());
3976 Context
getCallContext(const ISS
& env
, const FCallArgs
& fca
) {
3977 if (auto const name
= fca
.context()) {
3978 auto const rcls
= env
.index
.resolve_class(name
);
3979 if (rcls
&& rcls
->cls()) {
3980 return Context
{ env
.ctx
.unit
, env
.ctx
.func
, rcls
->cls() };
3982 return Context
{ env
.ctx
.unit
, env
.ctx
.func
, nullptr };
3987 void fcallObjMethodNullsafeNoFold(ISS
& env
,
3988 const FCallArgs
& fca
,
3990 assertx(fca
.asyncEagerTarget() == NoBlockId
);
3991 if (extraInput
) popC(env
);
3992 if (fca
.hasGenerics()) popC(env
);
3993 if (fca
.hasUnpack()) popC(env
);
3994 auto const numArgs
= fca
.numArgs();
3995 auto const numRets
= fca
.numRets();
3996 std::vector
<Type
> inOuts
;
3997 for (auto i
= uint32_t{0}; i
< numArgs
; ++i
) {
3998 if (fca
.enforceInOut() && fca
.isInOut(numArgs
- i
- 1)) {
3999 inOuts
.emplace_back(popCV(env
));
4006 for (auto i
= uint32_t{0}; i
< numRets
- 1; ++i
) popU(env
);
4007 assertx(inOuts
.size() == numRets
- 1);
4008 for (auto& t
: inOuts
) push(env
, std::move(t
));
4009 push(env
, TInitNull
);
4012 void fcallObjMethodNullsafe(ISS
& env
, const FCallArgs
& fca
, bool extraInput
) {
4013 // Don't fold if there's inout arguments. We could, in principal,
4014 // fold away the inout case like we do below, but we don't have the
4015 // bytecodes necessary to shuffle the stack.
4016 if (fca
.enforceInOut()) {
4017 for (uint32_t i
= 0; i
< fca
.numArgs(); ++i
) {
4018 if (fca
.isInOut(i
)) {
4019 return fcallObjMethodNullsafeNoFold(env
, fca
, extraInput
);
4025 if (extraInput
) repl
.push_back(bc::PopC
{});
4026 if (fca
.hasGenerics()) repl
.push_back(bc::PopC
{});
4027 if (fca
.hasUnpack()) repl
.push_back(bc::PopC
{});
4029 auto const numArgs
= fca
.numArgs();
4030 for (uint32_t i
= 0; i
< numArgs
; ++i
) {
4031 assertx(topC(env
, repl
.size()).subtypeOf(BInitCell
));
4032 repl
.push_back(bc::PopC
{});
4034 repl
.push_back(bc::PopU
{});
4035 repl
.push_back(bc::PopC
{});
4036 assertx(fca
.numRets() == 1);
4037 repl
.push_back(bc::Null
{});
4039 reduce(env
, std::move(repl
));
4042 template <typename UpdateBC
>
4043 void fcallObjMethodImpl(ISS
& env
, const FCallArgs
& fca
, SString methName
,
4044 bool nullThrows
, bool dynamic
, bool extraInput
,
4045 uint32_t inputPos
, SString clsHint
,
4046 UpdateBC updateBC
) {
4047 auto const input
= topC(env
, inputPos
);
4048 auto const location
= topStkEquiv(env
, inputPos
);
4049 auto const mayCallMethod
= input
.couldBe(BObj
);
4050 auto const mayUseNullsafe
= !nullThrows
&& input
.couldBe(BNull
);
4051 auto const mayThrowNonObj
= !input
.subtypeOf(nullThrows
? BObj
: BOptObj
);
4053 auto const refineLoc
= [&] {
4054 if (location
== NoLocalId
) return;
4055 if (!refineLocation(env
, location
, [&] (Type t
) {
4056 if (nullThrows
) return intersection_of(t
, TObj
);
4057 if (!t
.couldBe(BUninit
)) return intersection_of(t
, TOptObj
);
4058 if (!t
.couldBe(BObj
)) return intersection_of(t
, TNull
);
4065 auto const throws
= [&] {
4066 if (fca
.asyncEagerTarget() != NoBlockId
) {
4067 // Kill the async eager target if the function never returns.
4068 return reduce(env
, updateBC(fca
.withoutAsyncEagerTarget()));
4070 if (extraInput
) popC(env
);
4071 fcallUnknownImpl(env
, fca
, TBottom
);
4075 if (!mayCallMethod
&& !mayUseNullsafe
) {
4076 // This FCallObjMethodD may only throw
4080 if (!mayCallMethod
&& !mayThrowNonObj
) {
4081 // Null input, this may only return null, so do that.
4082 return fcallObjMethodNullsafe(env
, fca
, extraInput
);
4085 if (!mayCallMethod
) {
4086 // May only return null, but can't fold as we may still throw.
4087 assertx(mayUseNullsafe
&& mayThrowNonObj
);
4088 if (fca
.asyncEagerTarget() != NoBlockId
) {
4089 return reduce(env
, updateBC(fca
.withoutAsyncEagerTarget()));
4091 return fcallObjMethodNullsafeNoFold(env
, fca
, extraInput
);
4094 if (isBadContext(fca
)) return throws();
4096 auto const ctx
= getCallContext(env
, fca
);
4097 auto const ctxTy
= input
.couldBe(BObj
)
4098 ? intersection_of(input
, TObj
)
4100 auto const rfunc
= env
.index
.resolve_method(ctx
, ctxTy
, methName
);
4102 auto const numInOut
= fca
.enforceInOut()
4103 ? rfunc
.lookupNumInoutParams()
4106 auto const canFold
= !mayUseNullsafe
&& !mayThrowNonObj
;
4107 auto const numExtraInputs
= extraInput
? 1 : 0;
4108 if (fcallOptimizeChecks(env
, fca
, rfunc
, updateBC
,
4109 numInOut
, mayUseNullsafe
, numExtraInputs
) ||
4110 (canFold
&& fcallTryFold(env
, fca
, rfunc
, ctxTy
, dynamic
,
4115 if (clsHint
&& clsHint
->empty() && rfunc
.exactFunc()) {
4116 return reduce(env
, updateBC(fca
, rfunc
.exactFunc()->cls
->name
));
4119 fcallKnownImpl(env
, fca
, rfunc
, ctxTy
, mayUseNullsafe
, extraInput
? 1 : 0,
4120 updateBC
, numInOut
);
4124 void fcallFuncUnknown(ISS
& env
, const bc::FCallFunc
& op
) {
4126 fcallUnknownImpl(env
, op
.fca
);
4129 void fcallFuncClsMeth(ISS
& env
, const bc::FCallFunc
& op
) {
4130 assertx(topC(env
).subtypeOf(BClsMeth
));
4132 // TODO: optimize me
4133 fcallFuncUnknown(env
, op
);
4136 void fcallFuncFunc(ISS
& env
, const bc::FCallFunc
& op
) {
4137 assertx(topC(env
).subtypeOf(BFunc
));
4139 // TODO: optimize me
4140 fcallFuncUnknown(env
, op
);
4143 void fcallFuncObj(ISS
& env
, const bc::FCallFunc
& op
) {
4144 assertx(topC(env
).subtypeOf(BOptObj
));
4146 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4148 return bc::FCallFunc
{ std::move(fca
) };
4151 env
, op
.fca
, s_invoke
.get(),
4152 true, false, true, 0, nullptr,
4157 void fcallFuncStr(ISS
& env
, const bc::FCallFunc
& op
) {
4158 assertx(topC(env
).subtypeOf(BStr
));
4159 auto funcName
= getNameFromType(topC(env
));
4160 if (!funcName
) return fcallFuncUnknown(env
, op
);
4162 funcName
= normalizeNS(funcName
);
4163 if (!isNSNormalized(funcName
) || !notClassMethodPair(funcName
)) {
4164 return fcallFuncUnknown(env
, op
);
4167 auto const rfunc
= env
.index
.resolve_func(funcName
);
4168 if (!rfunc
.mightCareAboutDynCalls()) {
4169 return reduce(env
, bc::PopC
{}, bc::FCallFuncD
{ op
.fca
, funcName
});
4172 auto const updateBC
= [&] (FCallArgs fca
) {
4173 return bc::FCallFunc
{ std::move(fca
) };
4176 auto const numInOut
= op
.fca
.enforceInOut()
4177 ? rfunc
.lookupNumInoutParams()
4180 if (fcallOptimizeChecks(env
, op
.fca
, rfunc
, updateBC
, numInOut
, false, 1)) {
4183 fcallKnownImpl(env
, op
.fca
, rfunc
, TBottom
, false, 1, updateBC
, numInOut
);
4188 void in(ISS
& env
, const bc::FCallFunc
& op
) {
4189 auto const callable
= topC(env
);
4190 if (!callable
.couldBe(BObj
| BArrLike
| BStr
| BFunc
|
4191 BRFunc
| BClsMeth
| BRClsMeth
)) {
4192 if (op
.fca
.asyncEagerTarget() != NoBlockId
) {
4193 return reduce(env
, bc::FCallFunc
{ op
.fca
.withoutAsyncEagerTarget() });
4196 fcallUnknownImpl(env
, op
.fca
, TBottom
);
4197 return unreachable(env
);
4199 if (callable
.subtypeOf(BOptObj
)) return fcallFuncObj(env
, op
);
4200 if (callable
.subtypeOf(BFunc
)) return fcallFuncFunc(env
, op
);
4201 if (callable
.subtypeOf(BClsMeth
)) return fcallFuncClsMeth(env
, op
);
4202 if (callable
.subtypeOf(BStr
)) return fcallFuncStr(env
, op
);
4203 fcallFuncUnknown(env
, op
);
4206 void in(ISS
& env
, const bc::ResolveFunc
& op
) {
4210 void in(ISS
& env
, const bc::ResolveMethCaller
& op
) {
4215 void in(ISS
& env
, const bc::ResolveRFunc
& op
) {
4217 push(env
, union_of(TFunc
, TRFunc
));
4222 Type
ctxCls(ISS
& env
) {
4223 auto const s
= selfCls(env
);
4224 return setctx(s
? *s
: TCls
);
4227 Type
specialClsRefToCls(ISS
& env
, SpecialClsRef ref
) {
4228 if (!env
.ctx
.cls
) return TCls
;
4229 auto const op
= [&]()-> Optional
<Type
> {
4231 case SpecialClsRef::LateBoundCls
: return ctxCls(env
);
4232 case SpecialClsRef::SelfCls
: return selfClsExact(env
);
4233 case SpecialClsRef::ParentCls
: return parentClsExact(env
);
4235 always_assert(false);
4237 return op
? *op
: TCls
;
4240 template<bool reifiedVersion
= false>
4241 void resolveClsMethodSImpl(ISS
& env
, SpecialClsRef ref
, LSString meth_name
) {
4242 auto const clsTy
= specialClsRefToCls(env
, ref
);
4243 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, meth_name
);
4244 if (is_specialized_cls(clsTy
) && dcls_of(clsTy
).isExact() &&
4245 !rfunc
.couldHaveReifiedGenerics()) {
4246 auto const clsName
= dcls_of(clsTy
).cls().name();
4247 if (reifiedVersion
) {
4251 bc::ResolveClsMethodD
{ clsName
, meth_name
}
4254 return reduce(env
, bc::ResolveClsMethodD
{ clsName
, meth_name
});
4257 if (reifiedVersion
) popC(env
);
4258 if (!reifiedVersion
|| !rfunc
.couldHaveReifiedGenerics()) {
4259 push(env
, TClsMeth
);
4261 push(env
, union_of(TClsMeth
, TRClsMeth
));
4267 void in(ISS
& env
, const bc::ResolveClsMethod
& op
) {
4269 push(env
, TClsMeth
);
4272 void in(ISS
& env
, const bc::ResolveClsMethodD
& op
) {
4273 push(env
, TClsMeth
);
4276 void in(ISS
& env
, const bc::ResolveClsMethodS
& op
) {
4277 resolveClsMethodSImpl
<false>(env
, op
.subop1
, op
.str2
);
4280 void in(ISS
& env
, const bc::ResolveRClsMethod
&) {
4283 push(env
, union_of(TClsMeth
, TRClsMeth
));
4286 void in(ISS
& env
, const bc::ResolveRClsMethodD
&) {
4288 push(env
, union_of(TClsMeth
, TRClsMeth
));
4291 void in(ISS
& env
, const bc::ResolveRClsMethodS
& op
) {
4292 resolveClsMethodSImpl
<true>(env
, op
.subop1
, op
.str2
);
4295 void in(ISS
& env
, const bc::ResolveClass
& op
) {
4296 auto cls
= env
.index
.resolve_class(op
.str1
);
4302 if (module_check_always_passes(env
, *cls
)) {
4305 push(env
, clsExact(*cls
, true));
4308 void in(ISS
& env
, const bc::LazyClass
& op
) {
4310 push(env
, lazyclsval(op
.str1
));
4313 void in(ISS
& env
, const bc::EnumClassLabel
& op
) {
4315 push(env
, enumclasslabelval(op
.str1
));
4318 void in(ISS
& env
, const bc::FCallObjMethodD
& op
) {
4319 if (op
.fca
.hasGenerics()) {
4320 auto const tsList
= topC(env
);
4321 if (!tsList
.couldBe(BVec
)) {
4322 return unreachable(env
);
4325 auto const input
= topC(env
, op
.fca
.numInputs() + 1);
4326 auto const ctxTy
= input
.couldBe(BObj
)
4327 ? intersection_of(input
, TObj
)
4329 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, ctxTy
, op
.str4
);
4330 if (!rfunc
.couldHaveReifiedGenerics()) {
4334 bc::FCallObjMethodD
{
4335 op
.fca
.withoutGenerics(), op
.str2
, op
.subop3
, op
.str4
}
4340 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4341 if (!clsHint
) clsHint
= op
.str2
;
4342 return bc::FCallObjMethodD
{ std::move(fca
), clsHint
, op
.subop3
, op
.str4
};
4345 env
, op
.fca
, op
.str4
,
4346 op
.subop3
== ObjMethodOp::NullThrows
,
4347 false, false, op
.fca
.numInputs() + 1,
4352 void in(ISS
& env
, const bc::FCallObjMethod
& op
) {
4353 auto const methName
= getNameFromType(topC(env
));
4356 fcallUnknownImpl(env
, op
.fca
);
4360 auto const input
= topC(env
, op
.fca
.numInputs() + 2);
4361 auto const ctxTy
= input
.couldBe(BObj
)
4362 ? intersection_of(input
, TObj
)
4364 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, ctxTy
, methName
);
4365 if (!rfunc
.mightCareAboutDynCalls()) {
4369 bc::FCallObjMethodD
{ op
.fca
, op
.str2
, op
.subop3
, methName
}
4373 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4374 if (!clsHint
) clsHint
= op
.str2
;
4375 return bc::FCallObjMethod
{ std::move(fca
), clsHint
, op
.subop3
};
4378 env
, op
.fca
, methName
,
4379 op
.subop3
== ObjMethodOp::NullThrows
,
4380 true, true, op
.fca
.numInputs() + 2,
4387 template <typename Op
, class UpdateBC
>
4388 void fcallClsMethodImpl(ISS
& env
, const Op
& op
, Type clsTy
, SString methName
,
4389 bool dynamic
, uint32_t numExtraInputs
, SString clsHint
,
4390 UpdateBC updateBC
) {
4391 if (isBadContext(op
.fca
)) {
4392 if (op
.fca
.asyncEagerTarget() != NoBlockId
) {
4393 return reduce(env
, updateBC(op
.fca
.withoutAsyncEagerTarget()));
4395 for (auto i
= uint32_t{0}; i
< numExtraInputs
; ++i
) popC(env
);
4396 fcallUnknownImpl(env
, op
.fca
, TBottom
);
4401 auto const ctx
= getCallContext(env
, op
.fca
);
4402 auto const rfunc
= env
.index
.resolve_method(ctx
, clsTy
, methName
);
4404 auto const numInOut
= op
.fca
.enforceInOut()
4405 ? rfunc
.lookupNumInoutParams()
4408 if (fcallOptimizeChecks(env
, op
.fca
, rfunc
, updateBC
, numInOut
, false,
4410 fcallTryFold(env
, op
.fca
, rfunc
, clsTy
, dynamic
, numExtraInputs
)) {
4414 if (clsHint
&& rfunc
.exactFunc() && clsHint
->empty()) {
4415 return reduce(env
, updateBC(op
.fca
, rfunc
.exactFunc()->cls
->name
));
4418 fcallKnownImpl(env
, op
.fca
, rfunc
, clsTy
, false /* nullsafe */,
4419 numExtraInputs
, updateBC
, numInOut
);
4424 void in(ISS
& env
, const bc::FCallClsMethodD
& op
) {
4425 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4426 return bc::FCallClsMethodD
{ std::move(fca
), op
.str2
, op
.str3
};
4429 auto const rcls
= env
.index
.resolve_class(op
.str2
);
4431 if (op
.fca
.asyncEagerTarget() != NoBlockId
) {
4432 return reduce(env
, updateBC(op
.fca
.withoutAsyncEagerTarget()));
4434 fcallUnknownImpl(env
, op
.fca
, TBottom
);
4439 auto const clsTy
= clsExact(*rcls
, true);
4440 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, op
.str3
);
4442 if (op
.fca
.hasGenerics() && !rfunc
.couldHaveReifiedGenerics()) {
4446 bc::FCallClsMethodD
{
4447 op
.fca
.withoutGenerics(), op
.str2
, op
.str3
}
4451 if (auto const func
= rfunc
.exactFunc()) {
4452 assertx(func
->cls
!= nullptr);
4453 if (func
->cls
->name
->same(op
.str2
) &&
4454 optimize_builtin(env
, func
, op
.fca
)) {
4455 // When we use FCallBuiltin to call a static method, the litstr method
4456 // name will be a fully qualified cls::fn (e.g. "HH\Map::fromItems").
4458 // As a result, we can only do this optimization if the name of the
4459 // builtin function's class matches this op's class name immediate.
4464 fcallClsMethodImpl(env
, op
, clsTy
, op
.str3
, false, 0, nullptr, updateBC
);
4467 void in(ISS
& env
, const bc::FCallClsMethod
& op
) {
4468 auto const methName
= getNameFromType(topC(env
, 1));
4472 fcallUnknownImpl(env
, op
.fca
);
4476 auto const clsTy
= topC(env
);
4477 auto const ctxTy
= clsTy
.couldBe(BCls
)
4478 ? intersection_of(clsTy
, TCls
)
4480 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, ctxTy
, methName
);
4481 auto const skipLogAsDynamicCall
=
4482 !Cfg::Eval::LogKnownMethodsAsDynamicCalls
&&
4483 op
.subop3
== IsLogAsDynamicCallOp::DontLogAsDynamicCall
;
4484 if (is_specialized_cls(clsTy
) && dcls_of(clsTy
).isExact() &&
4485 module_check_always_passes(env
, dcls_of(clsTy
)) &&
4486 (!rfunc
.mightCareAboutDynCalls() || skipLogAsDynamicCall
)) {
4487 auto const clsName
= dcls_of(clsTy
).cls().name();
4492 bc::FCallClsMethodD
{ op
.fca
, clsName
, methName
}
4496 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4497 if (!clsHint
) clsHint
= op
.str2
;
4498 return bc::FCallClsMethod
{ std::move(fca
), clsHint
, op
.subop3
};
4500 fcallClsMethodImpl(env
, op
, clsTy
, methName
, true, 2, op
.str2
, updateBC
);
4503 void in(ISS
& env
, const bc::FCallClsMethodM
& op
) {
4504 auto const throws
= [&] {
4505 if (op
.fca
.asyncEagerTarget() != NoBlockId
) {
4506 // Kill the async eager target if the function never returns.
4509 bc::FCallClsMethodM
{
4510 op
.fca
.withoutAsyncEagerTarget(),
4518 fcallUnknownImpl(env
, op
.fca
, TBottom
);
4522 auto const t
= topC(env
);
4523 if (!t
.couldBe(BObj
| BCls
| BStr
| BLazyCls
)) return throws();
4525 auto const clsTy
= [&] {
4526 if (t
.subtypeOf(BCls
)) return t
;
4527 if (t
.subtypeOf(BObj
)) return objcls(t
);
4528 if (auto const clsname
= getNameFromType(t
)) {
4529 if (auto const rcls
= env
.index
.resolve_class(clsname
)) {
4530 return clsExact(*rcls
, true);
4537 if (clsTy
.is(BBottom
)) return throws();
4539 auto const methName
= op
.str4
;
4540 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, methName
);
4541 auto const maybeDynamicCall
= t
.couldBe(TStr
);
4542 auto const skipLogAsDynamicCall
=
4543 !Cfg::Eval::LogKnownMethodsAsDynamicCalls
&&
4544 op
.subop3
== IsLogAsDynamicCallOp::DontLogAsDynamicCall
;
4545 if (is_specialized_cls(clsTy
) && dcls_of(clsTy
).isExact() &&
4546 module_check_always_passes(env
, dcls_of(clsTy
)) &&
4547 (Cfg::Eval::RaiseStrToClsConversionNoticeSampleRate
== 0 || !maybeDynamicCall
) &&
4548 (!rfunc
.mightCareAboutDynCalls() ||
4549 !maybeDynamicCall
||
4550 skipLogAsDynamicCall
4553 auto const clsName
= dcls_of(clsTy
).cls().name();
4557 bc::FCallClsMethodD
{ op
.fca
, clsName
, methName
}
4561 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4562 if (!clsHint
) clsHint
= op
.str2
;
4563 return bc::FCallClsMethodM
{ std::move(fca
), clsHint
, op
.subop3
, methName
};
4565 fcallClsMethodImpl(env
, op
, clsTy
, methName
, maybeDynamicCall
, 1, op
.str2
, updateBC
);
4570 template <typename Op
, class UpdateBC
>
4571 void fcallClsMethodSImpl(ISS
& env
, const Op
& op
, SString methName
, bool dynamic
,
4572 bool extraInput
, UpdateBC updateBC
) {
4573 auto const clsTy
= specialClsRefToCls(env
, op
.subop3
);
4574 if (is_specialized_cls(clsTy
) && dcls_of(clsTy
).isExact() &&
4575 !dynamic
&& op
.subop3
== SpecialClsRef::LateBoundCls
) {
4576 auto const clsName
= dcls_of(clsTy
).cls().name();
4577 reduce(env
, bc::FCallClsMethodD
{ op
.fca
, clsName
, methName
});
4581 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, methName
);
4583 auto const numInOut
= op
.fca
.enforceInOut()
4584 ? rfunc
.lookupNumInoutParams()
4587 auto const numExtraInputs
= extraInput
? 1 : 0;
4588 if (fcallOptimizeChecks(env
, op
.fca
, rfunc
, updateBC
, numInOut
, false,
4590 fcallTryFold(env
, op
.fca
, rfunc
, ctxCls(env
), dynamic
,
4595 auto moduleCheck
= [&] {
4596 auto const func
= rfunc
.exactFunc();
4598 return module_check_always_passes(env
, *(func
->cls
));
4601 if (rfunc
.exactFunc() && op
.str2
->empty() && moduleCheck()) {
4602 return reduce(env
, updateBC(op
.fca
, rfunc
.exactFunc()->cls
->name
));
4605 fcallKnownImpl(env
, op
.fca
, rfunc
, ctxCls(env
), false /* nullsafe */,
4606 extraInput
? 1 : 0, updateBC
, numInOut
);
4611 void in(ISS
& env
, const bc::FCallClsMethodSD
& op
) {
4612 if (op
.fca
.hasGenerics()) {
4613 auto const clsTy
= specialClsRefToCls(env
, op
.subop3
);
4614 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, op
.str4
);
4615 if (!rfunc
.couldHaveReifiedGenerics()) {
4619 bc::FCallClsMethodSD
{
4620 op
.fca
.withoutGenerics(), op
.str2
, op
.subop3
, op
.str4
}
4625 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4626 if (!clsHint
) clsHint
= op
.str2
;
4627 return bc::FCallClsMethodSD
{ std::move(fca
), clsHint
, op
.subop3
, op
.str4
};
4629 fcallClsMethodSImpl(env
, op
, op
.str4
, false, false, updateBC
);
4632 void in(ISS
& env
, const bc::FCallClsMethodS
& op
) {
4633 auto const methName
= getNameFromType(topC(env
));
4636 fcallUnknownImpl(env
, op
.fca
);
4640 auto const clsTy
= specialClsRefToCls(env
, op
.subop3
);
4641 auto const rfunc
= env
.index
.resolve_method(env
.ctx
, clsTy
, methName
);
4642 if (!rfunc
.mightCareAboutDynCalls() && !rfunc
.couldHaveReifiedGenerics()) {
4646 bc::FCallClsMethodSD
{ op
.fca
, op
.str2
, op
.subop3
, methName
}
4650 auto const updateBC
= [&] (FCallArgs fca
, SString clsHint
= nullptr) {
4651 if (!clsHint
) clsHint
= op
.str2
;
4652 return bc::FCallClsMethodS
{ std::move(fca
), clsHint
, op
.subop3
};
4654 fcallClsMethodSImpl(env
, op
, methName
, true, true, updateBC
);
4657 void in(ISS
& env
, const bc::NewObjD
& op
) {
4658 auto const rcls
= env
.index
.resolve_class(op
.str1
);
4665 auto obj
= objExact(*rcls
);
4666 if (obj
.subtypeOf(BBottom
)) {
4668 return push(env
, TBottom
);
4671 auto const isCtx
= [&] {
4672 if (!env
.ctx
.cls
) return false;
4673 if (rcls
->couldBeOverriddenByRegular()) return false;
4674 auto const r
= env
.index
.resolve_class(*env
.ctx
.cls
);
4675 if (!r
) return false;
4676 return obj
== objExact(*r
);
4678 push(env
, setctx(std::move(obj
), isCtx
));
4681 void in(ISS
& env
, const bc::NewObjS
& op
) {
4682 auto const cls
= specialClsRefToCls(env
, op
.subop1
);
4683 if (!is_specialized_cls(cls
)) {
4688 auto const& dcls
= dcls_of(cls
);
4689 if (dcls
.isExact() && !dcls
.cls().couldHaveReifiedGenerics() &&
4690 module_check_always_passes(env
, dcls
) &&
4691 (!dcls
.cls().couldBeOverridden() ||
4692 equivalently_refined(cls
, unctx(cls
)))) {
4693 return reduce(env
, bc::NewObjD
{ dcls
.cls().name() });
4696 auto obj
= toobj(cls
);
4697 if (obj
.subtypeOf(BBottom
)) unreachable(env
);
4698 push(env
, std::move(obj
));
4701 void in(ISS
& env
, const bc::NewObj
& op
) {
4702 auto const cls
= topC(env
);
4703 if (!cls
.subtypeOf(BCls
) || !is_specialized_cls(cls
)) {
4709 auto const& dcls
= dcls_of(cls
);
4710 if (dcls
.isExact() && !dcls
.cls().mightCareAboutDynConstructs() &&
4711 module_check_always_passes(env
, dcls
)) {
4715 bc::NewObjD
{ dcls
.cls().name() }
4720 auto obj
= toobj(cls
);
4721 if (obj
.subtypeOf(BBottom
)) unreachable(env
);
4722 push(env
, std::move(obj
));
4727 bool objMightHaveConstProps(const Type
& t
) {
4728 assertx(t
.subtypeOf(BObj
));
4729 if (!is_specialized_obj(t
)) return true;
4730 auto const& dobj
= dobj_of(t
);
4731 if (dobj
.isExact()) return dobj
.cls().couldHaveConstProp();
4732 if (dobj
.isSub()) return dobj
.cls().subCouldHaveConstProp();
4733 if (dobj
.isIsect()) {
4734 for (auto const cls
: dobj
.isect()) {
4735 if (!cls
.subCouldHaveConstProp()) return false;
4739 assertx(dobj
.isIsectAndExact());
4740 auto const [e
, i
] = dobj
.isectAndExact();
4741 if (!e
.subCouldHaveConstProp()) return false;
4742 for (auto const cls
: *i
) {
4743 if (!cls
.subCouldHaveConstProp()) return false;
4750 void in(ISS
& env
, const bc::FCallCtor
& op
) {
4751 auto const obj
= topC(env
, op
.fca
.numInputs() + 1);
4752 assertx(op
.fca
.numRets() == 1);
4754 if (!obj
.subtypeOf(BObj
)) return fcallUnknownImpl(env
, op
.fca
);
4756 if (op
.fca
.lockWhileUnwinding() && !objMightHaveConstProps(obj
)) {
4758 env
, bc::FCallCtor
{ op
.fca
.withoutLockWhileUnwinding(), op
.str2
}
4762 auto const rfunc
= env
.index
.resolve_ctor(obj
);
4764 auto const updateFCA
= [&] (FCallArgs
&& fca
) {
4765 return bc::FCallCtor
{ std::move(fca
), op
.str2
};
4768 auto const numInOut
= op
.fca
.enforceInOut()
4769 ? rfunc
.lookupNumInoutParams()
4772 auto const canFold
= obj
.subtypeOf(BObj
);
4773 if (fcallOptimizeChecks(env
, op
.fca
, rfunc
, updateFCA
, numInOut
, false, 0) ||
4774 (canFold
&& fcallTryFold(env
, op
.fca
, rfunc
,
4775 obj
, false /* dynamic */, 0))) {
4779 if (rfunc
.exactFunc() && op
.str2
->empty()) {
4780 // We've found the exact func that will be called, set the hint.
4781 return reduce(env
, bc::FCallCtor
{ op
.fca
, rfunc
.exactFunc()->cls
->name
});
4784 fcallKnownImpl(env
, op
.fca
, rfunc
, obj
, false /* nullsafe */, 0,
4785 updateFCA
, numInOut
);
4788 void in(ISS
& env
, const bc::LockObj
& op
) {
4789 auto const t
= topC(env
);
4792 return push(env
, t
);
4794 if (!t
.subtypeOf(BObj
)) return bail();
4795 if (!is_specialized_obj(t
) || objMightHaveConstProps(t
)) {
4802 void in(ISS
& env
, const bc::IterBase
&) {
4803 auto const t
= topC(env
);
4804 if (t
.subtypeOf(BArrLike
)) return reduce(env
);
4806 auto const iterator
= subObj(builtin_class(env
.index
, s_Iterator
.get()));
4807 if (t
.subtypeOf(iterator
)) return reduce(env
);
4809 auto tOut
= TBottom
;
4810 auto hasEffects
= !t
.subtypeOf(BArrLike
| BObj
);
4811 if (t
.couldBe(BArrLike
)) tOut
|= intersection_of(t
, TArrLike
);
4812 if (t
.couldBe(BObj
)) {
4813 auto const tObj
= intersection_of(t
, TObj
);
4814 auto const map
= subObj(builtin_class(env
.index
, s_ConstMap
.get()));
4815 auto const set
= subObj(builtin_class(env
.index
, s_ConstSet
.get()));
4816 auto const vector
= subObj(builtin_class(env
.index
, s_ConstVector
.get()));
4817 if (tObj
.subtypeOf(iterator
)) {
4819 } else if (tObj
.subtypeOf(map
)) {
4821 } else if (tObj
.subtypeOf(set
)) {
4822 tOut
|= TDict
; // Sets are still backed by dicts rather than keysets
4823 } else if (tObj
.subtypeOf(vector
)) {
4824 tOut
|= TVec
; // Note: this includes Pair
4826 tOut
|= union_of(TArrLike
, TObj
);
4831 if (!hasEffects
) effect_free(env
);
4836 void in(ISS
& env
, const bc::LIterInit
& op
) {
4837 auto const ita
= op
.ita
;
4838 auto const baseLoc
= op
.loc2
;
4839 auto const sourceLoc
= [&] {
4840 auto const loc
= findIterBaseLoc(env
, baseLoc
);
4841 if (loc
== baseLoc
&& has_flag(ita
.flags
, IterArgsFlags::BaseConst
)) {
4842 // Can't improve this iterator further.
4847 auto const base
= locAsCell(env
, baseLoc
);
4848 auto ity
= iter_types(base
);
4850 auto const fallthrough
= [&] {
4851 setIter(env
, ita
.iterId
, LiveIter
{ ity
, sourceLoc
, NoLocalId
, env
.bid
,
4853 // Do this after setting the iterator, in case it clobbers the base local
4855 setLoc(env
, ita
.valId
, std::move(ity
.value
));
4857 setLoc(env
, ita
.keyId
, std::move(ity
.key
));
4858 setIterKey(env
, ita
.iterId
, ita
.keyId
);
4862 assertx(iterIsDead(env
, ita
.iterId
));
4864 if (!ity
.mayThrowOnInit
) {
4865 if (ity
.count
== IterTypes::Count::Empty
&& will_reduce(env
)) {
4867 return jmp_setdest(env
, op
.target3
);
4872 switch (ity
.count
) {
4873 case IterTypes::Count::Empty
:
4874 mayReadLocal(env
, ita
.valId
);
4875 if (ita
.hasKey()) mayReadLocal(env
, ita
.keyId
);
4876 jmp_setdest(env
, op
.target3
);
4878 case IterTypes::Count::Single
:
4879 case IterTypes::Count::NonEmpty
:
4881 return jmp_nevertaken(env
);
4882 case IterTypes::Count::ZeroOrOne
:
4883 case IterTypes::Count::Any
:
4884 // Take the branch before setting locals if the iter is already
4885 // empty, but after popping. Similar for the other IterInits
4887 env
.propagate(op
.target3
, &env
.state
);
4891 always_assert(false);
4894 void in(ISS
& env
, const bc::LIterNext
& op
) {
4895 auto const ita
= op
.ita
;
4896 auto const curVal
= peekLocRaw(env
, ita
.valId
);
4897 auto const curKey
= ita
.hasKey() ? peekLocRaw(env
, ita
.keyId
) : TBottom
;
4899 auto noThrow
= false;
4900 auto const noTaken
= match
<bool>(
4901 env
.state
.iters
[ita
.iterId
],
4903 always_assert(false && "IterNext on dead iter");
4906 [&] (const LiveIter
& ti
) {
4907 if (!ti
.types
.mayThrowOnNext
) noThrow
= true;
4908 if (ti
.baseLocal
!= NoLocalId
) hasInvariantIterBase(env
);
4909 switch (ti
.types
.count
) {
4910 case IterTypes::Count::Single
:
4911 case IterTypes::Count::ZeroOrOne
:
4913 case IterTypes::Count::NonEmpty
:
4914 case IterTypes::Count::Any
:
4915 setLoc(env
, ita
.valId
, ti
.types
.value
);
4917 setLoc(env
, ita
.keyId
, ti
.types
.key
);
4918 setIterKey(env
, ita
.iterId
, ita
.keyId
);
4921 case IterTypes::Count::Empty
:
4922 always_assert(false);
4928 if (noTaken
&& noThrow
&& will_reduce(env
)) {
4929 auto const iterId
= safe_cast
<IterId
>(ita
.iterId
);
4930 reduce(env
, bc::LIterFree
{ iterId
});
4933 mayReadLocal(env
, op
.loc2
);
4934 mayReadLocal(env
, ita
.valId
);
4935 if (ita
.hasKey()) mayReadLocal(env
, ita
.keyId
);
4937 if (noThrow
) nothrow(env
);
4940 jmp_nevertaken(env
);
4941 freeIter(env
, ita
.iterId
);
4945 env
.propagate(op
.target3
, &env
.state
);
4947 freeIter(env
, ita
.iterId
);
4948 setLocRaw(env
, ita
.valId
, curVal
);
4949 if (ita
.hasKey()) setLocRaw(env
, ita
.keyId
, curKey
);
4952 void in(ISS
& env
, const bc::LIterFree
& op
) {
4954 freeIter(env
, op
.iter1
);
4958 * Any include/require (or eval) op kills all locals, and private properties.
4960 void inclOpImpl(ISS
& env
) {
4964 killPrivateStatics(env
);
4965 push(env
, TInitCell
);
4968 void in(ISS
& env
, const bc::Incl
&) { inclOpImpl(env
); }
4969 void in(ISS
& env
, const bc::InclOnce
&) { inclOpImpl(env
); }
4970 void in(ISS
& env
, const bc::Req
&) { inclOpImpl(env
); }
4971 void in(ISS
& env
, const bc::ReqOnce
&) { inclOpImpl(env
); }
4972 void in(ISS
& env
, const bc::ReqDoc
&) { inclOpImpl(env
); }
4973 void in(ISS
& env
, const bc::Eval
&) { inclOpImpl(env
); }
4975 void in(ISS
& env
, const bc::This
&) {
4976 if (thisAvailable(env
)) {
4977 return reduce(env
, bc::BareThis
{ BareThisOp::NeverNull
});
4979 auto const ty
= thisTypeNonNull(env
);
4980 push(env
, ty
, StackThisId
);
4981 setThisAvailable(env
);
4982 if (ty
.subtypeOf(BBottom
)) unreachable(env
);
4985 void in(ISS
& env
, const bc::LateBoundCls
& op
) {
4986 if (env
.ctx
.cls
) effect_free(env
);
4987 auto const ty
= selfCls(env
);
4988 push(env
, setctx(ty
? *ty
: TCls
));
4991 void in(ISS
& env
, const bc::CheckThis
&) {
4992 if (thisAvailable(env
)) {
4995 setThisAvailable(env
);
4998 void in(ISS
& env
, const bc::BareThis
& op
) {
4999 if (thisAvailable(env
)) {
5000 if (op
.subop1
!= BareThisOp::NeverNull
) {
5001 return reduce(env
, bc::BareThis
{ BareThisOp::NeverNull
});
5005 auto const ty
= thisType(env
);
5006 if (ty
.subtypeOf(BBottom
)) {
5008 return push(env
, TBottom
);
5011 switch (op
.subop1
) {
5012 case BareThisOp::Notice
:
5014 case BareThisOp::NoNotice
:
5017 case BareThisOp::NeverNull
:
5018 setThisAvailable(env
);
5019 if (!env
.state
.unreachable
) effect_free(env
);
5020 return push(env
, ty
, StackThisId
);
5023 push(env
, ty
, StackThisId
);
5027 * Amongst other things, we use this to mark units non-persistent.
5029 void in(ISS
& env
, const bc::OODeclExists
& op
) {
5030 auto flag
= popC(env
);
5031 auto name
= popC(env
);
5032 if (!flag
.couldBe(BBool
) || !name
.couldBe(BStr
)) {
5038 if (flag
.subtypeOf(BBool
) && name
.subtypeOf(BStr
)) {
5042 auto const v
= tv(name
);
5043 if (!v
) return push(env
, TBool
);
5045 assertx(isStringType(v
->m_type
));
5046 auto const rcls
= env
.index
.resolve_class(v
->m_data
.pstr
);
5047 if (!rcls
) return push(env
, TFalse
);
5048 auto const cls
= rcls
->cls();
5050 // We know the Class* exists, but not its type.
5051 if (!cls
) return push(env
, TBool
);
5053 auto const exist
= [&] () -> bool {
5054 switch (op
.subop1
) {
5055 case OODeclExistsOp::Class
:
5056 return !(cls
->attrs
& (AttrInterface
| AttrTrait
));
5057 case OODeclExistsOp::Interface
:
5058 return cls
->attrs
& AttrInterface
;
5059 case OODeclExistsOp::Trait
:
5060 return cls
->attrs
& AttrTrait
;
5065 push(env
, exist
? TTrue
: TFalse
);
5070 bool couldBeMocked(const Type
& t
) {
5071 auto const dcls
= [&] () -> const DCls
* {
5072 if (is_specialized_cls(t
)) {
5074 } else if (is_specialized_obj(t
)) {
5079 // In practice this should not occur since this is used mostly on
5080 // the result of looked up type constraints.
5081 if (!dcls
) return true;
5082 if (dcls
->isExact() || dcls
->isSub()) return dcls
->cls().couldBeMocked();
5083 if (dcls
->isIsect()) {
5084 for (auto const cls
: dcls
->isect()) {
5085 if (!cls
.couldBeMocked()) return false;
5089 assertx(dcls
->isIsectAndExact());
5090 auto const [e
, i
] = dcls
->isectAndExact();
5091 if (!e
.couldBeMocked()) return false;
5092 for (auto const cls
: *i
) {
5093 if (!cls
.couldBeMocked()) return false;
5098 bool couldHaveReifiedType(const ISS
& env
, const TypeConstraint
& tc
) {
5099 if (env
.ctx
.func
->isClosureBody
) {
5100 for (auto i
= env
.ctx
.func
->params
.size();
5101 i
< env
.ctx
.func
->locals
.size();
5103 auto const name
= env
.ctx
.func
->locals
[i
].name
;
5104 if (!name
) return false; // named locals do not appear after unnamed local
5105 if (isMangledReifiedGenericInClosure(name
)) return true;
5109 if (tc
.isAnyObject()) return true;
5110 if (!tc
.isSubObject()) return false;
5111 auto const cls
= env
.index
.resolve_class(tc
.clsName());
5112 assertx(cls
.has_value());
5113 return cls
->couldHaveReifiedGenerics();
5118 using TCVec
= std::vector
<const TypeConstraint
*>;
5120 void in(ISS
& env
, const bc::VerifyParamType
& op
) {
5121 auto [newTy
, remove
, effectFree
] =
5122 verify_param_type(env
.index
, env
.ctx
, op
.loc1
, topC(env
));
5124 if (remove
) return reduce(env
);
5125 if (newTy
.subtypeOf(BBottom
)) unreachable(env
);
5133 push(env
, std::move(newTy
));
5136 void in(ISS
& env
, const bc::VerifyParamTypeTS
& op
) {
5137 auto const a
= topC(env
);
5138 if (!a
.couldBe(BDict
)) {
5143 auto const constraint
= env
.ctx
.func
->params
[op
.loc1
].typeConstraint
;
5144 // TODO(T31677864): We are being extremely pessimistic here, relax it
5145 if (!env
.ctx
.func
->isReified
&&
5146 (!env
.ctx
.cls
|| !env
.ctx
.cls
->hasReifiedGenerics
) &&
5147 !couldHaveReifiedType(env
, constraint
)) {
5148 return reduce(env
, bc::PopC
{});
5151 if (auto const inputTS
= tv(a
)) {
5152 if (!isValidTSType(*inputTS
, false)) {
5157 auto const resolvedTS
=
5158 resolve_type_structure(env
, inputTS
->m_data
.parr
).sarray();
5159 if (resolvedTS
&& resolvedTS
!= inputTS
->m_data
.parr
) {
5160 reduce(env
, bc::PopC
{});
5161 reduce(env
, bc::Dict
{ resolvedTS
});
5162 reduce(env
, bc::VerifyParamTypeTS
{ op
.loc1
});
5165 if (shouldReduceToNonReifiedVerifyType(env
, inputTS
->m_data
.parr
)) {
5166 return reduce(env
, bc::PopC
{});
5169 if (auto const last
= last_op(env
)) {
5170 if (last
->op
== Op::CombineAndResolveTypeStruct
) {
5171 if (auto const last2
= last_op(env
, 1)) {
5172 if (last2
->op
== Op::Dict
&&
5173 shouldReduceToNonReifiedVerifyType(env
, last2
->Dict
.arr1
)) {
5174 return reduce(env
, bc::PopC
{});
5179 mayReadLocal(env
, op
.loc1
, false);
5183 void verifyRetImpl(ISS
& env
, const TCVec
& tcs
,
5184 bool reduce_nullonly
, bool ts_flavor
) {
5185 assertx(!tcs
.empty());
5186 // If it is the ts flavor, then second thing on the stack, otherwise
5188 auto stackT
= topC(env
, (int)ts_flavor
);
5190 auto refined
= TInitCell
;
5192 auto effectFree
= true;
5195 stackT
.couldBe(BInitNull
) &&
5196 !stackT
.subtypeOf(BInitNull
);
5197 for (auto const& tc
: tcs
) {
5198 auto const type
= lookup_constraint(env
.index
, env
.ctx
, *tc
, stackT
);
5199 if (stackT
.moreRefined(type
.lower
)) {
5200 refined
= intersection_of(std::move(refined
), stackT
);
5204 if (!stackT
.couldBe(type
.upper
)) {
5205 if (ts_flavor
) popC(env
);
5208 return unreachable(env
);
5214 (!ts_flavor
|| tc
->isThis()) &&
5215 unopt(stackT
).moreRefined(type
.lower
);
5218 auto result
= intersection_of(stackT
, type
.upper
);
5219 if (type
.coerceClassToString
== TriBool::Yes
) {
5220 assertx(!type
.lower
.couldBe(BCls
| BLazyCls
));
5221 assertx(type
.upper
.couldBe(BStr
| BCls
| BLazyCls
));
5222 if (result
.couldBe(BCls
| BLazyCls
)) {
5223 result
= promote_classish(std::move(result
));
5224 if (effectFree
&& (ts_flavor
||
5225 Cfg::Eval::ClassStringHintNoticesSampleRate
> 0 ||
5226 !promote_classish(stackT
).moreRefined(type
.lower
))) {
5232 } else if (type
.coerceClassToString
== TriBool::Maybe
) {
5233 if (result
.couldBe(BCls
| BLazyCls
)) result
|= TSStr
;
5239 refined
= intersection_of(std::move(refined
), result
);
5240 if (refined
.is(BBottom
)) {
5241 if (ts_flavor
) popC(env
);
5244 return unreachable(env
);
5250 // We wouldn't get here if reified types were definitely not
5251 // involved, so just bail.
5252 auto const stackEquiv
= topStkEquiv(env
, 1);
5255 push(env
, std::move(stackT
), stackEquiv
);
5261 // In cases where stackT includes InitNull, but would pass the
5262 // type-constraint if it was not InitNull, we can lower to a
5265 if (ts_flavor
) return reduce(env
, bc::PopC
{}, bc::VerifyRetNonNullC
{});
5266 return reduce(env
, bc::VerifyRetNonNullC
{});
5274 if (ts_flavor
) popC(env
);
5276 push(env
, std::move(refined
));
5279 void in(ISS
& env
, const bc::VerifyOutType
& op
) {
5281 auto const& pinfo
= env
.ctx
.func
->params
[op
.loc1
];
5282 tcs
.push_back(&pinfo
.typeConstraint
);
5283 for (auto const& t
: pinfo
.upperBounds
.m_constraints
) tcs
.push_back(&t
);
5284 verifyRetImpl(env
, tcs
, false, false);
5287 void in(ISS
& env
, const bc::VerifyRetTypeC
& /*op*/) {
5289 tcs
.push_back(&env
.ctx
.func
->retTypeConstraint
);
5290 for (auto const& t
: env
.ctx
.func
->returnUBs
.m_constraints
) tcs
.push_back(&t
);
5291 verifyRetImpl(env
, tcs
, true, false);
5294 void in(ISS
& env
, const bc::VerifyRetTypeTS
& /*op*/) {
5295 auto const a
= topC(env
);
5296 if (!a
.couldBe(BDict
)) {
5301 auto const constraint
= env
.ctx
.func
->retTypeConstraint
;
5302 // TODO(T31677864): We are being extremely pessimistic here, relax it
5303 if (!env
.ctx
.func
->isReified
&&
5304 (!env
.ctx
.cls
|| !env
.ctx
.cls
->hasReifiedGenerics
) &&
5305 !couldHaveReifiedType(env
, constraint
)) {
5306 return reduce(env
, bc::PopC
{}, bc::VerifyRetTypeC
{});
5308 if (auto const inputTS
= tv(a
)) {
5309 if (!isValidTSType(*inputTS
, false)) {
5314 auto const resolvedTS
=
5315 resolve_type_structure(env
, inputTS
->m_data
.parr
).sarray();
5316 if (resolvedTS
&& resolvedTS
!= inputTS
->m_data
.parr
) {
5317 reduce(env
, bc::PopC
{});
5318 reduce(env
, bc::Dict
{ resolvedTS
});
5319 reduce(env
, bc::VerifyRetTypeTS
{});
5322 if (shouldReduceToNonReifiedVerifyType(env
, inputTS
->m_data
.parr
)) {
5323 return reduce(env
, bc::PopC
{}, bc::VerifyRetTypeC
{});
5326 if (auto const last
= last_op(env
)) {
5327 if (last
->op
== Op::CombineAndResolveTypeStruct
) {
5328 if (auto const last2
= last_op(env
, 1)) {
5329 if (last2
->op
== Op::Dict
&&
5330 shouldReduceToNonReifiedVerifyType(env
, last2
->Dict
.arr1
)) {
5331 return reduce(env
, bc::PopC
{}, bc::VerifyRetTypeC
{});
5336 TCVec tcs
{&constraint
};
5337 for (auto const& t
: env
.ctx
.func
->returnUBs
.m_constraints
) tcs
.push_back(&t
);
5338 verifyRetImpl(env
, tcs
, true, true);
5341 void in(ISS
& env
, const bc::VerifyRetNonNullC
&) {
5342 auto const constraint
= env
.ctx
.func
->retTypeConstraint
;
5343 if (constraint
.isSoft()) return;
5345 auto stackT
= topC(env
);
5346 if (!stackT
.couldBe(BInitNull
)) return reduce(env
);
5347 if (stackT
.subtypeOf(BInitNull
)) {
5350 return unreachable(env
);
5353 push(env
, unopt(std::move(stackT
)));
5356 void in(ISS
& env
, const bc::SelfCls
&) {
5357 auto const self
= selfClsExact(env
);
5366 void in(ISS
& env
, const bc::ParentCls
&) {
5367 auto const parent
= parentClsExact(env
);
5376 void in(ISS
& env
, const bc::CreateCl
& op
) {
5377 auto const nargs
= op
.arg1
;
5379 auto const rcls
= env
.index
.resolve_class(op
.str2
);
5381 discard(env
, nargs
);
5383 return push(env
, TBottom
);
5386 auto const cls
= rcls
->cls();
5389 "A closure class ({}) failed to resolve",
5392 assertx(cls
->unit
== env
.ctx
.unit
);
5393 assertx(is_closure(*cls
));
5396 * Every closure should have a unique allocation site, but we may see it
5397 * multiple times in a given round of analyzing this function. Each time we
5398 * may have more information about the used variables; the types should only
5399 * possibly grow. If it's already there we need to merge the used vars in
5400 * with what we saw last time.
5403 CompactVector
<Type
> usedVars(nargs
);
5404 for (auto i
= uint32_t{0}; i
< nargs
; ++i
) {
5405 usedVars
[nargs
- i
- 1] = unctx(popCU(env
));
5407 merge_closure_use_vars_into(
5408 env
.collect
.closureUseTypes
,
5416 if (env
.ctx
.cls
&& is_used_trait(*env
.ctx
.cls
)) {
5417 // Be pessimistic if we're within a trait. The closure will get
5418 // rescoped potentially multiple times at runtime.
5421 subObj(builtin_class(env
.index
, s_Closure
.get()))
5424 push(env
, objExact(*rcls
));
5428 void in(ISS
& env
, const bc::CreateCont
& /*op*/) {
5429 // First resume is always next() which pushes null.
5430 push(env
, TInitNull
);
5433 void in(ISS
& env
, const bc::ContEnter
&) { popC(env
); push(env
, TInitCell
); }
5434 void in(ISS
& env
, const bc::ContRaise
&) { popC(env
); push(env
, TInitCell
); }
5436 void in(ISS
& env
, const bc::Yield
&) {
5438 push(env
, TInitCell
);
5441 void in(ISS
& env
, const bc::YieldK
&) {
5444 push(env
, TInitCell
);
5447 void in(ISS
& /*env*/, const bc::ContCheck
&) {}
5448 void in(ISS
& env
, const bc::ContValid
&) { push(env
, TBool
); }
5449 void in(ISS
& env
, const bc::ContKey
&) { push(env
, TInitCell
); }
5450 void in(ISS
& env
, const bc::ContCurrent
&) { push(env
, TInitCell
); }
5451 void in(ISS
& env
, const bc::ContGetReturn
&) { push(env
, TInitCell
); }
5453 void pushTypeFromWH(ISS
& env
, Type t
) {
5454 auto inner
= typeFromWH(t
);
5455 // The next opcode is unreachable if awaiting a non-object or WaitH<Bottom>.
5456 if (inner
.subtypeOf(BBottom
)) unreachable(env
);
5457 push(env
, std::move(inner
));
5460 void in(ISS
& env
, const bc::WHResult
&) {
5461 pushTypeFromWH(env
, popC(env
));
5464 void in(ISS
& env
, const bc::Await
&) {
5465 pushTypeFromWH(env
, popC(env
));
5468 void in(ISS
& env
, const bc::AwaitAll
& op
) {
5469 auto const equiv
= equivLocalRange(env
, op
.locrange
);
5470 if (equiv
!= op
.locrange
.first
) {
5473 bc::AwaitAll
{LocalRange
{equiv
, op
.locrange
.count
}}
5477 for (uint32_t i
= 0; i
< op
.locrange
.count
; ++i
) {
5478 mayReadLocal(env
, op
.locrange
.first
+ i
);
5481 push(env
, TInitNull
);
5484 void in(ISS
& env
, const bc::SetImplicitContextByValue
&) {
5490 s_Memoize("__Memoize"),
5491 s_MemoizeLSB("__MemoizeLSB");
5493 void in(ISS
& env
, const bc::CreateSpecialImplicitContext
&) {
5494 auto const memoKey
= popC(env
);
5495 auto const type
= popC(env
);
5497 if (!type
.couldBe(BInt
) || !memoKey
.couldBe(BOptStr
)) {
5499 return push(env
, TBottom
);
5502 if (type
.subtypeOf(BInt
) && memoKey
.subtypeOf(BOptStr
)) {
5506 if (auto const v
= tv(type
); v
&& tvIsInt(*v
)) {
5507 switch (static_cast<ImplicitContext::State
>(v
->m_data
.num
)) {
5508 case ImplicitContext::State::Value
:
5509 case ImplicitContext::State::SoftSet
:
5510 return push(env
, TOptObj
);
5511 case ImplicitContext::State::SoftInaccessible
:
5512 case ImplicitContext::State::Inaccessible
:
5513 return push(env
, TObj
);
5517 return push(env
, TOptObj
);
5520 void in(ISS
& env
, const bc::Idx
&) {
5521 auto const def
= popC(env
);
5522 auto const [key
, promotion
] = promote_classlike_to_key(popC(env
));
5523 auto const base
= popC(env
);
5525 assertx(!def
.is(BBottom
));
5527 auto effectFree
= promotion
!= Promotion::YesMightThrow
;
5528 auto result
= TBottom
;
5530 auto const finish
= [&] {
5531 if (result
.is(BBottom
)) {
5532 assertx(!effectFree
);
5539 push(env
, std::move(result
));
5542 if (key
.couldBe(BNull
)) result
|= def
;
5543 if (key
.subtypeOf(BNull
)) return finish();
5545 if (!base
.subtypeOf(BArrLike
| BObj
| BStr
)) result
|= def
;
5547 if (base
.couldBe(BArrLike
)) {
5548 if (!key
.subtypeOf(BOptArrKey
)) effectFree
= false;
5549 if (key
.couldBe(BArrKey
)) {
5550 auto elem
= array_like_elem(
5552 key
.subtypeOf(BArrKey
) ? key
: intersection_of(key
, TArrKey
)
5554 result
|= std::move(elem
.first
);
5555 if (!elem
.second
) result
|= def
;
5558 if (base
.couldBe(BObj
)) {
5559 result
|= TInitCell
;
5562 if (base
.couldBe(BStr
)) {
5565 if (!key
.subtypeOf(BOptArrKey
)) effectFree
= false;
5571 void in(ISS
& env
, const bc::ArrayIdx
&) {
5572 auto def
= popC(env
);
5573 auto const [key
, promotion
] = promote_classlike_to_key(popC(env
));
5574 auto const base
= popC(env
);
5576 assertx(!def
.is(BBottom
));
5578 auto effectFree
= promotion
!= Promotion::YesMightThrow
;
5579 auto result
= TBottom
;
5581 auto const finish
= [&] {
5582 if (result
.is(BBottom
)) {
5583 assertx(!effectFree
);
5590 push(env
, std::move(result
));
5593 if (key
.couldBe(BNull
)) result
|= def
;
5594 if (key
.subtypeOf(BNull
)) return finish();
5596 if (!base
.subtypeOf(BArrLike
)) effectFree
= false;
5597 if (!base
.couldBe(BArrLike
)) return finish();
5599 if (!key
.subtypeOf(BOptArrKey
)) effectFree
= false;
5600 if (!key
.couldBe(BArrKey
)) return finish();
5602 auto elem
= array_like_elem(
5604 key
.subtypeOf(BArrKey
) ? key
: intersection_of(key
, TArrKey
)
5606 result
|= std::move(elem
.first
);
5607 if (!elem
.second
) result
|= std::move(def
);
5612 void implArrayMarkLegacy(ISS
& env
, bool legacy
) {
5613 auto const recursive
= popC(env
);
5614 auto const value
= popC(env
);
5616 if (auto const tv_recursive
= tv(recursive
)) {
5617 if (auto const tv_value
= tv(value
)) {
5618 if (tvIsBool(*tv_recursive
)) {
5619 auto const result
= eval_cell([&]{
5620 return val(*tv_recursive
).num
5621 ? arrprov::markTvRecursively(*tv_value
, legacy
)
5622 : arrprov::markTvShallow(*tv_value
, legacy
);
5634 // TODO(kshaunak): We could add some type info here.
5635 push(env
, TInitCell
);
5639 void in(ISS
& env
, const bc::ArrayMarkLegacy
&) {
5640 implArrayMarkLegacy(env
, true);
5643 void in(ISS
& env
, const bc::ArrayUnmarkLegacy
&) {
5644 implArrayMarkLegacy(env
, false);
5647 void in(ISS
& env
, const bc::CheckProp
&) {
5648 if (env
.ctx
.cls
->attrs
& AttrNoOverride
) {
5649 return reduce(env
, bc::False
{});
5655 void in(ISS
& env
, const bc::InitProp
& op
) {
5656 auto const t
= topC(env
);
5657 switch (op
.subop2
) {
5658 case InitPropOp::Static
: {
5659 auto const rcls
= env
.index
.resolve_class(env
.ctx
.cls
->name
);
5660 // If class isn't instantiable, this bytecode isn't reachable
5665 clsExact(*rcls
, true),
5673 case InitPropOp::NonStatic
:
5674 mergeThisProp(env
, op
.str1
, t
);
5678 for (auto const& prop
: env
.ctx
.func
->cls
->properties
) {
5679 if (prop
.name
!= op
.str1
) continue;
5681 ITRACE(1, "InitProp: {} = {}\n", op
.str1
, show(t
));
5684 [&] (const TypeConstraint
& tc
) -> std::pair
<Type
, bool> {
5685 assertx(tc
.validForProp());
5686 if (Cfg::Eval::CheckPropTypeHints
== 0) return { t
, true };
5687 auto const lookup
= lookup_constraint(env
.index
, env
.ctx
, tc
, t
);
5688 if (t
.moreRefined(lookup
.lower
)) return { t
, true };
5689 if (Cfg::Eval::ClassStringHintNoticesSampleRate
> 0) return { t
, false };
5690 if (!t
.couldBe(lookup
.upper
)) return { t
, false };
5691 if (lookup
.coerceClassToString
!= TriBool::Yes
) return { t
, false };
5692 auto promoted
= promote_classish(t
);
5693 if (!promoted
.moreRefined(lookup
.lower
)) return { t
, false };
5694 return { std::move(promoted
), true };
5697 auto const [refined
, effectFree
] = [&] () -> std::pair
<Type
, bool> {
5698 auto [refined
, effectFree
] = refine(prop
.typeConstraint
);
5699 for (auto ub
: prop
.ubs
.m_constraints
) {
5700 if (!effectFree
) break;
5701 auto [refined2
, effectFree2
] = refine(ub
);
5702 refined
&= refined2
;
5703 if (refined
.is(BBottom
)) effectFree
= false;
5704 effectFree
&= effectFree2
;
5706 return { std::move(refined
), effectFree
};
5709 auto const val
= [effectFree
= effectFree
] (const Type
& t
) {
5710 if (!effectFree
) return make_tv
<KindOfUninit
>();
5711 if (auto const v
= tv(t
)) return *v
;
5712 return make_tv
<KindOfUninit
>();
5715 auto const deepInit
=
5716 (prop
.attrs
& AttrDeepInit
) &&
5717 (type(val
) == KindOfUninit
) &&
5718 could_contain_objects(refined
);
5726 if (type(val
) == KindOfUninit
) break;
5727 return reduce(env
, bc::PopC
{});
5733 void in(ISS
& env
, const bc::Silence
& op
) {
5735 switch (op
.subop2
) {
5736 case SilenceOp::Start
:
5737 setLoc(env
, op
.loc1
, TInt
);
5739 case SilenceOp::End
:
5740 locRaw(env
, op
.loc1
);
5747 template <typename Op
, typename Rebind
>
5748 bool memoGetImpl(ISS
& env
, const Op
& op
, Rebind
&& rebind
) {
5749 always_assert(env
.ctx
.func
->isMemoizeWrapper
);
5750 always_assert(op
.locrange
.first
+ op
.locrange
.count
5751 <= env
.ctx
.func
->locals
.size());
5753 if (will_reduce(env
)) {
5754 // If we can use an equivalent, earlier range, then use that instead.
5755 auto const equiv
= equivLocalRange(env
, op
.locrange
);
5756 if (equiv
!= op
.locrange
.first
) {
5757 reduce(env
, rebind(LocalRange
{ equiv
, op
.locrange
.count
}));
5762 auto [retTy
, effectFree
] = memoGet(env
);
5764 // MemoGet can raise if we give a non arr-key local, or if we're in a method
5765 // and $this isn't available.
5766 auto allArrKey
= true;
5767 for (uint32_t i
= 0; i
< op
.locrange
.count
; ++i
) {
5768 // Peek here, because if we decide to reduce the bytecode, we
5769 // don't want to mark the locals as being read.
5770 allArrKey
&= peekLocRaw(env
, op
.locrange
.first
+ i
).subtypeOf(BArrKey
);
5773 (!env
.ctx
.func
->cls
||
5774 (env
.ctx
.func
->attrs
& AttrStatic
) ||
5775 thisAvailable(env
))) {
5776 if (will_reduce(env
)) {
5777 if (retTy
.subtypeOf(BBottom
)) {
5779 jmp_setdest(env
, op
.target1
);
5782 // deal with constprop manually; otherwise we will propagate the
5783 // taken edge and *then* replace the MemoGet with a constant.
5785 if (auto const v
= tv(retTy
)) {
5786 reduce(env
, gen_constant(*v
));
5794 // We don't remove the op, so mark the locals as being read.
5795 for (uint32_t i
= 0; i
< op
.locrange
.count
; ++i
) {
5796 mayReadLocal(env
, op
.locrange
.first
+ i
);
5799 if (retTy
.is(BBottom
)) {
5800 jmp_setdest(env
, op
.target1
);
5804 env
.propagate(op
.target1
, &env
.state
);
5805 push(env
, std::move(retTy
));
5811 void in(ISS
& env
, const bc::MemoGet
& op
) {
5814 [&] (const LocalRange
& l
) { return bc::MemoGet
{ op
.target1
, l
}; }
5818 void in(ISS
& env
, const bc::MemoGetEager
& op
) {
5819 always_assert(env
.ctx
.func
->isAsync
&& !env
.ctx
.func
->isGenerator
);
5821 auto const reduced
= memoGetImpl(
5823 [&] (const LocalRange
& l
) {
5824 return bc::MemoGetEager
{ op
.target1
, op
.target2
, l
};
5827 if (reduced
) return;
5829 auto const t
= popC(env
);
5831 push(env
, wait_handle(t
));
5832 env
.propagate(op
.target2
, &env
.state
);
5840 template <typename Op
>
5841 void memoSetImpl(ISS
& env
, const Op
& op
, bool eager
) {
5842 always_assert(env
.ctx
.func
->isMemoizeWrapper
);
5843 always_assert(op
.locrange
.first
+ op
.locrange
.count
5844 <= env
.ctx
.func
->locals
.size());
5846 // If we can use an equivalent, earlier range, then use that instead.
5847 auto const equiv
= equivLocalRange(env
, op
.locrange
);
5848 if (equiv
!= op
.locrange
.first
) {
5851 Op
{ LocalRange
{ equiv
, op
.locrange
.count
} }
5855 // If the call to the memoize implementation was optimized away to
5856 // an immediate instruction, record that fact so that we can
5857 // optimize away the corresponding MemoGet.
5858 auto effectFree
= [&] {
5859 auto const last
= last_op(env
);
5860 return last
&& pushes_immediate(last
->op
);
5863 // MemoSet can raise if we give a non arr-key local, or if we're in a method
5864 // and $this isn't available.
5865 auto allArrKey
= true;
5866 for (uint32_t i
= 0; i
< op
.locrange
.count
; ++i
) {
5867 allArrKey
&= locRaw(env
, op
.locrange
.first
+ i
).subtypeOf(BArrKey
);
5870 (!env
.ctx
.func
->cls
||
5871 (env
.ctx
.func
->attrs
& AttrStatic
) ||
5872 thisAvailable(env
))) {
5882 if (!env
.ctx
.func
->isAsync
|| eager
) return t
;
5883 return is_specialized_wait_handle(t
)
5884 ? wait_handle_inner(t
)
5889 push(env
, std::move(t
));
5894 void in(ISS
& env
, const bc::MemoSet
& op
) {
5895 memoSetImpl(env
, op
, false);
5898 void in(ISS
& env
, const bc::MemoSetEager
& op
) {
5899 always_assert(env
.ctx
.func
->isAsync
&& !env
.ctx
.func
->isGenerator
);
5900 memoSetImpl(env
, op
, true);
5907 //////////////////////////////////////////////////////////////////////
5909 void dispatch(ISS
& env
, const Bytecode
& op
) {
5910 #define O(opcode, ...) case Op::opcode: interp_step::in(env, op.opcode); return;
5911 switch (op
.op
) { OPCODES
}
5916 //////////////////////////////////////////////////////////////////////
5918 void interpStep(ISS
& env
, const Bytecode
& bc
) {
5919 ITRACE(2, " {} ({})\n",
5920 show(*env
.ctx
.func
, bc
),
5921 env
.unchangedBcs
+ env
.replacedBcs
.size());
5924 // If there are throw exit edges, make a copy of the state (except
5925 // stacks) in case we need to propagate across throw exits (if
5927 if (!env
.stateBefore
&& env
.blk
.throwExit
!= NoBlockId
) {
5928 env
.stateBefore
.emplace(with_throwable_only(env
.index
, env
.state
));
5933 default_dispatch(env
, bc
);
5935 if (env
.flags
.reduced
) return;
5937 auto const_prop
= [&] {
5938 if (!env
.flags
.canConstProp
) return false;
5940 auto const numPushed
= bc
.numPush();
5941 TinyVector
<TypedValue
> cells
;
5944 while (i
< numPushed
) {
5945 auto const v
= tv(topT(env
, i
));
5946 if (!v
) return false;
5947 cells
.push_back(*v
);
5951 if (env
.flags
.wasPEI
) {
5952 ITRACE(2, " nothrow (due to constprop)\n");
5953 env
.flags
.wasPEI
= false;
5955 if (!env
.flags
.effectFree
) {
5956 ITRACE(2, " effect_free (due to constprop)\n");
5957 env
.flags
.effectFree
= true;
5960 // If we're doing inline interp, don't actually perform the
5961 // constprop. If we do, we can infer static types that won't
5962 // actually exist at runtime.
5963 if (any(env
.collect
.opts
& CollectionOpts::Inlining
)) {
5964 ITRACE(2, " inlining, skipping actual constprop\n");
5970 auto const numPop
= bc
.numPop();
5971 for (auto j
= 0; j
< numPop
; j
++) {
5972 auto const flavor
= bc
.popFlavor(j
);
5973 if (flavor
== Flavor::C
) {
5974 interpStep(env
, bc::PopC
{});
5975 } else if (flavor
== Flavor::U
) {
5976 interpStep(env
, bc::PopU
{});
5978 assertx(flavor
== Flavor::CU
);
5979 auto const& popped
= topT(env
);
5980 if (popped
.subtypeOf(BUninit
)) {
5981 interpStep(env
, bc::PopU
{});
5983 assertx(popped
.subtypeOf(BInitCell
));
5984 interpStep(env
, bc::PopC
{});
5990 push(env
, from_cell(cells
[i
]));
5991 record(env
, gen_constant(cells
[i
]));
5996 if (const_prop()) return;
5998 assertx(!env
.flags
.effectFree
|| !env
.flags
.wasPEI
);
5999 if (env
.flags
.wasPEI
) {
6000 ITRACE(2, " PEI.\n");
6001 if (env
.stateBefore
) {
6002 env
.propagate(env
.blk
.throwExit
, &*env
.stateBefore
);
6005 env
.stateBefore
.reset();
6010 void interpOne(ISS
& env
, const Bytecode
& bc
) {
6011 env
.srcLoc
= bc
.srcLoc
;
6012 interpStep(env
, bc
);
6015 BlockId
speculate(Interp
& interp
) {
6016 auto low_water
= interp
.state
.stack
.size();
6018 interp
.collect
.opts
= interp
.collect
.opts
| CollectionOpts::Speculating
;
6020 interp
.collect
.opts
= interp
.collect
.opts
- CollectionOpts::Speculating
;
6023 auto failed
= false;
6024 ISS env
{ interp
, [&] (BlockId
, const State
*) { failed
= true; } };
6026 FTRACE(4, " Speculate B{}\n", interp
.bid
);
6027 for (auto const& bc
: interp
.blk
->hhbcs
) {
6028 assertx(!interp
.state
.unreachable
);
6029 auto const numPop
= bc
.numPop() + (bc
.op
== Op::Dup
? -1 : 0);
6030 if (interp
.state
.stack
.size() - numPop
< low_water
) {
6031 low_water
= interp
.state
.stack
.size() - numPop
;
6036 env
.collect
.mInstrState
.clear();
6037 FTRACE(3, " Bailing from speculate because propagate was called\n");
6041 auto const& flags
= env
.flags
;
6042 if (!flags
.effectFree
) {
6043 env
.collect
.mInstrState
.clear();
6044 FTRACE(3, " Bailing from speculate because not effect free\n");
6048 assertx(!flags
.returned
);
6050 if (flags
.jmpDest
!= NoBlockId
&& interp
.state
.stack
.size() == low_water
) {
6051 FTRACE(2, " Speculate found target block {}\n", flags
.jmpDest
);
6052 return flags
.jmpDest
;
6056 if (interp
.state
.stack
.size() != low_water
) {
6058 " Bailing from speculate because the speculated block "
6059 "left items on the stack\n");
6063 if (interp
.blk
->fallthrough
== NoBlockId
) {
6065 " Bailing from speculate because there was no fallthrough");
6069 FTRACE(2, " Speculate found fallthrough block {}\n",
6070 interp
.blk
->fallthrough
);
6072 return interp
.blk
->fallthrough
;
// speculateHelper: starting from jump target `orig`, repeatedly speculates
// through successor blocks (via speculate()) to find the furthest block the
// control flow can be proven to reach, then (when `updateTaken` is set and the
// current block ends in non-call control flow) rewrites the taken edge of the
// block's last instruction to point at the new target, and pops any stack
// values the speculated blocks consumed.
//
// NOTE(review): this chunk is a line-mangled extraction. The embedded numbers
// (6075, 6076, ...) are original source line numbers; gaps in that numbering
// mark statements missing from this view (e.g. 6077, 6079, 6082-6084,
// 6092-6104 — the switch cases of the `ok` lambda — and 6106-6107, the Interp
// construction around its argument list). Verify against the upstream file
// before making any code change here.
6075 BlockId
speculateHelper(ISS
& env
, BlockId orig
, bool updateTaken
) {
// Caller must supply a real target block.
6076 assertx(orig
!= NoBlockId
);
// Speculation is only worthwhile while reducing; otherwise keep the original
// target unchanged.
6078 if (!will_reduce(env
)) return orig
;
// Whether the current block ends in non-call control flow decides both if the
// taken edge can be rewritten and if stack pops during speculation are legal.
6080 auto const last
= last_op(env
);
6081 bool endsInControlFlow
= last
&& instrIsNonCallControlFlow(last
->op
);
// Speculate against a compact copy of the state so failed speculation cannot
// corrupt env.state.
6085 State temp
{env
.state
, State::Compact
{}};
// NOTE(review): the loop header enclosing the chase below (orig. line 6086)
// is absent from this extraction.
6087 auto const& func
= env
.ctx
.func
;
6088 auto const targetBlk
= func
.blocks()[target
].get();
// Stop chasing when the target lacks multiple predecessors — presumably such
// blocks are handled by ordinary block merging instead; confirm upstream.
6089 if (!targetBlk
->multiPred
) break;
// `ok` inspects the target block's final opcode to decide whether speculation
// through it is permitted.
6090 auto const ok
= [&] {
6091 switch (targetBlk
->hhbcs
.back().op
) {
// NOTE(review): the switch cases and the surrounding Interp construction
// (orig. lines 6092-6104, 6106-6107) are missing; the fragments below are the
// Interp constructor arguments.
6105 env
.index
, env
.ctx
, env
.collect
, target
, targetBlk
, temp
// Run speculation on the target; bail from the chase if it finds nothing.
6108 auto const old_size
= temp
.stack
.size();
6109 auto const new_target
= speculate(interp
);
6110 if (new_target
== NoBlockId
) break;
// Speculated blocks may only shrink the stack; a shrink is illegal when the
// current block ends in control flow (the pops could not be materialized).
6112 const ssize_t delta
= old_size
- temp
.stack
.size();
6113 assertx(delta
>= 0);
6114 if (delta
&& endsInControlFlow
) break;
// Advance the chase to the newly proven target.
6117 target
= new_target
;
6118 temp
.stack
.compact();
// Rewrite the taken edge(s) of the last instruction when requested.
6121 if (endsInControlFlow
&& updateTaken
) {
6123 auto needsUpdate
= target
!= orig
;
// NOTE(review): orig. lines 6124-6127 (presumably iterating the instruction's
// other jump targets) are missing from this extraction.
6128 if (bid
!= orig
) needsUpdate
= true;
6133 auto& bc
= mutate_last_op(env
);
6134 assertx(bc
.op
!= Op::Enter
);
// Retarget every edge that pointed at `orig`; others are marked NoBlockId.
6137 [&] (BlockId
& bid
) {
6138 bid
= bid
== orig
? target
: NoBlockId
;
// Pop the stack values consumed during speculation, choosing PopC vs PopU by
// the popped value's type. NOTE(review): the enclosing loop (orig. lines
// 6139-6144) is missing from this extraction.
6145 auto const& popped
= topT(env
);
6146 if (popped
.subtypeOf(BInitCell
)) {
6147 interpStep(env
, bc::PopC
{});
6149 assertx(popped
.subtypeOf(BUninit
));
6150 interpStep(env
, bc::PopU
{});
6159 //////////////////////////////////////////////////////////////////////
// run: interpret every bytecode of interp.blk starting from input state `in`,
// producing RunFlags that summarize the block (fallthrough target, return
// info, used params, unchanged/replaced bytecodes). Supports a "reprocess"
// retry: when interpretation mutates the block's bytecode, the loop restarts
// from `in` against the replaced bytecode sequence.
//
// NOTE(review): line-mangled extraction — embedded numbers are original
// source line numbers, and gaps (e.g. 6164, 6168-6169, 6176-6178, 6183-6186
// including the outer retry-loop header, 6200, 6202-6205 including the
// per-instruction loop header, 6208-6212, and various closing braces) mark
// statements missing from this view. Verify against upstream before editing.
6161 RunFlags
run(Interp
& interp
, const State
& in
,
6162 const PropagateFn
& propagate
,
6163 const RollbackFn
& rollback
) {
// Trace the incoming state and property state for debugging.
6165 FTRACE(2, "\nout {}{}\n",
6166 state_string(*interp
.ctx
.func
, interp
.state
, interp
.collect
),
6167 property_state_string(interp
.collect
.props
));
// Build the interpreter environment with the caller-supplied propagate hook.
6170 auto env
= ISS
{ interp
, propagate
};
6171 auto ret
= RunFlags
{};
// `finish` records the block's fallthrough plus the bytecode-update bookkeeping
// (unchanged prefix length and replacement bytecodes) into the result.
6172 auto finish
= [&] (BlockId fallthrough
) {
6173 ret
.updateInfo
.fallthrough
= fallthrough
;
6174 ret
.updateInfo
.unchangedBcs
= env
.unchangedBcs
;
6175 ret
.updateInfo
.replacedBcs
= std::move(env
.replacedBcs
);
// Retry bookkeeping: bytecodes at index >= retryOffset come from retryBcs
// rather than the original block.
6179 BytecodeVec retryBcs
;
6180 auto retryOffset
= interp
.blk
->hhbcs
.size();
6181 auto size
= retryOffset
;
6182 BlockId retryFallthrough
= interp
.blk
->fallthrough
;
// NOTE(review): the outer retry-loop header (orig. ~6183-6186) is missing.
// The fragments below handle the end-of-pass reprocess decision.
6187 finish_tracked_elems(env
, 0);
6188 if (!env
.reprocess
) break;
6189 FTRACE(2, " Reprocess mutated block {}\n", interp
.bid
);
// A reprocess only makes sense if something actually changed.
6190 assertx(env
.unchangedBcs
< retryOffset
|| env
.replacedBcs
.size());
// Reset the environment to re-run from `in` over the mutated bytecode.
6192 retryOffset
= env
.unchangedBcs
;
6193 retryBcs
= std::move(env
.replacedBcs
);
6194 env
.unchangedBcs
= 0;
6195 env
.state
.copy_from(in
);
6196 env
.reprocess
= false;
6197 env
.replacedBcs
.clear();
6198 env
.stateBefore
.reset();
6199 size
= retryOffset
+ retryBcs
.size();
6201 ret
.usedParams
.reset();
// Select the current bytecode: original block prefix, or replacement tail.
// NOTE(review): the per-instruction loop header (orig. 6204-6205) is missing.
6206 auto const& bc
= idx
< retryOffset
?
6207 interp
.blk
->hhbcs
[idx
] : retryBcs
[idx
- retryOffset
];
6211 auto const& flags
= env
.flags
;
// Accumulate per-instruction flags into the block-level result.
6213 ret
.usedParams
|= flags
.usedParams
;
6215 if (flags
.wasPEI
) ret
.noThrow
= false;
// Track loss of effect-freedom; bail immediately when the collection pass
// only cares about effect-free blocks.
6217 if (interp
.collect
.effectFree
&& !flags
.effectFree
) {
6218 interp
.collect
.effectFree
= false;
6219 if (any(interp
.collect
.opts
& CollectionOpts::EffectFreeOnly
)) {
6220 env
.collect
.mInstrState
.clear();
6221 FTRACE(2, " Bailing because not effect free\n");
6222 return finish(NoBlockId
);
// The instruction returned: must be the last one; record return info unless a
// reprocess was requested.
6226 if (flags
.returned
) {
6227 always_assert(idx
== size
);
6228 if (env
.reprocess
) continue;
6230 always_assert(interp
.blk
->fallthrough
== NoBlockId
);
6231 assertx(!ret
.returned
);
6232 FTRACE(2, " returned {}\n", show(*flags
.returned
));
6233 ret
.retParam
= flags
.retParam
;
6234 ret
.returned
= flags
.returned
;
6235 return finish(NoBlockId
);
// The instruction jumped: must be the last one. Decide whether the block
// still falls through, then speculate past the jump destination.
6238 if (flags
.jmpDest
!= NoBlockId
) {
6239 always_assert(idx
== size
);
6240 auto const hasFallthrough
= [&] {
6241 if (flags
.jmpDest
!= interp
.blk
->fallthrough
) {
6242 FTRACE(2, " <took branch; no fallthrough>\n");
6243 auto const last
= last_op(env
);
6244 return !last
|| !instrIsNonCallControlFlow(last
->op
);
6246 FTRACE(2, " <branch never taken>\n");
6250 if (hasFallthrough
) retryFallthrough
= flags
.jmpDest
;
6251 if (env
.reprocess
) continue;
6252 finish_tracked_elems(env
, 0);
6253 auto const newDest
= speculateHelper(env
, flags
.jmpDest
, true);
6254 propagate(newDest
, &interp
.state
);
6255 return finish(hasFallthrough
? newDest
: NoBlockId
);
// Fallthrough after this instruction is unreachable: stop the block here.
// NOTE(review): orig. lines 6260-6262 (the reprocess branch body) are missing.
6258 if (interp
.state
.unreachable
) {
6259 if (env
.reprocess
) {
6263 FTRACE(2, " <bytecode fallthrough is unreachable>\n");
6264 finish_tracked_elems(env
, 0);
6265 return finish(NoBlockId
);
// Normal end of block: speculate past the fallthrough target (without edge
// updates) and propagate the resulting state.
6269 FTRACE(2, " <end block>\n");
6270 if (retryFallthrough
!= NoBlockId
) {
6271 retryFallthrough
= speculateHelper(env
, retryFallthrough
, false);
6272 propagate(retryFallthrough
, &interp
.state
);
6274 return finish(retryFallthrough
);
// step: interpret a single instruction `op` against interp's current state,
// using a no-op propagate callback (single-step mode has no successor states
// to flow into). Clears the member-instruction state when the instruction
// made the state unreachable, and asserts no tracked elems remain.
//
// NOTE(review): line-mangled extraction; orig. lines 6279, 6283 and the final
// return of the step flags (~6285-6286) are missing from this view.
6277 StepFlags
step(Interp
& interp
, const Bytecode
& op
) {
6278 ISS env
{ interp
, [] (BlockId
, const State
*) {} };
// Dispatch the single instruction through the normal interpreter path.
6280 default_dispatch(env
, op
);
// Unreachable state invalidates any in-flight member-instruction state.
6281 if (env
.state
.unreachable
) {
6282 env
.collect
.mInstrState
.clear();
// Single-stepping must not leave partially tracked array elements behind.
6284 assertx(env
.trackedElems
.empty());
// default_dispatch: central per-instruction dispatch. Before interpreting
// `op`, finishes any tracked (in-construction) array elements that the
// instruction would pop, computing how many stack slots it consumes; after
// dispatch, handles terminal instructions and unreachable states.
//
// NOTE(review): line-mangled extraction; missing from this view are the
// switch header and remaining cases of the `pops` lambda (orig. 6291-6292,
// 6295-6302), the actual dispatch call (~6307), and the bodies at 6304-6305,
// 6310-6312. Verify against upstream before editing.
6288 void default_dispatch(ISS
& env
, const Bytecode
& op
) {
// If array elements are being tracked, compute how many stack values `op`
// pops so the tracked elems at/above that depth can be finalized first.
6289 if (!env
.trackedElems
.empty()) {
6290 auto const pops
= [&] () -> uint32_t {
// AddNewElemC consumes the new element but keeps the array on the stack,
// hence the -1 adjustment.
6293 case Op::AddNewElemC
:
6294 return numPop(op
) - 1;
// Finalize tracked elems down to the stack depth the instruction will reach.
6303 finish_tracked_elems(env
, env
.state
.stack
.size() - pops
);
// Terminal instruction (TF flag) with no recorded jump destination, versus an
// unreachable post-state: the latter clears member-instruction state.
6306 if (instrFlags(op
.op
) & TF
&& env
.flags
.jmpDest
== NoBlockId
) {
6308 } else if (env
.state
.unreachable
) {
6309 env
.collect
.mInstrState
.clear();
6313 //////////////////////////////////////////////////////////////////////