2 +----------------------------------------------------------------------+
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
16 #include "hphp/hhbbc/interp.h"
23 #include <folly/Optional.h>
25 #include "hphp/util/trace.h"
26 #include "hphp/runtime/base/array-init.h"
27 #include "hphp/runtime/base/collections.h"
28 #include "hphp/runtime/base/static-string-table.h"
29 #include "hphp/runtime/base/tv-arith.h"
30 #include "hphp/runtime/base/tv-comparisons.h"
31 #include "hphp/runtime/base/tv-conversions.h"
32 #include "hphp/runtime/vm/runtime.h"
33 #include "hphp/runtime/vm/unit-util.h"
35 #include "hphp/runtime/ext/hh/ext_hh.h"
37 #include "hphp/hhbbc/analyze.h"
38 #include "hphp/hhbbc/bc.h"
39 #include "hphp/hhbbc/cfg.h"
40 #include "hphp/hhbbc/class-util.h"
41 #include "hphp/hhbbc/eval-cell.h"
42 #include "hphp/hhbbc/index.h"
43 #include "hphp/hhbbc/interp-state.h"
44 #include "hphp/hhbbc/optimize.h"
45 #include "hphp/hhbbc/representation.h"
46 #include "hphp/hhbbc/type-builtins.h"
47 #include "hphp/hhbbc/type-ops.h"
48 #include "hphp/hhbbc/type-system.h"
49 #include "hphp/hhbbc/unit-util.h"
51 #include "hphp/hhbbc/interp-internal.h"
53 namespace HPHP
{ namespace HHBBC
{
55 //////////////////////////////////////////////////////////////////////
59 const StaticString
s_Throwable("__SystemLib\\Throwable");
60 const StaticString
s_empty("");
61 const StaticString
s_construct("__construct");
62 const StaticString
s_86ctor("86ctor");
63 const StaticString
s_PHP_Incomplete_Class("__PHP_Incomplete_Class");
64 const StaticString
s_IMemoizeParam("HH\\IMemoizeParam");
65 const StaticString
s_getInstanceKey("getInstanceKey");
66 const StaticString
s_Closure("Closure");
67 const StaticString
s_byRefWarn("Only variables should be passed by reference");
68 const StaticString
s_byRefError("Only variables can be passed by reference");
69 const StaticString
s_trigger_error("trigger_error");
72 //////////////////////////////////////////////////////////////////////
74 void impl_vec(ISS
& env
, bool reduce
, std::vector
<Bytecode
>&& bcs
) {
75 std::vector
<Bytecode
> currentReduction
;
76 if (!options
.StrengthReduce
) reduce
= false;
78 for (auto it
= begin(bcs
); it
!= end(bcs
); ++it
) {
79 assert(env
.flags
.jmpFlag
== StepFlags::JmpFlags::Either
&&
80 "you can't use impl with branching opcodes before last position");
82 auto const wasPEI
= env
.flags
.wasPEI
;
83 auto const canConstProp
= env
.flags
.canConstProp
;
84 auto const effectFree
= env
.flags
.effectFree
;
86 FTRACE(3, " (impl {}\n", show(env
.ctx
.func
, *it
));
87 env
.flags
.wasPEI
= true;
88 env
.flags
.canConstProp
= false;
89 env
.flags
.effectFree
= false;
90 env
.flags
.strengthReduced
= folly::none
;
91 default_dispatch(env
, *it
);
93 if (env
.flags
.strengthReduced
) {
94 if (instrFlags(env
.flags
.strengthReduced
->back().op
) & TF
) {
98 std::move(begin(*env
.flags
.strengthReduced
),
99 end(*env
.flags
.strengthReduced
),
100 std::back_inserter(currentReduction
));
103 if (instrFlags(it
->op
) & TF
) {
107 if (env
.flags
.canConstProp
&&
108 env
.collect
.propagate_constants
&&
109 env
.collect
.propagate_constants(*it
, env
.state
, currentReduction
)) {
110 env
.flags
.canConstProp
= false;
111 env
.flags
.wasPEI
= false;
112 env
.flags
.effectFree
= true;
114 currentReduction
.push_back(std::move(*it
));
119 // If any of the opcodes in the impl list said they could throw,
120 // then the whole thing could throw.
121 env
.flags
.wasPEI
= env
.flags
.wasPEI
|| wasPEI
;
122 env
.flags
.canConstProp
= env
.flags
.canConstProp
&& canConstProp
;
123 env
.flags
.effectFree
= env
.flags
.effectFree
&& effectFree
;
124 if (env
.state
.unreachable
) break;
128 env
.flags
.strengthReduced
= std::move(currentReduction
);
130 env
.flags
.strengthReduced
= folly::none
;
134 namespace interp_step
{
136 void in(ISS
& env
, const bc::Nop
&) { nothrow(env
); }
137 void in(ISS
& env
, const bc::DiscardClsRef
& op
) {
139 takeClsRefSlot(env
, op
.slot
);
141 void in(ISS
& env
, const bc::PopC
&) { nothrow(env
); popC(env
); }
142 void in(ISS
& env
, const bc::PopV
&) { nothrow(env
); popV(env
); }
143 void in(ISS
& env
, const bc::PopU
&) { nothrow(env
); popU(env
); }
144 void in(ISS
& env
, const bc::PopR
&) {
145 auto t
= topT(env
, 0);
146 if (t
.subtypeOf(TCell
)) {
147 return reduce(env
, bc::UnboxRNop
{}, bc::PopC
{});
153 void in(ISS
& env
, const bc::EntryNop
&) { nothrow(env
); }
155 void in(ISS
& env
, const bc::Dup
& /*op*/) {
157 auto const topEquiv
= topStkEquiv(env
);
158 auto val
= popC(env
);
159 push(env
, val
, topEquiv
);
160 push(env
, std::move(val
), topEquiv
);
163 void in(ISS
& env
, const bc::AssertRATL
& op
) {
164 mayReadLocal(env
, op
.loc1
);
168 void in(ISS
& env
, const bc::AssertRATStk
&) {
172 void in(ISS
& env
, const bc::BreakTraceHint
&) { nothrow(env
); }
174 void in(ISS
& env
, const bc::Box
&) {
180 void in(ISS
& env
, const bc::BoxR
&) {
182 if (topR(env
).subtypeOf(TRef
)) {
183 return reduce(env
, bc::BoxRNop
{});
189 void in(ISS
& env
, const bc::Unbox
&) {
192 push(env
, TInitCell
);
195 void in(ISS
& env
, const bc::UnboxR
&) {
196 auto const t
= topR(env
);
197 if (t
.subtypeOf(TInitCell
)) return reduce(env
, bc::UnboxRNop
{});
200 push(env
, TInitCell
);
203 void in(ISS
& env
, const bc::RGetCNop
&) { nothrow(env
); }
205 void in(ISS
& env
, const bc::CGetCUNop
&) {
207 auto const t
= popCU(env
);
208 push(env
, remove_uninit(t
));
211 void in(ISS
& env
, const bc::UGetCUNop
&) {
217 void in(ISS
& env
, const bc::UnboxRNop
&) {
221 if (!t
.subtypeOf(TInitCell
)) t
= TInitCell
;
222 push(env
, std::move(t
));
225 void in(ISS
& env
, const bc::BoxRNop
&) {
228 if (!t
.subtypeOf(TRef
)) t
= TRef
;
229 push(env
, std::move(t
));
232 void in(ISS
& env
, const bc::Null
&) {
234 push(env
, TInitNull
);
237 void in(ISS
& env
, const bc::NullUninit
&) {
242 void in(ISS
& env
, const bc::True
&) {
247 void in(ISS
& env
, const bc::False
&) {
252 void in(ISS
& env
, const bc::Int
& op
) {
254 push(env
, ival(op
.arg1
));
257 void in(ISS
& env
, const bc::Double
& op
) {
259 push(env
, dval(op
.dbl1
));
262 void in(ISS
& env
, const bc::String
& op
) {
264 push(env
, sval(op
.str1
));
267 void in(ISS
& env
, const bc::Array
& op
) {
268 assert(op
.arr1
->isPHPArray());
270 push(env
, aval(op
.arr1
));
273 void in(ISS
& env
, const bc::Vec
& op
) {
274 assert(op
.arr1
->isVecArray());
276 push(env
, vec_val(op
.arr1
));
279 void in(ISS
& env
, const bc::Dict
& op
) {
280 assert(op
.arr1
->isDict());
282 push(env
, dict_val(op
.arr1
));
285 void in(ISS
& env
, const bc::Keyset
& op
) {
286 assert(op
.arr1
->isKeyset());
288 push(env
, keyset_val(op
.arr1
));
291 void in(ISS
& env
, const bc::NewArray
& op
) {
292 push(env
, op
.arg1
== 0 ?
293 effect_free(env
), aempty() : counted_aempty());
296 void in(ISS
& env
, const bc::NewDictArray
& op
) {
297 push(env
, op
.arg1
== 0 ?
298 effect_free(env
), dict_empty() : counted_dict_empty());
301 void in(ISS
& env
, const bc::NewMixedArray
& op
) {
302 push(env
, op
.arg1
== 0 ?
303 effect_free(env
), aempty() : counted_aempty());
306 void in(ISS
& env
, const bc::NewPackedArray
& op
) {
307 auto elems
= std::vector
<Type
>{};
308 elems
.reserve(op
.arg1
);
309 for (auto i
= uint32_t{0}; i
< op
.arg1
; ++i
) {
310 elems
.push_back(std::move(topC(env
, op
.arg1
- i
- 1)));
312 discard(env
, op
.arg1
);
313 push(env
, arr_packed(std::move(elems
)));
317 void in(ISS
& env
, const bc::NewStructArray
& op
) {
318 auto map
= MapElems
{};
319 for (auto it
= op
.keys
.end(); it
!= op
.keys
.begin(); ) {
320 map
.emplace_front(make_tv
<KindOfPersistentString
>(*--it
), popC(env
));
322 push(env
, arr_map(std::move(map
)));
326 void in(ISS
& env
, const bc::NewVecArray
& op
) {
327 auto elems
= std::vector
<Type
>{};
328 elems
.reserve(op
.arg1
);
329 for (auto i
= uint32_t{0}; i
< op
.arg1
; ++i
) {
330 elems
.push_back(std::move(topC(env
, op
.arg1
- i
- 1)));
332 discard(env
, op
.arg1
);
334 push(env
, vec(std::move(elems
)));
337 void in(ISS
& env
, const bc::NewKeysetArray
& op
) {
339 auto map
= MapElems
{};
343 for (auto i
= uint32_t{0}; i
< op
.arg1
; ++i
) {
344 auto k
= disect_strict_key(popC(env
));
345 if (k
.type
== TBottom
) {
350 if (auto const v
= k
.tv()) {
351 map
.emplace_front(*v
, k
.type
);
356 ty
|= std::move(k
.type
);
359 push(env
, keyset_map(std::move(map
)));
362 push(env
, keyset_n(ty
));
369 void in(ISS
& env
, const bc::NewLikeArrayL
& op
) {
370 locAsCell(env
, op
.loc1
);
371 push(env
, counted_aempty());
374 void in(ISS
& env
, const bc::AddElemC
& /*op*/) {
375 auto const v
= popC(env
);
376 auto const k
= popC(env
);
378 auto const outTy
= [&] (Type ty
) ->
379 folly::Optional
<std::pair
<Type
,ThrowMode
>> {
380 if (ty
.subtypeOf(TArr
)) {
381 return array_set(std::move(ty
), k
, v
);
383 if (ty
.subtypeOf(TDict
)) {
384 return dict_set(std::move(ty
), k
, v
);
390 return push(env
, union_of(TArr
, TDict
));
393 if (outTy
->first
.subtypeOf(TBottom
)) {
395 } else if (outTy
->second
== ThrowMode::None
) {
397 if (env
.collect
.trackConstantArrays
) constprop(env
);
399 push(env
, std::move(outTy
->first
));
402 void in(ISS
& env
, const bc::AddElemV
& /*op*/) {
403 popV(env
); popC(env
);
404 auto const ty
= popC(env
);
406 ty
.subtypeOf(TArr
) ? TArr
407 : ty
.subtypeOf(TDict
) ? TDict
408 : union_of(TArr
, TDict
);
412 void in(ISS
& env
, const bc::AddNewElemC
&) {
415 auto const outTy
= [&] (Type ty
) -> folly::Optional
<Type
> {
416 if (ty
.subtypeOf(TArr
)) {
417 return array_newelem(std::move(ty
), std::move(v
)).first
;
423 return push(env
, TInitCell
);
426 if (outTy
->subtypeOf(TBottom
)) {
429 if (env
.collect
.trackConstantArrays
) constprop(env
);
431 push(env
, std::move(*outTy
));
434 void in(ISS
& env
, const bc::AddNewElemV
&) {
440 void in(ISS
& env
, const bc::NewCol
& op
) {
441 auto const type
= static_cast<CollectionType
>(op
.subop1
);
442 auto const name
= collections::typeToString(type
);
443 push(env
, objExact(env
.index
.builtin_class(name
)));
446 void in(ISS
& env
, const bc::NewPair
& /*op*/) {
447 popC(env
); popC(env
);
448 auto const name
= collections::typeToString(CollectionType::Pair
);
449 push(env
, objExact(env
.index
.builtin_class(name
)));
452 void in(ISS
& env
, const bc::ColFromArray
& op
) {
454 auto const type
= static_cast<CollectionType
>(op
.subop1
);
455 auto const name
= collections::typeToString(type
);
456 push(env
, objExact(env
.index
.builtin_class(name
)));
459 void doCns(ISS
& env
, SString str
, SString fallback
) {
460 if (!options
.HardConstProp
) return push(env
, TInitCell
);
462 auto t
= env
.index
.lookup_constant(env
.ctx
, str
, fallback
);
464 // There's no entry for this constant in the index. It must be
465 // the first iteration, so we'll add a dummy entry to make sure
466 // there /is/ something next time around.
468 val
.m_type
= kReadOnlyConstant
;
469 env
.collect
.cnsMap
.emplace(str
, val
);
471 // make sure we're re-analyzed
472 env
.collect
.readsUntrackedConstants
= true;
473 } else if (t
->strictSubtypeOf(TInitCell
)) {
474 // constprop will take care of nothrow *if* its a constant; and if
475 // its not, we might trigger autoload.
478 push(env
, std::move(*t
));
481 void in(ISS
& env
, const bc::Cns
& op
) { doCns(env
, op
.str1
, nullptr); }
482 void in(ISS
& env
, const bc::CnsE
& op
) { doCns(env
, op
.str1
, nullptr); }
483 void in(ISS
& env
, const bc::CnsU
& op
) { doCns(env
, op
.str1
, op
.str2
); }
485 void in(ISS
& env
, const bc::ClsCns
& op
) {
486 auto const& t1
= peekClsRefSlot(env
, op
.slot
);
487 if (is_specialized_cls(t1
)) {
488 auto const dcls
= dcls_of(t1
);
489 if (dcls
.type
== DCls::Exact
) {
490 return reduce(env
, bc::DiscardClsRef
{ op
.slot
},
491 bc::ClsCnsD
{ op
.str1
, dcls
.cls
.name() });
494 takeClsRefSlot(env
, op
.slot
);
495 push(env
, TInitCell
);
// ClsCnsD: with a resolvable class we can ask the index for the constant's
// type (and const-prop it under HardConstProp); otherwise it's any InitCell.
void in(ISS& env, const bc::ClsCnsD& op) {
  auto const rcls = env.index.resolve_class(env.ctx, op.str2);
  if (rcls) {
    auto cnsTy = env.index.lookup_class_constant(env.ctx, *rcls, op.str1);
    if (options.HardConstProp) constprop(env);
    push(env, std::move(cnsTy));
    return;
  }
  push(env, TInitCell);
}
508 void in(ISS
& env
, const bc::File
&) { nothrow(env
); push(env
, TSStr
); }
509 void in(ISS
& env
, const bc::Dir
&) { nothrow(env
); push(env
, TSStr
); }
510 void in(ISS
& env
, const bc::Method
&) { nothrow(env
); push(env
, TSStr
); }
512 void in(ISS
& env
, const bc::ClsRefName
& op
) {
514 takeClsRefSlot(env
, op
.slot
);
518 void concatHelper(ISS
& env
, uint32_t n
) {
520 StringData
* result
= nullptr;
522 auto const t
= topC(env
, i
);
523 auto const v
= tv(t
);
525 if (!isStringType(v
->m_type
) &&
526 v
->m_type
!= KindOfNull
&&
527 v
->m_type
!= KindOfBoolean
&&
528 v
->m_type
!= KindOfInt64
&&
529 v
->m_type
!= KindOfDouble
) {
532 auto const cell
= eval_cell_value([&] {
533 auto const s
= makeStaticString(
535 StringData::Make(tvAsCVarRef(&*v
).toString().get(), result
) :
536 tvAsCVarRef(&*v
).toString().get());
537 return make_tv
<KindOfString
>(s
);
540 result
= cell
->m_data
.pstr
;
543 if (result
&& i
>= 2) {
544 std::vector
<Bytecode
> bcs(i
, bc::PopC
{});
545 bcs
.push_back(gen_constant(make_tv
<KindOfString
>(result
)));
547 bcs
.push_back(bc::ConcatN
{ n
- i
+ 1 });
549 return reduce(env
, std::move(bcs
));
555 void in(ISS
& env
, const bc::Concat
& /*op*/) {
556 concatHelper(env
, 2);
559 void in(ISS
& env
, const bc::ConcatN
& op
) {
560 if (op
.arg1
== 2) return reduce(env
, bc::Concat
{});
561 concatHelper(env
, op
.arg1
);
564 template <class Op
, class Fun
>
565 void arithImpl(ISS
& env
, const Op
& /*op*/, Fun fun
) {
567 auto const t1
= popC(env
);
568 auto const t2
= popC(env
);
569 push(env
, fun(t2
, t1
));
572 void in(ISS
& env
, const bc::Add
& op
) { arithImpl(env
, op
, typeAdd
); }
573 void in(ISS
& env
, const bc::Sub
& op
) { arithImpl(env
, op
, typeSub
); }
574 void in(ISS
& env
, const bc::Mul
& op
) { arithImpl(env
, op
, typeMul
); }
575 void in(ISS
& env
, const bc::Div
& op
) { arithImpl(env
, op
, typeDiv
); }
576 void in(ISS
& env
, const bc::Mod
& op
) { arithImpl(env
, op
, typeMod
); }
577 void in(ISS
& env
, const bc::Pow
& op
) { arithImpl(env
, op
, typePow
); }
578 void in(ISS
& env
, const bc::BitAnd
& op
) { arithImpl(env
, op
, typeBitAnd
); }
579 void in(ISS
& env
, const bc::BitOr
& op
) { arithImpl(env
, op
, typeBitOr
); }
580 void in(ISS
& env
, const bc::BitXor
& op
) { arithImpl(env
, op
, typeBitXor
); }
581 void in(ISS
& env
, const bc::AddO
& op
) { arithImpl(env
, op
, typeAddO
); }
582 void in(ISS
& env
, const bc::SubO
& op
) { arithImpl(env
, op
, typeSubO
); }
583 void in(ISS
& env
, const bc::MulO
& op
) { arithImpl(env
, op
, typeMulO
); }
584 void in(ISS
& env
, const bc::Shl
& op
) { arithImpl(env
, op
, typeShl
); }
585 void in(ISS
& env
, const bc::Shr
& op
) { arithImpl(env
, op
, typeShr
); }
587 void in(ISS
& env
, const bc::BitNot
& /*op*/) {
588 auto const t
= popC(env
);
589 auto const v
= tv(t
);
592 auto cell
= eval_cell([&] {
597 if (cell
) return push(env
, std::move(*cell
));
599 push(env
, TInitCell
);
604 bool couldBeHackArr(Type t
) {
605 return t
.couldBe(TVec
) || t
.couldBe(TDict
) || t
.couldBe(TKeyset
);
610 template<bool Negate
>
611 void sameImpl(ISS
& env
) {
612 auto const t1
= popC(env
);
613 auto const t2
= popC(env
);
614 auto const v1
= tv(t1
);
615 auto const v2
= tv(t2
);
617 auto const mightWarn
= [&]{
618 // EvalHackArrCompatNotices will notice on === and !== between PHP arrays
620 if (!RuntimeOption::EvalHackArrCompatNotices
) return false;
621 if (t1
.couldBe(TArr
) && couldBeHackArr(t2
)) return true;
622 if (couldBeHackArr(t1
) && t2
.couldBe(TArr
)) return true;
631 if (auto r
= eval_cell_value([&]{ return cellSame(*v2
, *v1
); })) {
632 return push(env
, r
!= Negate
? TTrue
: TFalse
);
635 push(env
, Negate
? typeNSame(t1
, t2
) : typeSame(t1
, t2
));
638 void in(ISS
& env
, const bc::Same
&) { sameImpl
<false>(env
); }
639 void in(ISS
& env
, const bc::NSame
&) { sameImpl
<true>(env
); }
642 void binOpBoolImpl(ISS
& env
, Fun fun
) {
643 auto const t1
= popC(env
);
644 auto const t2
= popC(env
);
645 auto const v1
= tv(t1
);
646 auto const v2
= tv(t2
);
648 if (auto r
= eval_cell_value([&]{ return fun(*v2
, *v1
); })) {
650 return push(env
, *r
? TTrue
: TFalse
);
653 // TODO_4: evaluate when these can throw, non-constant type stuff.
658 void binOpInt64Impl(ISS
& env
, Fun fun
) {
659 auto const t1
= popC(env
);
660 auto const t2
= popC(env
);
661 auto const v1
= tv(t1
);
662 auto const v2
= tv(t2
);
664 if (auto r
= eval_cell_value([&]{ return ival(fun(*v2
, *v1
)); })) {
666 return push(env
, std::move(*r
));
669 // TODO_4: evaluate when these can throw, non-constant type stuff.
673 void in(ISS
& env
, const bc::Eq
&) {
674 binOpBoolImpl(env
, [&] (Cell c1
, Cell c2
) { return cellEqual(c1
, c2
); });
676 void in(ISS
& env
, const bc::Neq
&) {
677 binOpBoolImpl(env
, [&] (Cell c1
, Cell c2
) { return !cellEqual(c1
, c2
); });
679 void in(ISS
& env
, const bc::Lt
&) {
680 binOpBoolImpl(env
, [&] (Cell c1
, Cell c2
) { return cellLess(c1
, c2
); });
682 void in(ISS
& env
, const bc::Gt
&) {
683 binOpBoolImpl(env
, [&] (Cell c1
, Cell c2
) { return cellGreater(c1
, c2
); });
685 void in(ISS
& env
, const bc::Lte
&) { binOpBoolImpl(env
, cellLessOrEqual
); }
686 void in(ISS
& env
, const bc::Gte
&) { binOpBoolImpl(env
, cellGreaterOrEqual
); }
688 void in(ISS
& env
, const bc::Cmp
&) {
689 binOpInt64Impl(env
, [&] (Cell c1
, Cell c2
) { return cellCompare(c1
, c2
); });
692 void in(ISS
& env
, const bc::Xor
&) {
693 binOpBoolImpl(env
, [&] (Cell c1
, Cell c2
) {
694 return cellToBool(c1
) ^ cellToBool(c2
);
698 void castBoolImpl(ISS
& env
, const Type
& t
, bool negate
) {
702 auto const e
= emptiness(t
);
704 case Emptiness::Empty
:
705 case Emptiness::NonEmpty
:
706 return push(env
, (e
== Emptiness::Empty
) == negate
? TTrue
: TFalse
);
707 case Emptiness::Maybe
:
714 void in(ISS
& env
, const bc::Not
&) {
715 castBoolImpl(env
, popC(env
), true);
718 void in(ISS
& env
, const bc::CastBool
&) {
719 auto const t
= topC(env
);
720 if (t
.subtypeOf(TBool
)) return reduce(env
, bc::Nop
{});
721 castBoolImpl(env
, popC(env
), false);
724 void in(ISS
& env
, const bc::CastInt
&) {
726 auto const t
= topC(env
);
727 if (t
.subtypeOf(TInt
)) return reduce(env
, bc::Nop
{});
729 // Objects can raise a warning about converting to int.
730 if (!t
.couldBe(TObj
)) nothrow(env
);
731 if (auto const v
= tv(t
)) {
732 auto cell
= eval_cell([&] {
733 return make_tv
<KindOfInt64
>(cellToInt(*v
));
735 if (cell
) return push(env
, std::move(*cell
));
740 void castImpl(ISS
& env
, Type target
, void(*fn
)(TypedValue
*)) {
741 auto const t
= topC(env
);
742 if (t
.subtypeOf(target
)) return reduce(env
, bc::Nop
{});
745 if (auto val
= tv(t
)) {
746 if (auto result
= eval_cell([&] { fn(&*val
); return *val
; })) {
752 push(env
, std::move(target
));
755 void in(ISS
& env
, const bc::CastDouble
&) {
756 castImpl(env
, TDbl
, tvCastToDoubleInPlace
);
759 void in(ISS
& env
, const bc::CastString
&) {
760 castImpl(env
, TStr
, tvCastToStringInPlace
);
763 void in(ISS
& env
, const bc::CastArray
&) {
764 castImpl(env
, TArr
, tvCastToArrayInPlace
);
767 void in(ISS
& env
, const bc::CastObject
&) { castImpl(env
, TObj
, nullptr); }
769 void in(ISS
& env
, const bc::CastDict
&) {
770 castImpl(env
, TDict
, tvCastToDictInPlace
);
773 void in(ISS
& env
, const bc::CastVec
&) {
774 castImpl(env
, TVec
, tvCastToVecInPlace
);
777 void in(ISS
& env
, const bc::CastKeyset
&) {
778 castImpl(env
, TKeyset
, tvCastToKeysetInPlace
);
781 void in(ISS
& env
, const bc::CastVArray
&) {
782 auto const t
= popC(env
);
783 if (auto val
= tv(t
)) {
784 auto result
= eval_cell(
786 tvCastToVArrayInPlace(&*val
);
792 return push(env
, std::move(*result
));
798 void in(ISS
& env
, const bc::CastDArray
&) {
799 castImpl(env
, TArr
, tvCastToDArrayInPlace
);
802 void in(ISS
& env
, const bc::Print
& /*op*/) {
// Clone: the result is always an object.  If the input type isn't already a
// subtype of TObj, strip an optional wrapper when present, else widen to TObj.
void in(ISS& env, const bc::Clone& /*op*/) {
  auto cloned = popC(env);
  if (!cloned.subtypeOf(TObj)) {
    cloned = is_opt(cloned) ? unopt(std::move(cloned)) : TObj;
  }
  push(env, std::move(cloned));
}
815 void in(ISS
& env
, const bc::Exit
&) { popC(env
); push(env
, TInitNull
); }
816 void in(ISS
& env
, const bc::Fatal
&) { popC(env
); }
818 void in(ISS
& /*env*/, const bc::JmpNS
&) {
819 always_assert(0 && "blocks should not contain JmpNS instructions");
822 void in(ISS
& /*env*/, const bc::Jmp
&) {
823 always_assert(0 && "blocks should not contain Jmp instructions");
826 template<bool Negate
, class JmpOp
>
827 void jmpImpl(ISS
& env
, const JmpOp
& op
) {
829 auto const locId
= topStkEquiv(env
);
830 auto const e
= emptiness(popC(env
));
831 if (e
== (Negate
? Emptiness::NonEmpty
: Emptiness::Empty
)) {
832 jmp_nofallthrough(env
);
833 env
.propagate(op
.target
, env
.state
);
837 if (e
== (Negate
? Emptiness::Empty
: Emptiness::NonEmpty
)) {
842 if (next_real_block(*env
.ctx
.func
, env
.blk
.fallthrough
) ==
843 next_real_block(*env
.ctx
.func
, op
.target
)) {
848 if (locId
== NoLocalId
) return env
.propagate(op
.target
, env
.state
);
850 auto loc
= peekLocRaw(env
, locId
);
851 assertx(!loc
.couldBe(TRef
)); // we shouldn't have an equivLoc if it was
853 auto const converted_true
= assert_nonemptiness(loc
);
854 auto const converted_false
= assert_emptiness(std::move(loc
));
856 refineLoc(env
, locId
, Negate
? converted_true
: converted_false
);
857 env
.propagate(op
.target
, env
.state
);
858 refineLoc(env
, locId
, Negate
? converted_false
: converted_true
);
861 void in(ISS
& env
, const bc::JmpNZ
& op
) { jmpImpl
<true>(env
, op
); }
862 void in(ISS
& env
, const bc::JmpZ
& op
) { jmpImpl
<false>(env
, op
); }
864 template<class JmpOp
>
865 void group(ISS
& env
, const bc::IsTypeL
& istype
, const JmpOp
& jmp
) {
866 if (istype
.subop2
== IsTypeOp::Scalar
) return impl(env
, istype
, jmp
);
868 auto const loc
= derefLoc(env
, istype
.loc1
);
869 auto const testTy
= type_of_istype(istype
.subop2
);
870 if (loc
.subtypeOf(testTy
) || !loc
.couldBe(testTy
)) {
871 return impl(env
, istype
, jmp
);
874 if (!locCouldBeUninit(env
, istype
.loc1
)) nothrow(env
);
876 auto const negate
= jmp
.op
== Op::JmpNZ
;
877 auto const was_true
= [&] {
879 if (testTy
.subtypeOf(TNull
)) return TInitNull
;
880 auto const unopted
= unopt(loc
);
881 if (unopted
.subtypeOf(testTy
)) return unopted
;
885 auto const was_false
= [&] {
887 auto const unopted
= unopt(loc
);
888 if (testTy
.subtypeOf(TNull
)) return unopted
;
889 if (unopted
.subtypeOf(testTy
)) return TInitNull
;
894 refineLoc(env
, istype
.loc1
, negate
? was_true
: was_false
);
895 env
.propagate(jmp
.target
, env
.state
);
896 refineLoc(env
, istype
.loc1
, negate
? was_false
: was_true
);
901 folly::Optional
<Cell
> staticLocHelper(ISS
& env
, LocalId l
, Type init
) {
902 if (is_volatile_local(env
.ctx
.func
, l
)) return folly::none
;
903 unbindLocalStatic(env
, l
);
904 setLocRaw(env
, l
, TRef
);
905 bindLocalStatic(env
, l
, std::move(init
));
906 if (!env
.ctx
.func
->isMemoizeWrapper
&&
907 !env
.ctx
.func
->isClosureBody
&&
908 env
.collect
.localStaticTypes
.size() > l
) {
909 auto t
= env
.collect
.localStaticTypes
[l
];
910 if (auto v
= tv(t
)) {
911 useLocalStatic(env
, l
);
912 setLocRaw(env
, l
, t
);
916 useLocalStatic(env
, l
);
920 // If the current function is a memoize wrapper, return the inferred return type
921 // of the function being wrapped.
922 Type
memoizeImplRetType(ISS
& env
) {
923 always_assert(env
.ctx
.func
->isMemoizeWrapper
);
925 // Lookup the wrapped function. This should always resolve to a precise
926 // function but we don't rely on it.
927 auto const memo_impl_func
= [&]{
928 if (env
.ctx
.func
->cls
) {
929 auto const clsTy
= selfClsExact(env
);
930 return env
.index
.resolve_method(
932 clsTy
? *clsTy
: TCls
,
933 memoize_impl_name(env
.ctx
.func
)
936 return env
.index
.resolve_func(env
.ctx
, memoize_impl_name(env
.ctx
.func
));
939 // Infer the return type of the wrapped function, taking into account the
940 // types of the parameters for context sensitive types.
941 auto const numArgs
= env
.ctx
.func
->params
.size();
942 std::vector
<Type
> args
{numArgs
};
943 for (auto i
= LocalId
{0}; i
< numArgs
; ++i
) {
944 args
[i
] = locAsCell(env
, i
);
947 auto retTy
= env
.index
.lookup_return_type(
948 CallContext
{ env
.ctx
, args
},
951 // Regardless of anything we know the return type will be an InitCell (this is
952 // a requirement of memoize functions).
953 if (!retTy
.subtypeOf(TInitCell
)) return TInitCell
;
958 * Propagate a more specific type to the taken/fall-through branches of a jmp
959 * operation when the jmp is done because of a type test. Given a type `valTy`,
960 * being tested against the type `testTy`, propagate `failTy` to the branch
961 * representing test failure, and `testTy` to the branch representing test
964 template<class JmpOp
>
965 void typeTestPropagate(ISS
& env
, Type valTy
, Type testTy
,
966 Type failTy
, const JmpOp
& jmp
) {
968 auto const takenOnSuccess
= jmp
.op
== Op::JmpNZ
;
970 if (valTy
.subtypeOf(testTy
) || failTy
.subtypeOf(TBottom
)) {
971 push(env
, std::move(valTy
));
972 if (takenOnSuccess
) {
973 jmp_nofallthrough(env
);
974 env
.propagate(jmp
.target
, env
.state
);
980 if (!valTy
.couldBe(testTy
)) {
982 if (takenOnSuccess
) {
985 jmp_nofallthrough(env
);
986 env
.propagate(jmp
.target
, env
.state
);
991 push(env
, std::move(takenOnSuccess
? testTy
: failTy
));
992 env
.propagate(jmp
.target
, env
.state
);
994 push(env
, std::move(takenOnSuccess
? failTy
: testTy
));
999 // After a StaticLocCheck, we know the local is bound on the true path,
1000 // and not changed on the false path.
1001 template<class JmpOp
>
1002 void group(ISS
& env
, const bc::StaticLocCheck
& slc
, const JmpOp
& jmp
) {
1003 auto const takenOnInit
= jmp
.op
== Op::JmpNZ
;
1004 auto save
= env
.state
;
1006 if (auto const v
= staticLocHelper(env
, slc
.loc1
, TBottom
)) {
1007 return impl(env
, slc
, jmp
);
1010 if (env
.collect
.localStaticTypes
.size() > slc
.loc1
&&
1011 env
.collect
.localStaticTypes
[slc
.loc1
].subtypeOf(TBottom
)) {
1013 env
.state
= std::move(save
);
1014 jmp_nevertaken(env
);
1016 env
.propagate(jmp
.target
, save
);
1017 jmp_nofallthrough(env
);
1023 env
.propagate(jmp
.target
, env
.state
);
1024 env
.state
= std::move(save
);
1026 env
.propagate(jmp
.target
, save
);
1030 // If we duplicate a value, and then test its type and Jmp based on that result,
1031 // we can narrow the type of the top of the stack. Only do this for null checks
1032 // right now (because its useful in memoize wrappers).
1033 template<class JmpOp
>
1034 void group(ISS
& env
, const bc::Dup
& dup
,
1035 const bc::IsTypeC
& istype
, const JmpOp
& jmp
) {
1036 if (istype
.subop1
!= IsTypeOp::Scalar
) {
1037 auto const testTy
= type_of_istype(istype
.subop1
);
1038 if (testTy
.subtypeOf(TNull
)) {
1039 auto const valTy
= popC(env
);
1041 env
, valTy
, TInitNull
, is_opt(valTy
) ? unopt(valTy
) : valTy
, jmp
1046 impl(env
, dup
, istype
, jmp
);
1049 // If we duplicate a value, do an instanceof check and Jmp based on
1050 // that result, we can narrow the type of the top of the stack.
1051 template<class JmpOp
>
1052 void group(ISS
& env
, const bc::Dup
& dup
,
1053 const bc::InstanceOfD
& inst
, const JmpOp
& jmp
) {
1054 auto bail
= [&] { impl(env
, dup
, inst
, jmp
); };
1056 if (interface_supports_non_objects(inst
.str1
)) return bail();
1057 auto const rcls
= env
.index
.resolve_class(env
.ctx
, inst
.str1
);
1058 if (!rcls
) return bail();
1060 auto const instTy
= subObj(*rcls
);
1061 auto const obj
= popC(env
);
1062 typeTestPropagate(env
, obj
, instTy
, obj
, jmp
);
1065 // If we do an IsUninit check and then Jmp based on the check, one branch will
1066 // be the original type minus the Uninit, and the other will be
1067 // Uninit. (IsUninit does not pop the value).
1068 template<class JmpOp
>
1069 void group(ISS
& env
, const bc::IsUninit
&, const JmpOp
& jmp
) {
1070 auto const valTy
= popCU(env
);
1071 typeTestPropagate(env
, valTy
, TUninit
, remove_uninit(valTy
), jmp
);
1074 // A MemoGet, followed by an IsUninit, followed by a Jmp, can have the type of
1075 // the stack inferred very well. The IsUninit success path will be Uninit and
1076 // the failure path will be the inferred return type of the wrapped
1077 // function. This has to be done as a group and not via individual interp()
1078 // calls is because of limitations in HHBBC's type-system. The type that MemoGet
1079 // pushes is the inferred return type of the wrapper function with Uninit added
1080 // in. Unfortunately HHBBC's type-system cannot exactly represent this
1081 // combination, so it gets forced to Cell. By analyzing this triplet as a group,
1082 // we can avoid this loss of type precision.
1083 template <class JmpOp
>
1084 void group(ISS
& env
, const bc::MemoGet
& get
, const bc::IsUninit
& /*isuninit*/,
1087 typeTestPropagate(env
, popCU(env
), TUninit
, memoizeImplRetType(env
), jmp
);
1090 template<class JmpOp
>
1091 void group(ISS
& env
,
1092 const bc::InstanceOfD
& inst
,
1094 auto bail
= [&] { impl(env
, inst
, jmp
); };
1096 auto const locId
= topStkEquiv(env
);
1097 if (locId
== NoLocalId
|| interface_supports_non_objects(inst
.str1
)) {
1100 auto const loc
= peekLocRaw(env
, locId
);
1101 assertx(!loc
.couldBe(TRef
)); // we shouldn't have an equivLoc if it was
1102 auto const rcls
= env
.index
.resolve_class(env
.ctx
, inst
.str1
);
1103 if (!rcls
) return bail();
1105 auto const instTy
= subObj(*rcls
);
1106 if (loc
.subtypeOf(instTy
) || !loc
.couldBe(instTy
)) {
1111 auto const negate
= jmp
.op
== Op::JmpNZ
;
1112 auto const was_true
= instTy
;
1113 auto const was_false
= loc
;
1114 refineLoc(env
, locId
, negate
? was_true
: was_false
);
1115 env
.propagate(jmp
.target
, env
.state
);
1116 refineLoc(env
, locId
, negate
? was_false
: was_true
);
1119 void in(ISS
& env
, const bc::Switch
& op
) {
1121 forEachTakenEdge(op
, [&] (BlockId id
) {
1122 env
.propagate(id
, env
.state
);
1126 void in(ISS
& env
, const bc::SSwitch
& op
) {
1128 forEachTakenEdge(op
, [&] (BlockId id
) {
1129 env
.propagate(id
, env
.state
);
1133 void in(ISS
& env
, const bc::RetC
& /*op*/) {
1134 doRet(env
, popC(env
), false);
1136 void in(ISS
& env
, const bc::RetV
& /*op*/) {
1137 doRet(env
, popV(env
), false);
1139 void in(ISS
& /*env*/, const bc::Unwind
& /*op*/) {}
1140 void in(ISS
& env
, const bc::Throw
& /*op*/) {
1144 void in(ISS
& env
, const bc::Catch
&) {
1146 return push(env
, subObj(env
.index
.builtin_class(s_Throwable
.get())));
1149 void in(ISS
& env
, const bc::NativeImpl
&) {
1153 if (is_collection_method_returning_this(env
.ctx
.cls
, env
.ctx
.func
)) {
1154 assert(env
.ctx
.func
->attrs
& AttrParamCoerceModeNull
);
1155 assert(!(env
.ctx
.func
->attrs
& AttrReference
));
1156 auto const resCls
= env
.index
.builtin_class(env
.ctx
.cls
->name
);
1157 // Can still return null if parameter coercion fails
1158 return doRet(env
, union_of(objExact(resCls
), TInitNull
), true);
1161 if (env
.ctx
.func
->nativeInfo
) {
1162 return doRet(env
, native_function_return_type(env
.ctx
.func
), true);
1164 doRet(env
, TInitGen
, true);
1167 void in(ISS
& env
, const bc::CGetL
& op
) {
1168 if (!locCouldBeUninit(env
, op
.loc1
)) {
1172 push(env
, locAsCell(env
, op
.loc1
), op
.loc1
);
1175 void in(ISS
& env
, const bc::CGetQuietL
& op
) {
1178 push(env
, locAsCell(env
, op
.loc1
), op
.loc1
);
1181 void in(ISS
& env
, const bc::CUGetL
& op
) {
1182 auto ty
= locRaw(env
, op
.loc1
);
1183 if (ty
.subtypeOf(TUninit
)) {
1184 return reduce(env
, bc::NullUninit
{});
1187 if (!ty
.couldBe(TUninit
)) constprop(env
);
1188 if (!ty
.subtypeOf(TCell
)) ty
= TCell
;
1189 push(env
, std::move(ty
), op
.loc1
);
// PushL: push a local's value and unset the local.  Implemented by
// delegating to the equivalent CGetL + UnsetL pair so their interp
// rules (and any reductions they perform) apply.
void in(ISS& env, const bc::PushL& op) {
  impl(env, bc::CGetL { op.loc1 }, bc::UnsetL { op.loc1 });
}
// CGetL2: push the value of op.loc1 *underneath* the current top of
// stack (pop top, push local, re-push top).
void in(ISS& env, const bc::CGetL2& op) {
  // Can't constprop yet because of no INS_1 support in bc.h
  if (!locCouldBeUninit(env, op.loc1)) nothrow(env);
  auto loc = locAsCell(env, op.loc1);
  auto topEquiv = topStkEquiv(env);  // preserve the top's local equivalence
  auto top = popT(env);
  push(env, std::move(loc), op.loc1);   // local's value goes underneath...
  push(env, std::move(top), topEquiv);  // ...and the old top is restored
}
1208 template <typename Op
> void common_cgetn(ISS
& env
) {
1209 auto const t1
= topC(env
);
1210 auto const v1
= tv(t1
);
1211 if (v1
&& v1
->m_type
== KindOfPersistentString
) {
1212 auto const loc
= findLocal(env
, v1
->m_data
.pstr
);
1213 if (loc
!= NoLocalId
) {
1214 return reduce(env
, bc::PopC
{}, Op
{ loc
});
1217 readUnknownLocals(env
);
1219 popC(env
); // conversion to string can throw
1220 push(env
, TInitCell
);
// CGetN: read a local named by the string on the stack.  Delegates to
// common_cgetn (above), which reduces to PopC + CGetL when the name is
// a known static string naming a local.
void in(ISS& env, const bc::CGetN&) { common_cgetn<bc::CGetL>(env); }
// CGetQuietN: same stack/local effect as CGetN, reducing to the quiet
// local read (CGetQuietL) when the name is known.
void in(ISS& env, const bc::CGetQuietN&) { common_cgetn<bc::CGetQuietL>(env); }
// CGetG: read a global.  Pops the name; result can be any init cell.
void in(ISS& env, const bc::CGetG&) { popC(env); push(env, TInitCell); }
// CGetQuietG: identical type effect to CGetG (pop name, push TInitCell).
void in(ISS& env, const bc::CGetQuietG&) { popC(env); push(env, TInitCell); }
1231 void in(ISS
& env
, const bc::CGetS
& op
) {
1232 auto const tcls
= takeClsRefSlot(env
, op
.slot
);
1233 auto const tname
= popC(env
);
1234 auto const vname
= tv(tname
);
1235 auto const self
= selfCls(env
);
1237 if (vname
&& vname
->m_type
== KindOfPersistentString
&&
1238 self
&& tcls
.subtypeOf(*self
)) {
1239 if (auto ty
= selfPropAsCell(env
, vname
->m_data
.pstr
)) {
1240 // Only nothrow when we know it's a private declared property
1241 // (and thus accessible here).
1244 // We can only constprop here if we know for sure this is exactly the
1245 // correct class. The reason for this is that you could have a LSB class
1246 // attempting to access a private static in a derived class with the same
1247 // name as a private static in this class, which is supposed to fatal at
1248 // runtime (for an example see test/quick/static_sprop2.php).
1249 auto const selfExact
= selfClsExact(env
);
1250 if (selfExact
&& tcls
.subtypeOf(*selfExact
)) {
1254 return push(env
, std::move(*ty
));
1258 auto indexTy
= env
.index
.lookup_public_static(tcls
, tname
);
1259 if (indexTy
.subtypeOf(TInitCell
)) {
1261 * Constant propagation here can change when we invoke autoload, so it's
1262 * considered HardConstProp. It's safe not to check anything about private
1263 * or protected static properties, because you can't override a public
1264 * static property with a private or protected one---if the index gave us
1265 * back a constant type, it's because it found a public static and it must
1266 * be the property this would have read dynamically.
1268 if (options
.HardConstProp
) constprop(env
);
1269 return push(env
, std::move(indexTy
));
1272 push(env
, TInitCell
);
1275 void in(ISS
& env
, const bc::VGetL
& op
) {
1277 setLocRaw(env
, op
.loc1
, TRef
);
1281 void in(ISS
& env
, const bc::VGetN
&) {
1282 auto const t1
= topC(env
);
1283 auto const v1
= tv(t1
);
1284 if (v1
&& v1
->m_type
== KindOfPersistentString
) {
1285 auto const loc
= findLocal(env
, v1
->m_data
.pstr
);
1286 if (loc
!= NoLocalId
) {
1287 return reduce(env
, bc::PopC
{},
1291 modifyLocalStatic(env
, NoLocalId
, TRef
);
1293 boxUnknownLocal(env
);
1298 void in(ISS
& env
, const bc::VGetG
&) { popC(env
); push(env
, TRef
); }
1300 void in(ISS
& env
, const bc::VGetS
& op
) {
1301 auto const tcls
= takeClsRefSlot(env
, op
.slot
);
1302 auto const tname
= popC(env
);
1303 auto const vname
= tv(tname
);
1304 auto const self
= selfCls(env
);
1306 if (!self
|| tcls
.couldBe(*self
)) {
1307 if (vname
&& vname
->m_type
== KindOfPersistentString
) {
1308 boxSelfProp(env
, vname
->m_data
.pstr
);
1314 if (auto c
= env
.collect
.publicStatics
) {
1315 c
->merge(env
.ctx
, tcls
, tname
, TRef
);
1321 void clsRefGetImpl(ISS
& env
, Type t1
, ClsRefSlotId slot
) {
1323 if (t1
.subtypeOf(TObj
)) {
1327 auto const v1
= tv(t1
);
1328 if (v1
&& v1
->m_type
== KindOfPersistentString
) {
1329 if (auto const rcls
= env
.index
.resolve_class(env
.ctx
, v1
->m_data
.pstr
)) {
1330 return clsExact(*rcls
);
1335 putClsRefSlot(env
, slot
, std::move(cls
));
1338 void in(ISS
& env
, const bc::ClsRefGetL
& op
) {
1339 clsRefGetImpl(env
, locAsCell(env
, op
.loc1
), op
.slot
);
1341 void in(ISS
& env
, const bc::ClsRefGetC
& op
) {
1342 clsRefGetImpl(env
, popC(env
), op
.slot
);
1345 void in(ISS
& env
, const bc::AKExists
& /*op*/) {
1346 auto const t1
= popC(env
);
1347 auto const t2
= popC(env
);
1349 auto const mayThrow
= [&]{
1350 if (!t1
.subtypeOfAny(TObj
, TArr
, TVec
, TDict
, TKeyset
)) return true;
1351 if (t2
.subtypeOfAny(TStr
, TNull
)) {
1352 return t1
.subtypeOfAny(TObj
, TArr
) &&
1353 RuntimeOption::EvalHackArrCompatNotices
;
1355 if (t2
.subtypeOf(TInt
)) return false;
1359 if (!mayThrow
) nothrow(env
);
1363 void in(ISS
& env
, const bc::GetMemoKeyL
& op
) {
1364 always_assert(env
.ctx
.func
->isMemoizeWrapper
);
1366 auto const tyIMemoizeParam
=
1367 subObj(env
.index
.builtin_class(s_IMemoizeParam
.get()));
1369 auto const inTy
= locAsCell(env
, op
.loc1
);
1371 // If the local could be uninit, we might raise a warning (as
1372 // usual). Converting an object to a memo key might invoke PHP code if it has
1373 // the IMemoizeParam interface, and if it doesn't, we'll throw.
1374 if (!locCouldBeUninit(env
, op
.loc1
) && !inTy
.couldBe(TObj
)) {
1375 nothrow(env
); constprop(env
);
1378 // If type constraints are being enforced and the local being turned into a
1379 // memo key is a parameter, then we can possibly using the type constraint to
1380 // perform a more efficient memoization scheme. Note that this all needs to
1381 // stay in sync with the interpreter and JIT.
1382 using MK
= MemoKeyConstraint
;
1383 auto const mkc
= [&] {
1384 if (!RuntimeOption::EvalHardTypeHints
) return MK::None
;
1385 if (op
.loc1
>= env
.ctx
.func
->params
.size()) return MK::None
;
1386 auto tc
= env
.ctx
.func
->params
[op
.loc1
].typeConstraint
;
1387 if (tc
.type() == AnnotType::Object
) {
1388 auto res
= env
.index
.resolve_type_name(tc
.typeName());
1389 if (res
.type
!= AnnotType::Object
) {
1390 tc
.resolveType(res
.type
, res
.nullable
|| tc
.isNullable());
1393 return memoKeyConstraintFromTC(tc
);
1398 // Always null, so the key can always just be 0
1399 always_assert(inTy
.subtypeOf(TNull
));
1400 return push(env
, ival(0));
1402 // Always an int, so the key is always an identity mapping
1403 always_assert(inTy
.subtypeOf(TInt
));
1404 return reduce(env
, bc::CGetL
{ op
.loc1
});
1406 // Always a bool, so the key is the bool cast to an int
1407 always_assert(inTy
.subtypeOf(TBool
));
1408 return reduce(env
, bc::CGetL
{ op
.loc1
}, bc::CastInt
{});
1410 // Always a string, so the key is always an identity mapping
1411 always_assert(inTy
.subtypeOf(TStr
));
1412 return reduce(env
, bc::CGetL
{ op
.loc1
});
1414 // Either an int or string, so the key can be an identity mapping
1415 return reduce(env
, bc::CGetL
{ op
.loc1
});
1418 // A nullable string or int. For strings the key will always be 0 or the
1419 // string. For ints the key will be the int or a static string. We can't
1420 // reduce either without introducing control flow.
1421 return push(env
, union_of(TInt
, TStr
));
1422 case MK::BoolOrNull
:
1423 // A nullable bool. The key will always be an int (null will be 2), but we
1424 // can't reduce that without introducing control flow.
1425 return push(env
, TInt
);
1430 // No type constraint, or one that isn't usuable. Use the generic memoization
1431 // scheme which can handle any type:
1433 // Integer keys are always mapped to themselves
1434 if (inTy
.subtypeOf(TInt
)) return reduce(env
, bc::CGetL
{ op
.loc1
});
1436 if (inTy
.subtypeOf(tyIMemoizeParam
)) {
1439 bc::CGetL
{ op
.loc1
},
1440 bc::FPushObjMethodD
{
1442 s_getInstanceKey
.get(),
1443 ObjMethodOp::NullThrows
,
1451 // A memo key can be an integer if the input might be an integer, and is a
1452 // string otherwise. Booleans are always static strings.
1454 if (auto const val
= tv(inTy
)) {
1455 auto const key
= eval_cell(
1456 [&]{ return HHVM_FN(serialize_memoize_param
)(*val
); }
1458 if (key
) return *key
;
1460 if (inTy
.subtypeOf(TBool
)) return TSStr
;
1461 if (inTy
.couldBe(TInt
)) return union_of(TInt
, TStr
);
1464 push(env
, std::move(keyTy
));
1467 void in(ISS
& env
, const bc::IssetL
& op
) {
1470 auto const loc
= locAsCell(env
, op
.loc1
);
1471 if (loc
.subtypeOf(TNull
)) return push(env
, TFalse
);
1472 if (!loc
.couldBe(TNull
)) return push(env
, TTrue
);
1476 void in(ISS
& env
, const bc::EmptyL
& op
) {
1479 castBoolImpl(env
, locAsCell(env
, op
.loc1
), true);
1482 void in(ISS
& env
, const bc::EmptyS
& op
) {
1483 takeClsRefSlot(env
, op
.slot
);
1488 void in(ISS
& env
, const bc::IssetS
& op
) {
1489 auto const tcls
= takeClsRefSlot(env
, op
.slot
);
1490 auto const tname
= popC(env
);
1491 auto const vname
= tv(tname
);
1492 auto const self
= selfCls(env
);
1494 if (self
&& tcls
.subtypeOf(*self
) &&
1495 vname
&& vname
->m_type
== KindOfPersistentString
) {
1496 if (auto const t
= selfPropAsCell(env
, vname
->m_data
.pstr
)) {
1497 if (t
->subtypeOf(TNull
)) { constprop(env
); return push(env
, TFalse
); }
1498 if (!t
->couldBe(TNull
)) { constprop(env
); return push(env
, TTrue
); }
1502 auto const indexTy
= env
.index
.lookup_public_static(tcls
, tname
);
1503 if (indexTy
.subtypeOf(TInitCell
)) {
1504 // See the comments in CGetS about constprop for public statics.
1505 if (options
.HardConstProp
) constprop(env
);
1506 if (indexTy
.subtypeOf(TNull
)) { return push(env
, TFalse
); }
1507 if (!indexTy
.couldBe(TNull
)) { return push(env
, TTrue
); }
1513 template<class ReduceOp
>
1514 void issetEmptyNImpl(ISS
& env
) {
1515 auto const t1
= topC(env
);
1516 auto const v1
= tv(t1
);
1517 if (v1
&& v1
->m_type
== KindOfPersistentString
) {
1518 auto const loc
= findLocal(env
, v1
->m_data
.pstr
);
1519 if (loc
!= NoLocalId
) {
1520 return reduce(env
, bc::PopC
{}, ReduceOp
{ loc
});
1522 // Can't push true in the non env.findLocal case unless we know
1523 // whether this function can have a VarEnv.
1525 readUnknownLocals(env
);
// IssetN: isset() on a name-on-stack local; delegates to
// issetEmptyNImpl (above), reducing to PopC + IssetL when the name is
// a known static string naming a local.
void in(ISS& env, const bc::IssetN&) { issetEmptyNImpl<bc::IssetL>(env); }
// EmptyN: empty() on a name-on-stack local; same delegation as IssetN
// but reduces to EmptyL.
void in(ISS& env, const bc::EmptyN&) { issetEmptyNImpl<bc::EmptyL>(env); }
// EmptyG: empty() on a global — pop the name, result is some bool.
void in(ISS& env, const bc::EmptyG&) { popC(env); push(env, TBool); }
// IssetG: isset() on a global — pop the name, result is some bool.
void in(ISS& env, const bc::IssetG&) { popC(env); push(env, TBool); }
1536 void isTypeImpl(ISS
& env
, const Type
& locOrCell
, const Type
& test
) {
1538 if (locOrCell
.subtypeOf(test
)) return push(env
, TTrue
);
1539 if (!locOrCell
.couldBe(test
)) return push(env
, TFalse
);
1543 void isTypeObj(ISS
& env
, const Type
& ty
) {
1544 if (!ty
.couldBe(TObj
)) return push(env
, TFalse
);
1545 if (ty
.subtypeOf(TObj
)) {
1546 auto const incompl
= objExact(
1547 env
.index
.builtin_class(s_PHP_Incomplete_Class
.get()));
1548 if (!ty
.couldBe(incompl
)) return push(env
, TTrue
);
1549 if (ty
.subtypeOf(incompl
)) return push(env
, TFalse
);
1555 void isTypeLImpl(ISS
& env
, const Op
& op
) {
1556 if (!locCouldBeUninit(env
, op
.loc1
)) { nothrow(env
); constprop(env
); }
1557 auto const loc
= locAsCell(env
, op
.loc1
);
1558 switch (op
.subop2
) {
1559 case IsTypeOp::Scalar
: return push(env
, TBool
);
1560 case IsTypeOp::Obj
: return isTypeObj(env
, loc
);
1561 default: return isTypeImpl(env
, loc
, type_of_istype(op
.subop2
));
1566 void isTypeCImpl(ISS
& env
, const Op
& op
) {
1568 auto const t1
= popC(env
);
1569 switch (op
.subop1
) {
1570 case IsTypeOp::Scalar
: return push(env
, TBool
);
1571 case IsTypeOp::Obj
: return isTypeObj(env
, t1
);
1572 default: return isTypeImpl(env
, t1
, type_of_istype(op
.subop1
));
// IsTypeC: type predicate on the stack top; shared logic in
// isTypeCImpl (above).
void in(ISS& env, const bc::IsTypeC& op) { isTypeCImpl(env, op); }
// IsTypeL: type predicate on a local; shared logic in isTypeLImpl
// (above).
void in(ISS& env, const bc::IsTypeL& op) { isTypeLImpl(env, op); }
1579 void in(ISS
& env
, const bc::IsUninit
& /*op*/) {
1581 push(env
, popCU(env
));
1582 isTypeImpl(env
, topT(env
), TUninit
);
1585 void in(ISS
& env
, const bc::MaybeMemoType
& /*op*/) {
1586 always_assert(env
.ctx
.func
->isMemoizeWrapper
);
1589 auto const memoTy
= memoizeImplRetType(env
);
1590 auto const ty
= popC(env
);
1591 push(env
, ty
.couldBe(memoTy
) ? TTrue
: TFalse
);
1594 void in(ISS
& env
, const bc::IsMemoType
& /*op*/) {
1595 always_assert(env
.ctx
.func
->isMemoizeWrapper
);
1598 auto const memoTy
= memoizeImplRetType(env
);
1599 auto const ty
= popC(env
);
1600 push(env
, memoTy
.subtypeOf(ty
) ? TTrue
: TFalse
);
1603 void in(ISS
& env
, const bc::InstanceOfD
& op
) {
1604 auto const t1
= popC(env
);
1605 // Note: InstanceOfD can do autoload if the type might be a type
1606 // alias, so it's not nothrow unless we know it's an object type.
1607 if (auto const rcls
= env
.index
.resolve_class(env
.ctx
, op
.str1
)) {
1609 if (!interface_supports_non_objects(rcls
->name())) {
1610 isTypeImpl(env
, t1
, subObj(*rcls
));
1617 void in(ISS
& env
, const bc::InstanceOf
& /*op*/) {
1618 auto const t1
= topC(env
);
1619 auto const v1
= tv(t1
);
1620 if (v1
&& v1
->m_type
== KindOfPersistentString
) {
1621 return reduce(env
, bc::PopC
{},
1622 bc::InstanceOfD
{ v1
->m_data
.pstr
});
1625 if (t1
.subtypeOf(TObj
) && is_specialized_obj(t1
)) {
1626 auto const dobj
= dobj_of(t1
);
1627 switch (dobj
.type
) {
1631 return reduce(env
, bc::PopC
{},
1632 bc::InstanceOfD
{ dobj
.cls
.name() });
1644 * If the value on the top of the stack is known to be equivalent to the local
1645 * its being moved/copied to, return folly::none without modifying any
1646 * state. Otherwise, pop the stack value, perform the set, and return a pair
1647 * giving the value's type, and any other local its known to be equivalent to.
1649 template <typename Op
>
1650 folly::Optional
<std::pair
<Type
, LocalId
>> moveToLocImpl(ISS
& env
,
1653 auto equivLoc
= topStkEquiv(env
);
1654 // If the local could be a Ref, don't record equality because the stack
1655 // element and the local won't actually have the same type.
1656 if (!locCouldBeRef(env
, op
.loc1
) &&
1657 !is_volatile_local(env
.ctx
.func
, op
.loc1
)) {
1658 if (equivLoc
!= NoLocalId
) {
1659 if (equivLoc
== op
.loc1
||
1660 locsAreEquiv(env
, equivLoc
, op
.loc1
)) {
1661 // We allow equivalency to ignore Uninit, so we need to check
1663 if (peekLocRaw(env
, op
.loc1
) == topC(env
)) {
1671 auto val
= popC(env
);
1672 setLoc(env
, op
.loc1
, val
);
1673 if (equivLoc
!= op
.loc1
&& equivLoc
!= NoLocalId
) {
1674 addLocEquiv(env
, op
.loc1
, equivLoc
);
1676 return { std::make_pair(std::move(val
), equivLoc
) };
// PopL: pop the stack top into a local.
void in(ISS& env, const bc::PopL& op) {
  // If the same value is already in the local, do nothing but pop
  // it. Otherwise, the set has been done by moveToLocImpl.
  if (!moveToLocImpl(env, op)) return reduce(env, bc::PopC {});
}
// SetL: assign the stack top to a local, leaving the value on the
// stack.
void in(ISS& env, const bc::SetL& op) {
  // If the same value is already in the local, do nothing because SetL keeps
  // the value on the stack. If it isn't, we need to push it back onto the stack
  // because moveToLocImpl popped it.
  if (auto p = moveToLocImpl(env, op)) {
    push(env, std::move(p->first), p->second);
  } else {
    reduce(env, bc::Nop {});
  }
}
1698 void in(ISS
& env
, const bc::SetN
&) {
1699 // This isn't trivial to strength reduce, without a "flip two top
1700 // elements of stack" opcode.
1701 auto t1
= popC(env
);
1702 auto const t2
= popC(env
);
1703 auto const v2
= tv(t2
);
1704 // TODO(#3653110): could nothrow if t2 can't be an Obj or Res
1706 auto const knownLoc
= v2
&& v2
->m_type
== KindOfPersistentString
1707 ? findLocal(env
, v2
->m_data
.pstr
)
1709 if (knownLoc
!= NoLocalId
) {
1710 setLoc(env
, knownLoc
, t1
);
1712 // We could be changing the value of any local, but we won't
1713 // change whether or not they are boxed or initialized.
1714 loseNonRefLocalTypes(env
);
1717 push(env
, std::move(t1
));
1720 void in(ISS
& env
, const bc::SetG
&) {
1721 auto t1
= popC(env
);
1723 push(env
, std::move(t1
));
1726 void in(ISS
& env
, const bc::SetS
& op
) {
1727 auto const t1
= popC(env
);
1728 auto const tcls
= takeClsRefSlot(env
, op
.slot
);
1729 auto const tname
= popC(env
);
1730 auto const vname
= tv(tname
);
1731 auto const self
= selfCls(env
);
1733 if (!self
|| tcls
.couldBe(*self
)) {
1734 if (vname
&& vname
->m_type
== KindOfPersistentString
) {
1736 mergeSelfProp(env
, vname
->m_data
.pstr
, t1
);
1738 mergeEachSelfPropRaw(env
, [&] (Type
) { return t1
; });
1742 if (auto c
= env
.collect
.publicStatics
) {
1743 c
->merge(env
.ctx
, tcls
, tname
, t1
);
1746 push(env
, std::move(t1
));
1749 void in(ISS
& env
, const bc::SetOpL
& op
) {
1750 auto const t1
= popC(env
);
1751 auto const v1
= tv(t1
);
1752 auto const loc
= locAsCell(env
, op
.loc1
);
1753 auto const locVal
= tv(loc
);
1755 // Can't constprop at this eval_cell, because of the effects on
1757 auto resultTy
= eval_cell([&] {
1760 setopBody(&c
, op
.subop2
, &rhs
);
1763 if (!resultTy
) resultTy
= TInitCell
;
1765 // We may have inferred a TSStr or TSArr with a value here, but
1766 // at runtime it will not be static. For now just throw that
1767 // away. TODO(#3696042): should be able to loosen_staticness here.
1768 if (resultTy
->subtypeOf(TStr
)) resultTy
= TStr
;
1769 else if (resultTy
->subtypeOf(TArr
)) resultTy
= TArr
;
1770 else if (resultTy
->subtypeOf(TVec
)) resultTy
= TVec
;
1771 else if (resultTy
->subtypeOf(TDict
)) resultTy
= TDict
;
1772 else if (resultTy
->subtypeOf(TKeyset
)) resultTy
= TKeyset
;
1774 setLoc(env
, op
.loc1
, *resultTy
);
1775 push(env
, *resultTy
);
1779 auto resultTy
= typeSetOp(op
.subop2
, loc
, t1
);
1780 setLoc(env
, op
.loc1
, resultTy
);
1781 push(env
, std::move(resultTy
));
1784 void in(ISS
& env
, const bc::SetOpN
&) {
1787 loseNonRefLocalTypes(env
);
1789 push(env
, TInitCell
);
1792 void in(ISS
& env
, const bc::SetOpG
&) {
1793 popC(env
); popC(env
);
1794 push(env
, TInitCell
);
1797 void in(ISS
& env
, const bc::SetOpS
& op
) {
1799 auto const tcls
= takeClsRefSlot(env
, op
.slot
);
1800 auto const tname
= popC(env
);
1801 auto const vname
= tv(tname
);
1802 auto const self
= selfCls(env
);
1804 if (!self
|| tcls
.couldBe(*self
)) {
1805 if (vname
&& vname
->m_type
== KindOfPersistentString
) {
1806 mergeSelfProp(env
, vname
->m_data
.pstr
, TInitCell
);
1808 loseNonRefSelfPropTypes(env
);
1812 if (auto c
= env
.collect
.publicStatics
) {
1813 c
->merge(env
.ctx
, tcls
, tname
, TInitCell
);
1816 push(env
, TInitCell
);
1819 void in(ISS
& env
, const bc::IncDecL
& op
) {
1820 auto loc
= locAsCell(env
, op
.loc1
);
1821 auto newT
= typeIncDec(op
.subop2
, loc
);
1822 auto const pre
= isPre(op
.subop2
);
1824 // If it's a non-numeric string, this may cause it to exceed the max length.
1825 if (!locCouldBeUninit(env
, op
.loc1
) &&
1826 !loc
.couldBe(TStr
)) {
1830 if (!pre
) push(env
, std::move(loc
));
1831 setLoc(env
, op
.loc1
, newT
);
1832 if (pre
) push(env
, std::move(newT
));
1835 void in(ISS
& env
, const bc::IncDecN
& op
) {
1836 auto const t1
= topC(env
);
1837 auto const v1
= tv(t1
);
1838 auto const knownLoc
= v1
&& v1
->m_type
== KindOfPersistentString
1839 ? findLocal(env
, v1
->m_data
.pstr
)
1841 if (knownLoc
!= NoLocalId
) {
1842 return reduce(env
, bc::PopC
{},
1843 bc::IncDecL
{ knownLoc
, op
.subop1
});
1846 loseNonRefLocalTypes(env
);
1848 push(env
, TInitCell
);
1851 void in(ISS
& env
, const bc::IncDecG
&) { popC(env
); push(env
, TInitCell
); }
1853 void in(ISS
& env
, const bc::IncDecS
& op
) {
1854 auto const tcls
= takeClsRefSlot(env
, op
.slot
);
1855 auto const tname
= popC(env
);
1856 auto const vname
= tv(tname
);
1857 auto const self
= selfCls(env
);
1859 if (!self
|| tcls
.couldBe(*self
)) {
1860 if (vname
&& vname
->m_type
== KindOfPersistentString
) {
1861 mergeSelfProp(env
, vname
->m_data
.pstr
, TInitCell
);
1863 loseNonRefSelfPropTypes(env
);
1867 if (auto c
= env
.collect
.publicStatics
) {
1868 c
->merge(env
.ctx
, tcls
, tname
, TInitCell
);
1871 push(env
, TInitCell
);
1874 void in(ISS
& env
, const bc::BindL
& op
) {
1875 // If the op.loc1 was bound to a local static, its going to be
1876 // unbound from it. If the thing its being bound /to/ is a local
1877 // static, we've already marked it as modified via the VGetL, so
1878 // there's nothing more to track.
1879 // Unbind it before any updates.
1880 modifyLocalStatic(env
, op
.loc1
, TUninit
);
1882 auto t1
= popV(env
);
1883 setLocRaw(env
, op
.loc1
, t1
);
1884 push(env
, std::move(t1
));
1887 void in(ISS
& env
, const bc::BindN
&) {
1888 // TODO(#3653110): could nothrow if t2 can't be an Obj or Res
1889 auto t1
= popV(env
);
1890 auto const t2
= popC(env
);
1891 auto const v2
= tv(t2
);
1892 auto const knownLoc
= v2
&& v2
->m_type
== KindOfPersistentString
1893 ? findLocal(env
, v2
->m_data
.pstr
)
1895 unbindLocalStatic(env
, knownLoc
);
1896 if (knownLoc
!= NoLocalId
) {
1897 setLocRaw(env
, knownLoc
, t1
);
1899 boxUnknownLocal(env
);
1902 push(env
, std::move(t1
));
1905 void in(ISS
& env
, const bc::BindG
&) {
1906 auto t1
= popV(env
);
1908 push(env
, std::move(t1
));
1911 void in(ISS
& env
, const bc::BindS
& op
) {
1913 auto const tcls
= takeClsRefSlot(env
, op
.slot
);
1914 auto const tname
= popC(env
);
1915 auto const vname
= tv(tname
);
1916 auto const self
= selfCls(env
);
1918 if (!self
|| tcls
.couldBe(*self
)) {
1919 if (vname
&& vname
->m_type
== KindOfPersistentString
) {
1920 boxSelfProp(env
, vname
->m_data
.pstr
);
1926 if (auto c
= env
.collect
.publicStatics
) {
1927 c
->merge(env
.ctx
, tcls
, tname
, TRef
);
1933 void in(ISS
& env
, const bc::UnsetL
& op
) {
1935 setLocRaw(env
, op
.loc1
, TUninit
);
1938 void in(ISS
& env
, const bc::UnsetN
& /*op*/) {
1939 auto const t1
= topC(env
);
1940 auto const v1
= tv(t1
);
1941 if (v1
&& v1
->m_type
== KindOfPersistentString
) {
1942 auto const loc
= findLocal(env
, v1
->m_data
.pstr
);
1943 if (loc
!= NoLocalId
) {
1944 return reduce(env
, bc::PopC
{},
1945 bc::UnsetL
{ loc
});
1949 if (!t1
.couldBe(TObj
) && !t1
.couldBe(TRes
)) nothrow(env
);
1950 unsetUnknownLocal(env
);
1954 void in(ISS
& env
, const bc::UnsetG
& /*op*/) {
1955 auto const t1
= popC(env
);
1956 if (!t1
.couldBe(TObj
) && !t1
.couldBe(TRes
)) nothrow(env
);
1959 void in(ISS
& env
, const bc::FPushFuncD
& op
) {
1960 auto const rfunc
= env
.index
.resolve_func(env
.ctx
, op
.str2
);
1961 if (auto const func
= rfunc
.exactFunc()) {
1962 if (can_emit_builtin(func
, op
.arg1
, op
.has_unpack
)) {
1963 fpiPush(env
, ActRec
{ FPIKind::Builtin
, folly::none
, rfunc
});
1964 return reduce(env
, bc::Nop
{});
1967 fpiPush(env
, ActRec
{ FPIKind::Func
, folly::none
, rfunc
});
1970 void in(ISS
& env
, const bc::FPushFunc
& op
) {
1971 auto const t1
= topC(env
);
1972 auto const v1
= tv(t1
);
1973 if (v1
&& v1
->m_type
== KindOfPersistentString
) {
1974 auto const name
= normalizeNS(v1
->m_data
.pstr
);
1975 // FPushFuncD doesn't support class-method pair strings yet.
1976 if (isNSNormalized(name
) && notClassMethodPair(name
)) {
1977 auto const rfunc
= env
.index
.resolve_func(env
.ctx
, name
);
1978 // Don't turn dynamic calls to caller frame affecting functions into
1979 // static calls, because they might fatal (whereas the static one won't).
1980 if (!rfunc
.mightAccessCallerFrame()) {
1981 return reduce(env
, bc::PopC
{},
1982 bc::FPushFuncD
{ op
.arg1
, name
, op
.has_unpack
});
1987 if (t1
.subtypeOf(TObj
)) return fpiPush(env
, ActRec
{ FPIKind::ObjInvoke
});
1988 if (t1
.subtypeOf(TArr
)) return fpiPush(env
, ActRec
{ FPIKind::CallableArr
});
1989 if (t1
.subtypeOf(TStr
)) return fpiPush(env
, ActRec
{ FPIKind::Func
});
1990 fpiPush(env
, ActRec
{ FPIKind::Unknown
});
1993 void in(ISS
& env
, const bc::FPushFuncU
& op
) {
1994 auto const rfuncPair
=
1995 env
.index
.resolve_func_fallback(env
.ctx
, op
.str2
, op
.str3
);
1996 if (options
.ElideAutoloadInvokes
&& !rfuncPair
.second
) {
1999 bc::FPushFuncD
{ op
.arg1
, rfuncPair
.first
.name(), op
.has_unpack
}
2004 ActRec
{ FPIKind::Func
, folly::none
, rfuncPair
.first
, rfuncPair
.second
}
2008 void in(ISS
& env
, const bc::FPushObjMethodD
& op
) {
2009 auto loc
= topStkEquiv(env
);
2010 auto t1
= popC(env
);
2011 if (is_opt(t1
) && op
.subop3
== ObjMethodOp::NullThrows
) {
2014 auto const clsTy
= objcls(t1
);
2015 auto const rcls
= [&]() -> folly::Optional
<res::Class
> {
2016 if (is_specialized_cls(clsTy
)) return dcls_of(clsTy
).cls
;
2020 fpiPush(env
, ActRec
{
2023 env
.index
.resolve_method(env
.ctx
, clsTy
, op
.str2
)
2025 if (loc
!= NoLocalId
) {
2026 auto locTy
= peekLocRaw(env
, loc
);
2027 if (locTy
.subtypeOf(TCell
)) {
2028 if (!is_specialized_obj(locTy
)) {
2030 op
.subop3
== ObjMethodOp::NullThrows
? TObj
: TOptObj
);
2031 } else if (is_opt(locTy
) && op
.subop3
== ObjMethodOp::NullThrows
) {
2032 refineLoc(env
, loc
, unopt(locTy
));
2038 void in(ISS
& env
, const bc::FPushObjMethod
& op
) {
2039 auto const t1
= topC(env
);
2040 auto const v1
= tv(t1
);
2041 if (v1
&& v1
->m_type
== KindOfPersistentString
) {
2045 bc::FPushObjMethodD
{ op
.arg1
, v1
->m_data
.pstr
, op
.subop2
, op
.has_unpack
}
2050 fpiPush(env
, ActRec
{ FPIKind::ObjMeth
});
2053 void in(ISS
& env
, const bc::FPushClsMethodD
& op
) {
2054 auto const rcls
= env
.index
.resolve_class(env
.ctx
, op
.str3
);
2055 auto const rfun
= env
.index
.resolve_method(
2057 rcls
? clsExact(*rcls
) : TCls
,
2060 fpiPush(env
, ActRec
{ FPIKind::ClsMeth
, rcls
, rfun
});
2063 void in(ISS
& env
, const bc::FPushClsMethod
& op
) {
2064 auto const t1
= takeClsRefSlot(env
, op
.slot
);
2065 auto const t2
= popC(env
);
2066 auto const v2
= tv(t2
);
2068 folly::Optional
<res::Func
> rfunc
;
2069 if (v2
&& v2
->m_type
== KindOfPersistentString
) {
2070 rfunc
= env
.index
.resolve_method(env
.ctx
, t1
, v2
->m_data
.pstr
);
2072 folly::Optional
<res::Class
> rcls
;
2073 if (is_specialized_cls(t1
)) rcls
= dcls_of(t1
).cls
;
2074 fpiPush(env
, ActRec
{ FPIKind::ClsMeth
, rcls
, rfunc
});
void in(ISS& env, const bc::FPushClsMethodF& op) {
  // The difference with FPushClsMethod is what ends up on the
  // ActRec (late-bound class), which we currently aren't tracking.
  impl(env, bc::FPushClsMethod { op.arg1, op.slot, op.has_unpack });
}
2083 void ctorHelper(ISS
& env
, SString name
) {
2084 auto const rcls
= env
.index
.resolve_class(env
.ctx
, name
);
2085 push(env
, rcls
? objExact(*rcls
) : TObj
);
2087 rcls
? env
.index
.resolve_ctor(env
.ctx
, *rcls
) : folly::none
;
2088 fpiPush(env
, ActRec
{ FPIKind::Ctor
, rcls
, rfunc
});
// FPushCtorD: push a constructor ActRec for the class named by str2;
// shared logic in ctorHelper (above).
void in(ISS& env, const bc::FPushCtorD& op) {
  ctorHelper(env, op.str2);
}
// FPushCtorI: like FPushCtorD, but the class is referenced by index
// into the unit's class table (arg2) rather than by name.
void in(ISS& env, const bc::FPushCtorI& op) {
  auto const name = env.ctx.unit->classes[op.arg2]->name;
  ctorHelper(env, name);
}
2100 void in(ISS
& env
, const bc::FPushCtor
& op
) {
2101 auto const& t1
= peekClsRefSlot(env
, op
.slot
);
2102 if (is_specialized_cls(t1
)) {
2103 auto const dcls
= dcls_of(t1
);
2104 if (dcls
.type
== DCls::Exact
) {
2105 return reduce(env
, bc::DiscardClsRef
{ op
.slot
},
2106 bc::FPushCtorD
{ op
.arg1
, dcls
.cls
.name(), op
.has_unpack
});
2109 takeClsRefSlot(env
, op
.slot
);
2111 fpiPush(env
, ActRec
{ FPIKind::Ctor
});
2114 void in(ISS
& env
, const bc::FPushCufIter
&) {
2116 fpiPush(env
, ActRec
{ FPIKind::Unknown
});
2119 void in(ISS
& env
, const bc::FPushCuf
&) {
2121 fpiPush(env
, ActRec
{ FPIKind::Unknown
});
2123 void in(ISS
& env
, const bc::FPushCufF
&) {
2125 fpiPush(env
, ActRec
{ FPIKind::Unknown
});
2128 void in(ISS
& env
, const bc::FPushCufSafe
&) {
2129 auto t1
= popC(env
);
2131 push(env
, std::move(t1
));
2132 fpiPush(env
, ActRec
{ FPIKind::Unknown
});
2136 void in(ISS
& env
, const bc::RaiseFPassWarning
& op
) {
2139 void in(ISS
& env
, const bc::FPassL
& op
) {
2140 switch (prepKind(env
, op
.arg1
)) {
2141 case PrepKind::Unknown
:
2142 if (!locCouldBeUninit(env
, op
.loc2
) && op
.subop3
== FPassHint::Any
) {
2145 // This might box the local, we can't tell. Note: if the local
2146 // is already TRef, we could try to leave it alone, but not for
2148 setLocRaw(env
, op
.loc2
, TGen
);
2149 return push(env
, TInitGen
);
2151 return reduce_fpass_arg(env
, bc::CGetL
{ op
.loc2
}, op
.arg1
, false,
2154 return reduce_fpass_arg(env
, bc::VGetL
{ op
.loc2
}, op
.arg1
, true,
2159 void in(ISS
& env
, const bc::FPassN
& op
) {
2160 switch (prepKind(env
, op
.arg1
)) {
2161 case PrepKind::Unknown
:
2162 // This could change the type of any local.
2166 return push(env
, TInitGen
);
2167 case PrepKind::Val
: return reduce_fpass_arg(env
,
2172 case PrepKind::Ref
: return reduce_fpass_arg(env
,
2180 void in(ISS
& env
, const bc::FPassG
& op
) {
2181 switch (prepKind(env
, op
.arg1
)) {
2182 case PrepKind::Unknown
: popC(env
); return push(env
, TInitGen
);
2183 case PrepKind::Val
: return reduce_fpass_arg(env
,
2188 case PrepKind::Ref
: return reduce_fpass_arg(env
,
2196 void in(ISS
& env
, const bc::FPassS
& op
) {
2197 switch (prepKind(env
, op
.arg1
)) {
2198 case PrepKind::Unknown
:
2200 auto tcls
= takeClsRefSlot(env
, op
.slot
);
2201 auto const self
= selfCls(env
);
2202 auto const tname
= popC(env
);
2203 auto const vname
= tv(tname
);
2204 if (!self
|| tcls
.couldBe(*self
)) {
2205 if (vname
&& vname
->m_type
== KindOfPersistentString
) {
2206 // May or may not be boxing it, depending on the refiness.
2207 mergeSelfProp(env
, vname
->m_data
.pstr
, TInitGen
);
2212 if (auto c
= env
.collect
.publicStatics
) {
2213 c
->merge(env
.ctx
, std::move(tcls
), tname
, TInitGen
);
2216 return push(env
, TInitGen
);
2218 return reduce_fpass_arg(env
, bc::CGetS
{ op
.slot
}, op
.arg1
, false,
2221 return reduce_fpass_arg(env
, bc::VGetS
{ op
.slot
}, op
.arg1
, true,
2226 void in(ISS
& env
, const bc::FPassV
& op
) {
2227 auto const kind
= prepKind(env
, op
.arg1
);
2228 if (!fpassCanThrow(env
, kind
, op
.subop2
)) nothrow(env
);
2230 case PrepKind::Unknown
:
2232 return push(env
, TInitGen
);
2234 return reduce_fpass_arg(env
, bc::Unbox
{}, op
.arg1
, false, op
.subop2
);
2236 return reduce_fpass_arg(env
, bc::Nop
{}, op
.arg1
, true, op
.subop2
);
2240 void in(ISS
& env
, const bc::FPassR
& op
) {
2241 auto const kind
= prepKind(env
, op
.arg1
);
2242 if (!fpassCanThrow(env
, kind
, op
.subop2
)) nothrow(env
);
2243 if (fpiTop(env
).kind
== FPIKind::Builtin
) {
2245 case PrepKind::Unknown
:
2248 return reduceFPassBuiltin(env
, kind
, op
.subop2
, op
.arg1
, bc::UnboxR
{});
2250 return reduceFPassBuiltin(env
, kind
, op
.subop2
, op
.arg1
, bc::BoxR
{});
2254 auto const t1
= topT(env
);
2255 if (t1
.subtypeOf(TCell
)) {
2256 return reduce_fpass_arg(env
, bc::UnboxRNop
{}, op
.arg1
, false, op
.subop2
);
2259 // If it's known to be a ref, this behaves like FPassV, except we need to do
2260 // it slightly differently to keep stack flavors correct.
2261 if (t1
.subtypeOf(TRef
)) {
2263 case PrepKind::Unknown
:
2265 return push(env
, TInitGen
);
2267 return reduce_fpass_arg(env
, bc::UnboxR
{}, op
.arg1
, false, op
.subop2
);
2269 return reduce_fpass_arg(env
, bc::BoxRNop
{}, op
.arg1
, true, op
.subop2
);
2274 // Here we don't know if it is going to be a cell or a ref.
2276 case PrepKind::Unknown
: popR(env
); return push(env
, TInitGen
);
2277 case PrepKind::Val
: popR(env
); return push(env
, TInitCell
);
2278 case PrepKind::Ref
: popR(env
); return push(env
, TRef
);
2282 void in(ISS
& env
, const bc::FPassVNop
& op
) {
2283 push(env
, popV(env
));
2284 if (fpiTop(env
).kind
== FPIKind::Builtin
) {
2285 return reduceFPassBuiltin(env
, prepKind(env
, op
.arg1
), op
.subop2
, op
.arg1
,
2288 if (op
.subop2
!= FPassHint::Cell
) nothrow(env
);
2291 void in(ISS
& env
, const bc::FPassC
& op
) {
2292 if (fpiTop(env
).kind
== FPIKind::Builtin
) {
2293 return reduceFPassBuiltin(env
, prepKind(env
, op
.arg1
), op
.subop2
, op
.arg1
,
2296 if (op
.subop2
!= FPassHint::Ref
) nothrow(env
);
2299 void fpassCXHelper(ISS
& env
, uint32_t param
, bool error
, FPassHint hint
) {
2300 auto const& fpi
= fpiTop(env
);
2301 auto const kind
= prepKind(env
, param
);
2302 if (fpi
.kind
== FPIKind::Builtin
) {
2304 case PrepKind::Unknown
:
2308 auto const& params
= fpi
.func
->exactFunc()->params
;
2309 if (param
>= params
.size() || params
[param
].mustBeRef
) {
2311 return reduceFPassBuiltin(
2316 bc::String
{ s_byRefError
.get() },
2317 bc::Fatal
{ FatalOp::Runtime
}
2320 return reduceFPassBuiltin(
2325 bc::String
{ s_byRefWarn
.get() },
2326 bc::Int
{ (int)ErrorMode::STRICT
},
2327 bc::FCallBuiltin
{ 2, 2, s_trigger_error
.get() },
2335 return reduce(env
, bc::Nop
{});
2340 case PrepKind::Unknown
: return;
2341 case PrepKind::Val
: return reduce(env
, bc::FPassC
{ param
, hint
});
2342 case PrepKind::Ref
: /* will warn/fatal at runtime */ return;
2346 void in(ISS
& env
, const bc::FPassCW
& op
) {
2347 fpassCXHelper(env
, op
.arg1
, false, op
.subop2
);
2350 void in(ISS
& env
, const bc::FPassCE
& op
) {
2351 fpassCXHelper(env
, op
.arg1
, true, op
.subop2
);
2354 void pushCallReturnType(ISS
& env
, Type
&& ty
) {
2355 if (ty
== TBottom
) {
2356 // The callee function never returns. It might throw, or loop forever.
2359 return push(env
, std::move(ty
));
2362 const StaticString s_defined
{ "defined" };
2363 const StaticString s_function_exists
{ "function_exists" };
2365 void fcallKnownImpl(ISS
& env
, uint32_t numArgs
) {
2366 auto const ar
= fpiPop(env
);
2367 always_assert(ar
.func
.hasValue());
2369 if (options
.ConstantFoldBuiltins
&& ar
.func
->isFoldable()) {
2370 if (auto val
= const_fold(env
, numArgs
, *ar
.func
)) {
2371 return push(env
, std::move(*val
));
2375 specialFunctionEffects(env
, ar
);
2377 if (ar
.func
->name()->isame(s_function_exists
.get())) {
2378 handle_function_exists(env
, numArgs
, false);
2381 std::vector
<Type
> args(numArgs
);
2382 for (auto i
= uint32_t{0}; i
< numArgs
; ++i
) {
2383 args
[numArgs
- i
- 1] = popF(env
);
2386 if (options
.HardConstProp
&&
2388 ar
.func
->name()->isame(s_defined
.get())) {
2389 // If someone calls defined('foo') they probably want foo to be
2390 // defined normally; ie not a persistent constant.
2391 if (auto const v
= tv(args
[0])) {
2392 if (isStringType(v
->m_type
) &&
2393 !env
.index
.lookup_constant(env
.ctx
, v
->m_data
.pstr
)) {
2394 env
.collect
.cnsMap
[v
->m_data
.pstr
].m_type
= kDynamicConstant
;
2399 auto ty
= env
.index
.lookup_return_type(
2400 CallContext
{ env
.ctx
, args
},
2403 if (!ar
.fallbackFunc
) {
2404 pushCallReturnType(env
, std::move(ty
));
2407 auto ty2
= env
.index
.lookup_return_type(
2408 CallContext
{ env
.ctx
, args
},
2411 pushCallReturnType(env
, union_of(std::move(ty
), std::move(ty2
)));
2414 void in(ISS
& env
, const bc::FCall
& op
) {
2415 auto const ar
= fpiTop(env
);
2416 if (ar
.func
&& !ar
.fallbackFunc
) {
2418 case FPIKind::Unknown
:
2419 case FPIKind::CallableArr
:
2420 case FPIKind::ObjInvoke
:
2423 // Don't turn dynamic calls into static calls with functions that can
2424 // potentially touch the caller's frame. Such functions will fatal if
2425 // called dynamically and we want to preserve that behavior.
2426 if (!ar
.func
->mightAccessCallerFrame()) {
2429 bc::FCallD
{ op
.arg1
, s_empty
.get(), ar
.func
->name() }
2433 case FPIKind::Builtin
:
2434 return finish_builtin(env
, ar
.func
->exactFunc(), op
.arg1
, false);
2437 * Need to be wary of old-style ctors. We could get into the situation
2438 * where we're constructing class D extends B, and B has an old-style
2439 * ctor but D::B also exists. (So in this case we'll skip the
2440 * fcallKnownImpl stuff.)
2442 if (!ar
.func
->name()->isame(s_construct
.get()) &&
2443 !ar
.func
->name()->isame(s_86ctor
.get())) {
2447 case FPIKind::ObjMeth
:
2448 case FPIKind::ClsMeth
:
2449 if (ar
.cls
.hasValue() && ar
.func
->cantBeMagicCall()) {
2452 bc::FCallD
{ op
.arg1
, ar
.cls
->name(), ar
.func
->name() }
2456 // If we didn't return a reduce above, we still can compute a
2457 // partially-known FCall effect with our res::Func.
2458 return fcallKnownImpl(env
, op
.arg1
);
2462 for (auto i
= uint32_t{0}; i
< op
.arg1
; ++i
) popF(env
);
2464 specialFunctionEffects(env
, ar
);
2465 push(env
, TInitGen
);
2468 void in(ISS
& env
, const bc::FCallD
& op
) {
2469 auto const ar
= fpiTop(env
);
2470 if (ar
.kind
== FPIKind::Builtin
) {
2471 return finish_builtin(env
, ar
.func
->exactFunc(), op
.arg1
, false);
2473 if (ar
.func
) return fcallKnownImpl(env
, op
.arg1
);
2474 specialFunctionEffects(env
, ar
);
2475 for (auto i
= uint32_t{0}; i
< op
.arg1
; ++i
) popF(env
);
2476 push(env
, TInitGen
);
2479 void in(ISS
& env
, const bc::FCallAwait
& op
) {
2481 bc::FCallD
{ op
.arg1
, op
.str2
, op
.str3
},
2486 void fcallArrayImpl(ISS
& env
, int arg
) {
2487 auto const ar
= fpiTop(env
);
2488 if (ar
.kind
== FPIKind::Builtin
) {
2489 return finish_builtin(env
, ar
.func
->exactFunc(), arg
, true);
2492 for (auto i
= uint32_t{0}; i
< arg
; ++i
) { popF(env
); }
2494 specialFunctionEffects(env
, ar
);
2496 auto ty
= env
.index
.lookup_return_type(env
.ctx
, *ar
.func
);
2497 if (!ar
.fallbackFunc
) {
2498 pushCallReturnType(env
, std::move(ty
));
2501 auto ty2
= env
.index
.lookup_return_type(env
.ctx
, *ar
.fallbackFunc
);
2502 pushCallReturnType(env
, union_of(std::move(ty
), std::move(ty2
)));
2505 return push(env
, TInitGen
);
2508 void in(ISS
& env
, const bc::FCallArray
& /*op*/) {
2509 fcallArrayImpl(env
, 1);
2512 void in(ISS
& env
, const bc::FCallUnpack
& op
) {
2513 fcallArrayImpl(env
, op
.arg1
);
2516 void in(ISS
& env
, const bc::CufSafeArray
&) {
2517 popR(env
); popC(env
); popC(env
);
2521 void in(ISS
& env
, const bc::CufSafeReturn
&) {
2522 popR(env
); popC(env
); popC(env
);
2523 push(env
, TInitCell
);
2526 void in(ISS
& env
, const bc::DecodeCufIter
& op
) {
2528 env
.propagate(op
.target
, env
.state
); // before iter is modifed
2531 void in(ISS
& env
, const bc::IterInit
& op
) {
2532 auto const t1
= popC(env
);
2533 auto ity
= iter_types(t1
);
2534 if (!ity
.mayThrowOnInit
) nothrow(env
);
2536 auto const taken
= [&]{
2537 // Take the branch before setting locals if the iter is already
2538 // empty, but after popping. Similar for the other IterInits
2540 freeIter(env
, op
.iter1
);
2541 env
.propagate(op
.target
, env
.state
);
2544 auto const fallthrough
= [&]{
2545 setLoc(env
, op
.loc3
, ity
.value
);
2546 setIter(env
, op
.iter1
, TrackedIter
{ std::move(ity
) });
2549 switch (ity
.count
) {
2550 case IterTypes::Count::Empty
:
2552 mayReadLocal(env
, op
.loc3
);
2553 jmp_nofallthrough(env
);
2555 case IterTypes::Count::Single
:
2556 case IterTypes::Count::NonEmpty
:
2558 jmp_nevertaken(env
);
2560 case IterTypes::Count::ZeroOrOne
:
2561 case IterTypes::Count::Any
:
2568 void in(ISS
& env
, const bc::MIterInit
& op
) {
2570 env
.propagate(op
.target
, env
.state
);
2571 unbindLocalStatic(env
, op
.loc3
);
2572 setLocRaw(env
, op
.loc3
, TRef
);
2575 void in(ISS
& env
, const bc::IterInitK
& op
) {
2576 auto const t1
= popC(env
);
2577 auto ity
= iter_types(t1
);
2578 if (!ity
.mayThrowOnInit
) nothrow(env
);
2580 auto const taken
= [&]{
2581 freeIter(env
, op
.iter1
);
2582 env
.propagate(op
.target
, env
.state
);
2585 auto const fallthrough
= [&]{
2586 setLoc(env
, op
.loc3
, ity
.value
);
2587 setLoc(env
, op
.loc4
, ity
.key
);
2588 setIter(env
, op
.iter1
, TrackedIter
{ std::move(ity
) });
2591 switch (ity
.count
) {
2592 case IterTypes::Count::Empty
:
2594 mayReadLocal(env
, op
.loc3
);
2595 mayReadLocal(env
, op
.loc4
);
2596 jmp_nofallthrough(env
);
2598 case IterTypes::Count::Single
:
2599 case IterTypes::Count::NonEmpty
:
2601 jmp_nevertaken(env
);
2603 case IterTypes::Count::ZeroOrOne
:
2604 case IterTypes::Count::Any
:
2611 void in(ISS
& env
, const bc::MIterInitK
& op
) {
2613 env
.propagate(op
.target
, env
.state
);
2614 unbindLocalStatic(env
, op
.loc3
);
2615 setLocRaw(env
, op
.loc3
, TRef
);
2616 setLoc(env
, op
.loc4
, TInitCell
);
2619 void in(ISS
& env
, const bc::WIterInit
& op
) {
2621 env
.propagate(op
.target
, env
.state
);
2622 // WIter* instructions may leave the value locals as either refs
2623 // or cells, depending whether the rhs of the assignment was a
2625 setLocRaw(env
, op
.loc3
, TInitGen
);
2628 void in(ISS
& env
, const bc::WIterInitK
& op
) {
2630 env
.propagate(op
.target
, env
.state
);
2631 setLocRaw(env
, op
.loc3
, TInitGen
);
2632 setLoc(env
, op
.loc4
, TInitCell
);
2635 void in(ISS
& env
, const bc::IterNext
& op
) {
2636 auto const curLoc3
= locRaw(env
, op
.loc3
);
2638 auto const noTaken
= match
<bool>(
2639 env
.state
.iters
[op
.iter1
],
2641 setLoc(env
, op
.loc3
, TInitCell
);
2644 [&] (const TrackedIter
& ti
) {
2645 if (!ti
.types
.mayThrowOnNext
) nothrow(env
);
2646 switch (ti
.types
.count
) {
2647 case IterTypes::Count::Single
:
2648 case IterTypes::Count::ZeroOrOne
:
2650 case IterTypes::Count::NonEmpty
:
2651 case IterTypes::Count::Any
:
2652 setLoc(env
, op
.loc3
, ti
.types
.value
);
2654 case IterTypes::Count::Empty
:
2655 always_assert(false);
2661 jmp_nevertaken(env
);
2662 freeIter(env
, op
.iter1
);
2666 env
.propagate(op
.target
, env
.state
);
2668 freeIter(env
, op
.iter1
);
2669 setLocRaw(env
, op
.loc3
, curLoc3
);
2672 void in(ISS
& env
, const bc::MIterNext
& op
) {
2673 env
.propagate(op
.target
, env
.state
);
2674 unbindLocalStatic(env
, op
.loc3
);
2675 setLocRaw(env
, op
.loc3
, TRef
);
2678 void in(ISS
& env
, const bc::IterNextK
& op
) {
2679 auto const curLoc3
= locRaw(env
, op
.loc3
);
2680 auto const curLoc4
= locRaw(env
, op
.loc4
);
2682 auto const noTaken
= match
<bool>(
2683 env
.state
.iters
[op
.iter1
],
2685 setLoc(env
, op
.loc3
, TInitCell
);
2686 setLoc(env
, op
.loc4
, TInitCell
);
2689 [&] (const TrackedIter
& ti
) {
2690 if (!ti
.types
.mayThrowOnNext
) nothrow(env
);
2691 switch (ti
.types
.count
) {
2692 case IterTypes::Count::Single
:
2693 case IterTypes::Count::ZeroOrOne
:
2695 case IterTypes::Count::NonEmpty
:
2696 case IterTypes::Count::Any
:
2697 setLoc(env
, op
.loc3
, ti
.types
.value
);
2698 setLoc(env
, op
.loc4
, ti
.types
.key
);
2700 case IterTypes::Count::Empty
:
2701 always_assert(false);
2707 jmp_nevertaken(env
);
2708 freeIter(env
, op
.iter1
);
2712 env
.propagate(op
.target
, env
.state
);
2714 freeIter(env
, op
.iter1
);
2715 setLocRaw(env
, op
.loc3
, curLoc3
);
2716 setLocRaw(env
, op
.loc4
, curLoc4
);
2719 void in(ISS
& env
, const bc::MIterNextK
& op
) {
2720 env
.propagate(op
.target
, env
.state
);
2721 unbindLocalStatic(env
, op
.loc3
);
2722 setLocRaw(env
, op
.loc3
, TRef
);
2723 setLoc(env
, op
.loc4
, TInitCell
);
2726 void in(ISS
& env
, const bc::WIterNext
& op
) {
2727 env
.propagate(op
.target
, env
.state
);
2728 setLocRaw(env
, op
.loc3
, TInitGen
);
2731 void in(ISS
& env
, const bc::WIterNextK
& op
) {
2732 env
.propagate(op
.target
, env
.state
);
2733 setLocRaw(env
, op
.loc3
, TInitGen
);
2734 setLoc(env
, op
.loc4
, TInitCell
);
2737 void in(ISS
& env
, const bc::IterFree
& op
) {
2739 freeIter(env
, op
.iter1
);
2741 void in(ISS
& env
, const bc::MIterFree
& op
) {
2743 freeIter(env
, op
.iter1
);
2745 void in(ISS
& env
, const bc::CIterFree
& op
) {
2747 freeIter(env
, op
.iter1
);
2750 void in(ISS
& env
, const bc::IterBreak
& op
) {
2751 for (auto& kv
: op
.iterTab
) freeIter(env
, kv
.second
);
2752 env
.propagate(op
.target
, env
.state
);
2756 * Any include/require (or eval) op kills all locals, and private properties.
2758 * We don't need to do anything for collect.publicStatics because we'll analyze
2759 * the included pseudo-main separately and see any effects it may have on
2762 void inclOpImpl(ISS
& env
) {
2768 push(env
, TInitCell
);
2771 void in(ISS
& env
, const bc::Incl
&) { inclOpImpl(env
); }
2772 void in(ISS
& env
, const bc::InclOnce
&) { inclOpImpl(env
); }
2773 void in(ISS
& env
, const bc::Req
&) { inclOpImpl(env
); }
2774 void in(ISS
& env
, const bc::ReqOnce
&) { inclOpImpl(env
); }
2775 void in(ISS
& env
, const bc::ReqDoc
&) { inclOpImpl(env
); }
2776 void in(ISS
& env
, const bc::Eval
&) { inclOpImpl(env
); }
2778 void in(ISS
& /*env*/, const bc::DefFunc
&) {}
2779 void in(ISS
& /*env*/, const bc::DefCls
&) {}
2780 void in(ISS
& /*env*/, const bc::DefClsNop
&) {}
2781 void in(ISS
& env
, const bc::AliasCls
&) {
2786 void in(ISS
& env
, const bc::DefCns
& op
) {
2787 auto const t
= popC(env
);
2788 if (options
.HardConstProp
) {
2789 auto const v
= tv(t
);
2790 auto const val
= v
&& tvAsCVarRef(&*v
).isAllowedAsConstantValue() ?
2791 *v
: make_tv
<KindOfUninit
>();
2792 auto const res
= env
.collect
.cnsMap
.emplace(op
.str1
, val
);
2794 if (res
.first
->second
.m_type
== kReadOnlyConstant
) {
2795 // we only saw a read of this constant
2796 res
.first
->second
= val
;
2798 // more than one definition in this function
2799 res
.first
->second
.m_type
= kDynamicConstant
;
2806 void in(ISS
& /*env*/, const bc::DefTypeAlias
&) {}
2808 void in(ISS
& env
, const bc::This
&) {
2809 if (thisAvailable(env
)) {
2810 return reduce(env
, bc::BareThis
{ BareThisOp::NeverNull
});
2812 auto const ty
= thisType(env
);
2813 push(env
, ty
? *ty
: TObj
);
2814 setThisAvailable(env
);
2817 void in(ISS
& env
, const bc::LateBoundCls
& op
) {
2818 auto const ty
= selfCls(env
);
2819 putClsRefSlot(env
, op
.slot
, ty
? *ty
: TCls
);
2822 void in(ISS
& env
, const bc::CheckThis
&) {
2823 if (thisAvailable(env
)) {
2824 reduce(env
, bc::Nop
{});
2826 setThisAvailable(env
);
2829 void in(ISS
& env
, const bc::BareThis
& op
) {
2830 if (thisAvailable(env
)) {
2831 if (op
.subop1
!= BareThisOp::NeverNull
) {
2832 return reduce(env
, bc::BareThis
{ BareThisOp::NeverNull
});
2836 auto const ty
= thisType(env
);
2837 switch (op
.subop1
) {
2838 case BareThisOp::Notice
:
2840 case BareThisOp::NoNotice
:
2843 case BareThisOp::NeverNull
:
2845 setThisAvailable(env
);
2846 return push(env
, ty
? *ty
: TObj
);
2849 push(env
, ty
? opt(*ty
) : TOptObj
);
2852 void in(ISS
& env
, const bc::InitThisLoc
& op
) {
2853 setLocRaw(env
, op
.loc1
, TCell
);
2856 void in(ISS
& env
, const bc::StaticLocDef
& op
) {
2857 if (staticLocHelper(env
, op
.loc1
, topC(env
))) {
2858 return reduce(env
, bc::SetL
{ op
.loc1
}, bc::PopC
{});
2863 void in(ISS
& env
, const bc::StaticLocCheck
& op
) {
2864 auto const l
= op
.loc1
;
2865 if (!env
.ctx
.func
->isMemoizeWrapper
&&
2866 !env
.ctx
.func
->isClosureBody
&&
2867 env
.collect
.localStaticTypes
.size() > l
) {
2868 auto t
= env
.collect
.localStaticTypes
[l
];
2869 if (auto v
= tv(t
)) {
2870 useLocalStatic(env
, l
);
2871 setLocRaw(env
, l
, t
);
2874 bc::SetL
{ op
.loc1
}, bc::PopC
{},
2878 setLocRaw(env
, l
, TGen
);
2879 maybeBindLocalStatic(env
, l
);
2883 void in(ISS
& env
, const bc::StaticLocInit
& op
) {
2884 if (staticLocHelper(env
, op
.loc1
, topC(env
))) {
2885 return reduce(env
, bc::SetL
{ op
.loc1
}, bc::PopC
{});
2891 * Amongst other things, we use this to mark units non-persistent.
2893 void in(ISS
& env
, const bc::OODeclExists
& op
) {
2894 auto flag
= popC(env
);
2895 auto name
= popC(env
);
2897 if (!name
.strictSubtypeOf(TStr
)) return TBool
;
2898 auto const v
= tv(name
);
2899 if (!v
) return TBool
;
2900 auto rcls
= env
.index
.resolve_class(env
.ctx
, v
->m_data
.pstr
);
2901 if (!rcls
|| !rcls
->cls()) return TBool
;
2902 auto const mayExist
= [&] () -> bool {
2903 switch (op
.subop1
) {
2904 case OODeclExistsOp::Class
:
2905 return !(rcls
->cls()->attrs
& (AttrInterface
| AttrTrait
));
2906 case OODeclExistsOp::Interface
:
2907 return rcls
->cls()->attrs
& AttrInterface
;
2908 case OODeclExistsOp::Trait
:
2909 return rcls
->cls()->attrs
& AttrTrait
;
2913 auto unit
= rcls
->cls()->unit
;
2914 auto canConstProp
= [&] {
2915 // Its generally not safe to constprop this, because of
2916 // autoload. We're safe if its part of systemlib, or a
2917 // superclass of the current context.
2918 if (is_systemlib_part(*unit
)) return true;
2919 if (!env
.ctx
.cls
) return false;
2920 auto thisClass
= env
.index
.resolve_class(env
.ctx
.cls
);
2921 return thisClass
.subtypeOf(*rcls
);
2923 if (canConstProp()) {
2925 return mayExist
? TTrue
: TFalse
;
2927 unit
->persistent
.store(false, std::memory_order_relaxed
);
2928 // At this point, if it mayExist, we still don't know that it
2929 // *does* exist, but if not we know that it either doesn't
2930 // exist, or it doesn't have the right type.
2931 return mayExist
? TBool
: TFalse
;
2935 void in(ISS
& env
, const bc::VerifyParamType
& op
) {
2936 if (env
.ctx
.func
->isMemoizeImpl
&&
2937 !locCouldBeRef(env
, op
.loc1
) &&
2938 RuntimeOption::EvalHardTypeHints
) {
2939 // a MemoizeImpl's params have already been checked by the wrapper
2940 return reduce(env
, bc::Nop
{});
2943 locAsCell(env
, op
.loc1
);
2944 if (!RuntimeOption::EvalHardTypeHints
) return;
2947 * In HardTypeHints mode, we assume that if this opcode doesn't
2948 * throw, the parameter was of the specified type (although it may
2949 * have been a Ref if the parameter was by reference).
2951 * The env.setLoc here handles dealing with a parameter that was
2952 * already known to be a reference.
2954 * NB: VerifyParamType of a reference parameter can kill any
2955 * references if it re-enters, even if Option::HardTypeHints is
2958 auto const constraint
= env
.ctx
.func
->params
[op
.loc1
].typeConstraint
;
2959 if (!RuntimeOption::EvalCheckThisTypeHints
&& constraint
.isThis()) {
2962 if (constraint
.hasConstraint() && !constraint
.isTypeVar() &&
2963 !constraint
.isTypeConstant()) {
2964 auto t
= env
.index
.lookup_constraint(env
.ctx
, constraint
);
2965 if (t
.subtypeOf(TBottom
)) unreachable(env
);
2966 FTRACE(2, " {} ({})\n", constraint
.fullName(), show(t
));
2967 setLoc(env
, op
.loc1
, std::move(t
));
2971 void in(ISS
& /*env*/, const bc::VerifyRetTypeV
& /*op*/) {}
2973 void in(ISS
& env
, const bc::VerifyRetTypeC
& /*op*/) {
2974 auto const constraint
= env
.ctx
.func
->retTypeConstraint
;
2975 auto const stackT
= topC(env
);
2977 // If there is no return type constraint, or if the return type
2978 // constraint is a typevar, or if the top of stack is the same
2979 // or a subtype of the type constraint, then this is a no-op.
2980 if (env
.index
.satisfies_constraint(env
.ctx
, stackT
, constraint
)) {
2981 reduce(env
, bc::Nop
{});
2985 // If CheckReturnTypeHints < 3 OR if the constraint is soft,
2986 // then there are no optimizations we can safely do here, so
2987 // just leave the top of stack as is.
2988 if (RuntimeOption::EvalCheckReturnTypeHints
< 3 || constraint
.isSoft()
2989 || (!RuntimeOption::EvalCheckThisTypeHints
&& constraint
.isThis())) {
2993 // If we reach here, then CheckReturnTypeHints >= 3 AND the constraint
2994 // is not soft. We can safely assume that either VerifyRetTypeC will
2995 // throw or it will produce a value whose type is compatible with the
2996 // return type constraint.
2998 remove_uninit(env
.index
.lookup_constraint(env
.ctx
, constraint
));
3000 if (tcT
.subtypeOf(TBottom
)) {
3005 // Below we compute retT, which is a rough conservative approximate of the
3006 // intersection of stackT and tcT.
3007 // TODO(4441939): We could do better if we had an intersect_of() function
3008 // that provided a formal way to compute the intersection of two Types.
3010 // If tcT could be an interface or trait, we upcast it to TObj/TOptObj.
3011 // Why? Because we want uphold the invariant that we only refine return
3012 // types and never widen them, and if we allow tcT to be an interface then
3013 // it's possible for violations of this invariant to arise. For an example,
3014 // see "hphp/test/slow/hhbbc/return-type-opt-bug.php".
3015 // Note: It's safe to use TObj/TOptObj because lookup_constraint() only
3016 // returns classes or interfaces or traits (it never returns something that
3017 // could be an enum or type alias) and it never returns anything that could
3018 // be a "magic" interface that supports non-objects. (For traits the return
3019 // typehint will always throw at run time, so it's safe to use TObj/TOptObj.)
3020 if (is_specialized_obj(tcT
) && dobj_of(tcT
).cls
.couldBeInterfaceOrTrait()) {
3021 tcT
= is_opt(tcT
) ? TOptObj
: TObj
;
3023 // If stackT is a subtype of tcT, use stackT. Otherwise, if tc is an opt
3024 // type and stackT cannot be InitNull, then we can safely use unopt(tcT).
3025 // In all other cases, use tcT.
3026 auto retT
= stackT
.subtypeOf(tcT
) ? stackT
:
3027 is_opt(tcT
) && !stackT
.couldBe(TInitNull
) ? unopt(tcT
) :
3030 // Update the top of stack with the rough conservative approximate of the
3031 // intersection of stackT and tcT
3033 push(env
, std::move(retT
));
3036 void in(ISS
& env
, const bc::Self
& op
) {
3037 auto self
= selfClsExact(env
);
3038 putClsRefSlot(env
, op
.slot
, self
? *self
: TCls
);
3041 void in(ISS
& env
, const bc::Parent
& op
) {
3042 auto parent
= parentClsExact(env
);
3043 putClsRefSlot(env
, op
.slot
, parent
? *parent
: TCls
);
3046 void in(ISS
& env
, const bc::CreateCl
& op
) {
3047 auto const nargs
= op
.arg1
;
3048 auto const clsPair
= env
.index
.resolve_closure_class(env
.ctx
, op
.arg2
);
3051 * Every closure should have a unique allocation site, but we may see it
3052 * multiple times in a given round of analyzing this function. Each time we
3053 * may have more information about the used variables; the types should only
3054 * possibly grow. If it's already there we need to merge the used vars in
3055 * with what we saw last time.
3058 std::vector
<Type
> usedVars(nargs
);
3059 for (auto i
= uint32_t{0}; i
< nargs
; ++i
) {
3060 usedVars
[nargs
- i
- 1] = popT(env
);
3062 merge_closure_use_vars_into(
3063 env
.collect
.closureUseTypes
,
3069 // Closure classes can be cloned and rescoped at runtime, so it's not safe to
3070 // assert the exact type of closure objects. The best we can do is assert
3071 // that it's a subclass of Closure.
3072 auto const closure
= env
.index
.builtin_class(s_Closure
.get());
3074 return push(env
, subObj(closure
));
3077 void in(ISS
& env
, const bc::CreateCont
& /*op*/) {
3078 // First resume is always next() which pushes null.
3079 push(env
, TInitNull
);
3082 void in(ISS
& env
, const bc::ContEnter
&) { popC(env
); push(env
, TInitCell
); }
3083 void in(ISS
& env
, const bc::ContRaise
&) { popC(env
); push(env
, TInitCell
); }
3085 void in(ISS
& env
, const bc::Yield
&) {
3087 push(env
, TInitCell
);
3090 void in(ISS
& env
, const bc::YieldK
&) {
3093 push(env
, TInitCell
);
3096 void in(ISS
& env
, const bc::ContAssignDelegate
&) {
3100 void in(ISS
& env
, const bc::ContEnterDelegate
&) {
3104 void in(ISS
& env
, const bc::YieldFromDelegate
&) {
3105 push(env
, TInitCell
);
3108 void in(ISS
& /*env*/, const bc::ContUnsetDelegate
&) {}
3110 void in(ISS
& /*env*/, const bc::ContCheck
&) {}
3111 void in(ISS
& env
, const bc::ContValid
&) { push(env
, TBool
); }
3112 void in(ISS
& env
, const bc::ContStarted
&) { push(env
, TBool
); }
3113 void in(ISS
& env
, const bc::ContKey
&) { push(env
, TInitCell
); }
3114 void in(ISS
& env
, const bc::ContCurrent
&) { push(env
, TInitCell
); }
3115 void in(ISS
& env
, const bc::ContGetReturn
&) { push(env
, TInitCell
); }
3117 void pushTypeFromWH(ISS
& env
, const Type t
) {
3118 if (!t
.couldBe(TObj
)) {
3119 // These opcodes require an object descending from WaitHandle.
3120 // Exceptions will be thrown for any non-object.
3125 // If we aren't even sure this is a wait handle, there's nothing we can
3126 // infer here. (This can happen if a user declares a class with a
3127 // getWaitHandle method that returns non-WaitHandle garbage.)
3128 if (!t
.subtypeOf(TObj
) || !is_specialized_wait_handle(t
)) {
3129 return push(env
, TInitCell
);
3132 auto inner
= wait_handle_inner(t
);
3133 if (inner
.subtypeOf(TBottom
)) {
3134 // If it's a WaitH<Bottom>, we know it's going to throw an exception, and
3135 // the fallthrough code is not reachable.
3141 push(env
, std::move(inner
));
3144 void in(ISS
& env
, const bc::WHResult
&) {
3145 pushTypeFromWH(env
, popC(env
));
3148 void in(ISS
& env
, const bc::Await
&) {
3149 pushTypeFromWH(env
, popC(env
));
3152 void in(ISS
& /*env*/, const bc::IncStat
&) {}
3154 void in(ISS
& env
, const bc::Idx
&) {
3155 popC(env
); popC(env
); popC(env
);
3156 push(env
, TInitCell
);
3159 void in(ISS
& env
, const bc::ArrayIdx
&) {
3160 popC(env
); popC(env
); popC(env
);
3161 push(env
, TInitCell
);
3164 void in(ISS
& env
, const bc::CheckProp
&) {
3165 if (env
.ctx
.cls
->attrs
& AttrNoOverride
) {
3166 return reduce(env
, bc::False
{});
3172 void in(ISS
& env
, const bc::InitProp
& op
) {
3173 auto const t
= topC(env
);
3174 switch (op
.subop2
) {
3175 case InitPropOp::Static
:
3176 mergeSelfProp(env
, op
.str1
, t
);
3177 if (auto c
= env
.collect
.publicStatics
) {
3178 auto const cls
= selfClsExact(env
);
3179 always_assert(!!cls
);
3180 c
->merge(env
.ctx
, *cls
, sval(op
.str1
), t
);
3183 case InitPropOp::NonStatic
:
3184 mergeThisProp(env
, op
.str1
, t
);
3187 auto const v
= tv(t
);
3188 if (v
|| !could_run_destructor(t
)) {
3189 for (auto& prop
: env
.ctx
.func
->cls
->properties
) {
3190 if (prop
.name
== op
.str1
) {
3191 ITRACE(1, "InitProp: {} = {}\n", op
.str1
, show(t
));
3192 prop
.attrs
= (Attr
)(prop
.attrs
& ~AttrDeepInit
);
3195 if (op
.subop2
== InitPropOp::Static
&&
3196 !env
.collect
.publicStatics
&&
3197 !env
.index
.frozen()) {
3198 env
.index
.fixup_public_static(env
.ctx
.func
->cls
, prop
.name
, t
);
3200 return reduce(env
, bc::PopC
{});
3207 void in(ISS
& env
, const bc::Silence
& op
) {
3209 switch (op
.subop2
) {
3210 case SilenceOp::Start
:
3211 setLoc(env
, op
.loc1
, TInt
);
3213 case SilenceOp::End
:
3218 void in(ISS
& /*emv*/, const bc::VarEnvDynCall
&) {}
3221 //////////////////////////////////////////////////////////////////////
3223 void dispatch(ISS
& env
, const Bytecode
& op
) {
3224 #define O(opcode, ...) case Op::opcode: interp_step::in(env, op.opcode); return;
3225 switch (op
.op
) { OPCODES
}
3230 //////////////////////////////////////////////////////////////////////
3232 template<class Iterator
, class... Args
>
3233 void group(ISS
& env
, Iterator
& it
, Args
&&... args
) {
3234 FTRACE(2, " {}\n", [&]() -> std::string
{
3235 auto ret
= std::string
{};
3236 for (auto i
= size_t{0}; i
< sizeof...(Args
); ++i
) {
3237 ret
+= " " + show(env
.ctx
.func
, it
[i
]);
3238 if (i
!= sizeof...(Args
) - 1) ret
+= ';';
3242 it
+= sizeof...(Args
);
3243 return interp_step::group(env
, std::forward
<Args
>(args
)...);
3246 template<class Iterator
>
3247 void interpStep(ISS
& env
, Iterator
& it
, Iterator stop
) {
3249 * During the analysis phase, we analyze some common bytecode
3250 * patterns involving conditional jumps as groups to be able to
3251 * add additional information to the type environment depending on
3252 * whether the branch is taken or not.
3254 auto const o1
= it
->op
;
3255 auto const o2
= it
+ 1 != stop
? it
[1].op
: Op::Nop
;
3256 auto const o3
= it
+ 1 != stop
&&
3257 it
+ 2 != stop
? it
[2].op
: Op::Nop
;
3260 case Op::InstanceOfD
:
3263 return group(env
, it
, it
[0].InstanceOfD
, it
[1].JmpZ
);
3265 return group(env
, it
, it
[0].InstanceOfD
, it
[1].JmpNZ
);
3271 case Op::JmpZ
: return group(env
, it
, it
[0].IsTypeL
, it
[1].JmpZ
);
3272 case Op::JmpNZ
: return group(env
, it
, it
[0].IsTypeL
, it
[1].JmpNZ
);
3278 case Op::JmpZ
: return group(env
, it
, it
[0].IsUninit
, it
[1].JmpZ
);
3279 case Op::JmpNZ
: return group(env
, it
, it
[0].IsUninit
, it
[1].JmpNZ
);
3288 return group(env
, it
, it
[0].Dup
, it
[1].IsTypeC
, it
[2].JmpZ
);
3290 return group(env
, it
, it
[0].Dup
, it
[1].IsTypeC
, it
[2].JmpNZ
);
3294 case Op::InstanceOfD
:
3297 return group(env
, it
, it
[0].Dup
, it
[1].InstanceOfD
, it
[2].JmpZ
);
3299 return group(env
, it
, it
[0].Dup
, it
[1].InstanceOfD
, it
[2].JmpNZ
);
3311 return group(env
, it
, it
[0].MemoGet
, it
[1].IsUninit
, it
[2].JmpZ
);
3313 return group(env
, it
, it
[0].MemoGet
, it
[1].IsUninit
, it
[2].JmpNZ
);
3320 case Op::StaticLocCheck
:
3323 return group(env
, it
, it
[0].StaticLocCheck
, it
[1].JmpZ
);
3325 return group(env
, it
, it
[0].StaticLocCheck
, it
[1].JmpNZ
);
3332 FTRACE(2, " {}\n", show(env
.ctx
.func
, *it
));
3333 dispatch(env
, *it
++);
3336 template<class Iterator
>
3337 StepFlags
interpOps(Interp
& interp
,
3338 Iterator
& iter
, Iterator stop
,
3339 PropagateFn propagate
) {
3340 auto flags
= StepFlags
{};
3341 ISS env
{ interp
, flags
, propagate
};
3343 // If there are factored edges, make a copy of the state (except
3344 // stacks) in case we need to propagate across factored exits (if
3346 auto const stateBefore
= interp
.blk
->factoredExits
.empty()
3348 : without_stacks(interp
.state
);
3350 auto const numPushed
= iter
->numPush();
3351 interpStep(env
, iter
, stop
);
3353 auto fix_const_outputs
= [&] {
3354 auto elems
= &interp
.state
.stack
.back();
3355 constexpr auto numCells
= 4;
3356 Cell cells
[numCells
];
3359 while (i
< numPushed
) {
3361 auto const v
= tv(elems
->type
);
3362 if (!v
) return false;
3364 } else if (!is_scalar(elems
->type
)) {
3370 while (++elems
, i
--) {
3371 elems
->type
= from_cell(i
< numCells
?
3372 cells
[i
] : *tv(elems
->type
));
3377 if (options
.ConstantProp
&& flags
.canConstProp
&& fix_const_outputs()) {
3379 FTRACE(2, " nothrow (due to constprop)\n");
3380 flags
.wasPEI
= false;
3382 if (!flags
.effectFree
) {
3383 FTRACE(2, " effect_free (due to constprop)\n");
3384 flags
.effectFree
= true;
3388 assertx(!flags
.effectFree
|| !flags
.wasPEI
);
3390 FTRACE(2, " PEI.\n");
3391 for (auto factored
: interp
.blk
->factoredExits
) {
3392 propagate(factored
, stateBefore
);
3398 //////////////////////////////////////////////////////////////////////
3400 RunFlags
run(Interp
& interp
, PropagateFn propagate
) {
3402 FTRACE(2, "out {}{}\n",
3403 state_string(*interp
.ctx
.func
, interp
.state
, interp
.collect
),
3404 property_state_string(interp
.collect
.props
));
3407 auto ret
= RunFlags
{};
3408 auto const stop
= end(interp
.blk
->hhbcs
);
3409 auto iter
= begin(interp
.blk
->hhbcs
);
3410 while (iter
!= stop
) {
3411 auto const flags
= interpOps(interp
, iter
, stop
, propagate
);
3412 if (interp
.collect
.effectFree
&& !flags
.effectFree
) {
3413 interp
.collect
.effectFree
= false;
3416 if (flags
.usedLocalStatics
) {
3417 if (!ret
.usedLocalStatics
) {
3418 ret
.usedLocalStatics
= std::move(flags
.usedLocalStatics
);
3420 for (auto& elm
: *flags
.usedLocalStatics
) {
3421 ret
.usedLocalStatics
->insert(std::move(elm
));
3426 if (interp
.state
.unreachable
) {
3427 FTRACE(2, " <bytecode fallthrough is unreachable>\n");
3431 switch (flags
.jmpFlag
) {
3432 case StepFlags::JmpFlags::Taken
:
3433 FTRACE(2, " <took branch; no fallthrough>\n");
3435 case StepFlags::JmpFlags::Fallthrough
:
3436 case StepFlags::JmpFlags::Either
:
3439 if (flags
.returned
) {
3440 FTRACE(2, " returned {}\n", show(*flags
.returned
));
3441 always_assert(iter
== stop
);
3442 always_assert(interp
.blk
->fallthrough
== NoBlockId
);
3443 ret
.returned
= flags
.returned
;
3448 FTRACE(2, " <end block>\n");
3449 if (interp
.blk
->fallthrough
!= NoBlockId
) {
3450 propagate(interp
.blk
->fallthrough
, interp
.state
);
3455 StepFlags
step(Interp
& interp
, const Bytecode
& op
) {
3456 auto flags
= StepFlags
{};
3457 auto noop
= [] (BlockId
, const State
&) {};
3458 ISS env
{ interp
, flags
, noop
};
3463 void default_dispatch(ISS
& env
, const Bytecode
& op
) {
3467 //////////////////////////////////////////////////////////////////////