/*
   +----------------------------------------------------------------------+
   | HipHop for PHP                                                       |
   +----------------------------------------------------------------------+
   | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com)  |
   +----------------------------------------------------------------------+
   | This source file is subject to version 3.01 of the PHP license,      |
   | that is bundled with this package in the file LICENSE, and is        |
   | available through the world-wide-web at the following url:           |
   | http://www.php.net/license/3_01.txt                                  |
   | If you did not receive a copy of the PHP license and are unable to   |
   | obtain it through the world-wide-web, please send a note to          |
   | license@php.net so we can mail you a copy immediately.               |
   +----------------------------------------------------------------------+
*/
#include "hphp/hhbbc/optimize.h"

#include <vector>
#include <string>
#include <utility>
#include <algorithm>
#include <cassert>
#include <bitset>

#include <boost/dynamic_bitset.hpp>

#include <folly/Optional.h>
#include <folly/gen/Base.h>
#include <folly/gen/String.h>

#include "hphp/util/trace.h"
#include "hphp/util/match.h"

#include "hphp/runtime/vm/hhbc.h"
#include "hphp/runtime/base/datatype.h"

#include "hphp/hhbbc/hhbbc.h"
#include "hphp/hhbbc/analyze.h"
#include "hphp/hhbbc/cfg.h"
#include "hphp/hhbbc/cfg-opts.h"
#include "hphp/hhbbc/dce.h"
#include "hphp/hhbbc/func-util.h"
#include "hphp/hhbbc/interp.h"
#include "hphp/hhbbc/interp-internal.h"
#include "hphp/hhbbc/interp-state.h"
#include "hphp/hhbbc/misc.h"
#include "hphp/hhbbc/options-util.h"
#include "hphp/hhbbc/peephole.h"
#include "hphp/hhbbc/representation.h"
#include "hphp/hhbbc/type-system.h"
#include "hphp/hhbbc/unit-util.h"
namespace HPHP { namespace HHBBC {

//////////////////////////////////////////////////////////////////////

namespace {

//////////////////////////////////////////////////////////////////////

const StaticString s_86pinit("86pinit");
const StaticString s_86sinit("86sinit");
const StaticString s_86linit("86linit");

//////////////////////////////////////////////////////////////////////

/*
 * For filtering assertions, some opcodes are considered to have no
 * use for a stack input assertion.
 *
 * For now this is restricted to opcodes that do literally nothing.
 */
bool ignoresStackInput(Op op) {
  switch (op) {
  case Op::UGetCUNop:
  case Op::CGetCUNop:
  case Op::PopU:
    return true;
  default:
    return false;
  }
  not_reached();
}
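
// A minimal illustrative sketch (not in the original source): an input
// assertion in front of one of these opcodes can never help, e.g.
//
//   AssertRATStk 0 Uninit
//   PopU
//
// since PopU discards its input without inspecting it, so the filter
// drops the assertion.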

template<class TyBC, class ArgType>
folly::Optional<Bytecode> makeAssert(ArrayTypeTable::Builder& arrTable,
                                     ArgType arg,
                                     Type t) {
  if (t.subtypeOf(BBottom)) return folly::none;
  auto const rat = make_repo_type(arrTable, t);
  using T = RepoAuthType::Tag;
  if (options.FilterAssertions) {
    // Gen and InitGen don't add any useful information, so leave them
    // out entirely.
    if (rat == RepoAuthType{T::Gen})     return folly::none;
    if (rat == RepoAuthType{T::InitGen}) return folly::none;
  }
  return Bytecode { TyBC { arg, rat } };
}

template<class Gen>
void insert_assertions_step(ArrayTypeTable::Builder& arrTable,
                            const php::Func& func,
                            const Bytecode& bcode,
                            const State& state,
                            std::bitset<kMaxTrackedLocals> mayReadLocalSet,
                            std::vector<uint8_t> obviousStackOutputs,
                            Gen gen) {
  if (state.unreachable) return;

  for (LocalId i = 0; i < state.locals.size(); ++i) {
    if (func.locals[i].killed) continue;
    if (options.FilterAssertions) {
      if (i < mayReadLocalSet.size() && !mayReadLocalSet.test(i)) {
        continue;
      }
    }
    auto const realT = state.locals[i];
    auto const op = makeAssert<bc::AssertRATL>(arrTable, i, realT);
    if (op) gen(*op);
  }

  if (!options.InsertStackAssertions) return;

  assert(obviousStackOutputs.size() == state.stack.size());

  auto const assert_stack = [&] (size_t idx) {
    assert(idx < state.stack.size());
    if (obviousStackOutputs[state.stack.size() - idx - 1]) return;
    if (ignoresStackInput(bcode.op)) return;
    auto const realT = state.stack[state.stack.size() - idx - 1].type;
    auto const flav  = stack_flav(realT);

    assert(realT.subtypeOf(BBottom) || !realT.subtypeOf(BCls));
    if (options.FilterAssertions && !realT.strictSubtypeOf(flav)) {
      return;
    }

    auto const op =
      makeAssert<bc::AssertRATStk>(
        arrTable,
        static_cast<uint32_t>(idx),
        realT
      );
    if (op) gen(*op);
  };

  /*
   * This doesn't need to account for ActRecs on the fpiStack, because
   * no instruction in an FPI region can ever consume a stack value
   * from above the pre-live ActRec.
   */
  for (auto i = size_t{0}; i < bcode.numPop(); ++i) assert_stack(i);

  // The base instructions are special in that they don't pop anything, but do
  // read from the stack. We want type assertions on the stack slots they'll
  // read.
  switch (bcode.op) {
    case Op::BaseC:  assert_stack(bcode.BaseC.arg1);  break;
    case Op::BaseGC: assert_stack(bcode.BaseGC.arg1); break;
    case Op::BaseSC: assert_stack(bcode.BaseSC.arg1); break;
    case Op::Dim: {
      switch (bcode.Dim.mkey.mcode) {
        case MEC: case MPC:
          assert_stack(bcode.Dim.mkey.idx);
          break;
        case MW:  case MEL: case MPL: case MEI:
        case MET: case MPT: case MQT:
          break;
      }
    }
    default: break;
  }
}
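
// An illustrative sketch (not in the original source) of the output: if
// analysis proves local $x is an Int and the top of stack is a Str before
// some instruction that reads both, this step emits roughly
//
//   AssertRATL $x Int
//   AssertRATStk 0 Str
//   <the original instruction>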

/*
 * When filter assertions is on, we use this to avoid putting stack
 * assertions on some "obvious" instructions.
 *
 * These are instructions that push an output type that is always the
 * same, i.e. where an AssertT is never going to add information.
 * (E.g. "Int 3" obviously pushes an Int, and the JIT has no trouble
 * figuring that out, so there's no reason to assert about it.)
 *
 * TODO(#3676101): It turns out many hhbc opcodes have known output
 * types---there are some super polymorphic ones, but many always do
 * bools or objects, etc.  We might consider making stack flavors have
 * subtypes and adding this to the opcode table.
 */
bool hasObviousStackOutput(const Bytecode& op, const Interp& interp) {
  // Generally consider CGetL obvious because if we knew the type of the local,
  // we'll assert that right before the CGetL.
  auto cgetlObvious = [&] (LocalId l, int idx) {
    return !interp.state.locals[l].couldBe(BRef) ||
      !interp.state.stack[interp.state.stack.size() - idx - 1].
         type.strictSubtypeOf(TInitCell);
  };
  switch (op.op) {
  case Op::Box:
  case Op::Null:
  case Op::NullUninit:
  case Op::True:
  case Op::False:
  case Op::Int:
  case Op::Double:
  case Op::String:
  case Op::Array:
  case Op::Dict:
  case Op::Vec:
  case Op::Keyset:
  case Op::NewArray:
  case Op::NewDArray:
  case Op::NewDictArray:
  case Op::NewPackedArray:
  case Op::NewVArray:
  case Op::NewStructArray:
  case Op::NewStructDArray:
  case Op::NewStructDict:
  case Op::NewVecArray:
  case Op::NewKeysetArray:
  case Op::AddNewElemC:
  case Op::AddNewElemV:
  case Op::NewCol:
  case Op::NewPair:
  case Op::ClsRefName:
  case Op::File:
  case Op::Dir:
  case Op::Concat:
  case Op::Not:
  case Op::Xor:
  case Op::Same:
  case Op::NSame:
  case Op::Eq:
  case Op::Neq:
  case Op::Lt:
  case Op::Gt:
  case Op::Lte:
  case Op::Gte:
  case Op::Cmp:
  case Op::Shl:
  case Op::Shr:
  case Op::CastBool:
  case Op::CastInt:
  case Op::CastDouble:
  case Op::CastString:
  case Op::CastArray:
  case Op::CastObject:
  case Op::CastDict:
  case Op::CastVec:
  case Op::CastKeyset:
  case Op::CastVArray:
  case Op::CastDArray:
  case Op::DblAsBits:
  case Op::InstanceOfD:
  case Op::IsLateBoundCls:
  case Op::IsTypeStructC:
  case Op::AsTypeStructC:
  case Op::CombineAndResolveTypeStruct:
  case Op::RecordReifiedGeneric:
  case Op::ReifiedName:
  case Op::InstanceOf:
  case Op::Print:
  case Op::Exit:
  case Op::AKExists:
  case Op::IssetL:
  case Op::IssetG:
  case Op::IssetS:
  case Op::EmptyL:
  case Op::EmptyG:
  case Op::EmptyS:
  case Op::IsTypeC:
  case Op::IsTypeL:
  case Op::OODeclExists:
  case Op::AliasCls:
    return true;

  case Op::This:
  case Op::BareThis:
    if (auto tt = thisType(interp.index, interp.ctx)) {
      auto t = interp.state.stack.back().type;
      if (is_opt(t)) t = unopt(std::move(t));
      return !t.strictSubtypeOf(*tt);
    }
    return true;

  case Op::CGetL:
    return cgetlObvious(op.CGetL.loc1, 0);
  case Op::CGetQuietL:
    return cgetlObvious(op.CGetQuietL.loc1, 0);
  case Op::CUGetL:
    return cgetlObvious(op.CUGetL.loc1, 0);
  case Op::CGetL2:
    return cgetlObvious(op.CGetL2.loc1, 1);
  case Op::PushL:
    return cgetlObvious(op.PushL.loc1, 0);

  // The output of SetL is obvious if you know what its input is
  // (which we'll assert if we know).
  case Op::SetL:
    return true;

  // The output of SetM isn't quite as obvious as SetL, but the jit
  // can work it out from the input just as well as hhbbc (if not better).
  case Op::SetM:
    return true;

  default:
    return false;
  }
}
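
// For example (an illustrative note, not in the original source): "Int 3"
// always pushes an Int, so asserting its output adds nothing, while a
// CGetL of a local that could be a Ref may push a more refined type than
// the JIT can see, so it is not treated as obvious.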

void insert_assertions(const Index& index,
                       const FuncAnalysis& ainfo,
                       CollectedInfo& collect,
                       BlockId bid,
                       State state) {
  BytecodeVec newBCs;
  auto& cblk = ainfo.ctx.func->blocks[bid];
  newBCs.reserve(cblk->hhbcs.size());

  auto& arrTable = *index.array_table_builder();
  auto const ctx = ainfo.ctx;

  std::vector<uint8_t> obviousStackOutputs(state.stack.size(), false);

  auto interp = Interp { index, ctx, collect, bid, cblk.get(), state };
  for (auto& op : cblk->hhbcs) {
    FTRACE(2, "  == {}\n", show(ctx.func, op));

    auto gen = [&] (const Bytecode& newb) {
      newBCs.push_back(newb);
      newBCs.back().srcLoc = op.srcLoc;
      FTRACE(2, "   + {}\n", show(ctx.func, newBCs.back()));
    };

    if (state.unreachable) {
      if (cblk->fallthrough != NoBlockId || cblk->unwindExits.size()) {
        auto const blk = cblk.mutate();
        blk->fallthrough = NoBlockId;
        blk->unwindExits = {};
      }
      if (!(instrFlags(op.op) & TF)) {
        gen(bc::BreakTraceHint {});
        gen(bc::String { s_unreachable.get() });
        gen(bc::Fatal { FatalOp::Runtime });
      }
      break;
    }

    auto const preState = state;
    auto const flags = step(interp, op);

    insert_assertions_step(
      arrTable,
      *ctx.func,
      op,
      preState,
      flags.mayReadLocalSet,
      obviousStackOutputs,
      gen
    );

    if (op.op == Op::CGetL2) {
      obviousStackOutputs.emplace(obviousStackOutputs.end() - 1,
                                  hasObviousStackOutput(op, interp));
    } else {
      for (int i = 0; i < op.numPop(); i++) {
        obviousStackOutputs.pop_back();
      }
      for (auto i = op.numPush(); i--; ) {
        obviousStackOutputs.emplace_back(hasObviousStackOutput(op, interp));
      }
    }

    gen(op);
  }

  if (cblk->hhbcs != newBCs) cblk.mutate()->hhbcs = std::move(newBCs);
}

bool persistence_check(const php::Block* const blk) {
  for (auto& op : blk->hhbcs) {
    switch (op.op) {
      case Op::Nop:
      case Op::DefCls:
      case Op::DefClsNop:
      case Op::DefCns:
      case Op::DefTypeAlias:
      case Op::Null:
      case Op::True:
      case Op::False:
      case Op::Int:
      case Op::Double:
      case Op::String:
      case Op::Vec:
      case Op::Dict:
      case Op::Keyset:
      case Op::Array:
        continue;
      case Op::PopC:
        // Not strictly no-side effects, but as long as the rest of
        // the unit is limited to the above, we're fine (and we expect
        // one following a DefCns).
        continue;
      case Op::RetC:
        continue;
      default:
        return false;
    }
  }
  return true;
}
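
// An illustrative sketch (not in the original source) of a unit body that
// passes this check and so can stay persistent:
//
//   DefCls 0
//   Int 1
//   RetC
//
// Any opcode outside the list above makes the unit non-persistent.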

//////////////////////////////////////////////////////////////////////

template<class Gen>
bool propagate_constants(const Bytecode& op, State& state, Gen gen) {
  auto const numPop  = op.numPop();
  auto const numPush = op.numPush();
  auto const stkSize = state.stack.size();
  constexpr auto numCells = 4;
  Cell constVals[numCells];

  // All outputs of the instruction must have constant types for this
  // to be allowed.
  for (auto i = size_t{0}; i < numPush; ++i) {
    auto const& ty = state.stack[stkSize - i - 1].type;
    if (i < numCells) {
      auto const v = tv(ty);
      if (!v) return false;
      constVals[i] = *v;
    } else if (!is_scalar(ty)) {
      return false;
    }
  }

  auto const slot = visit(op, ReadClsRefSlotVisitor{});
  if (slot != NoClsRefSlotId) gen(bc::DiscardClsRef { slot });

  // Pop the inputs, and push the constants.
  for (auto i = size_t{0}; i < numPop; ++i) {
    switch (op.popFlavor(i)) {
    case Flavor::C:  gen(bc::PopC {}); break;
    case Flavor::V:  gen(bc::PopV {}); break;
    case Flavor::U:  not_reached();    break;
    case Flavor::CU:
      // We only support C's for CU right now.
      gen(bc::PopC {});
      break;
    case Flavor::CV: not_reached();    break;
    case Flavor::CVU:
      // Note that we only support C's for CVU so far (this only comes up with
      // FCallBuiltin)---we'll fail the verifier if something changes to send
      // V's or U's through here.
      gen(bc::PopC {});
      break;
    }
  }

  for (auto i = size_t{0}; i < numPush; ++i) {
    auto const v = i < numCells ?
      constVals[i] : *tv(state.stack[stkSize - i - 1].type);
    gen(gen_constant(v));
    state.stack[stkSize - i - 1].type = from_cell(v);
  }

  return true;
}

bool propagate_constants(const Bytecode& bc, State& state,
                         BytecodeVec& out) {
  return propagate_constants(bc, state, [&] (const Bytecode& bc) {
      out.push_back(bc);
    });
}
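
// An illustrative sketch (not in the original source): if both inputs of
// an Add are known constants Int 2 and Int 3, its output is the constant
// Int 5, so the instruction is replaced with
//
//   PopC
//   PopC
//   Int 5
//
// i.e. pop the inputs and push the precomputed result.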

//////////////////////////////////////////////////////////////////////

/*
 * Create a block similar to another block (but with no bytecode in it yet).
 */
BlockId make_block(FuncAnalysis& ainfo,
                   const php::Block* srcBlk,
                   const State& state) {
  FTRACE(1, " ++ new block {}\n", ainfo.ctx.func->blocks.size());
  assert(ainfo.bdata.size() == ainfo.ctx.func->blocks.size());

  auto newBlk      = copy_ptr<php::Block>{php::Block{}};
  auto const blk   = newBlk.mutate();
  blk->section     = srcBlk->section;
  blk->exnNodeId   = srcBlk->exnNodeId;
  blk->throwExits  = srcBlk->throwExits;
  blk->unwindExits = srcBlk->unwindExits;
  auto const bid   = ainfo.ctx.func->blocks.size();
  ainfo.ctx.func->blocks.push_back(std::move(newBlk));

  ainfo.rpoBlocks.push_back(bid);
  ainfo.bdata.push_back(FuncAnalysis::BlockData {
    static_cast<uint32_t>(ainfo.rpoBlocks.size() - 1),
    state
  });

  return bid;
}

BlockId make_fatal_block(FuncAnalysis& ainfo,
                         const php::Block* srcBlk,
                         const State& state) {
  auto bid = make_block(ainfo, srcBlk, state);
  auto const blk = ainfo.ctx.func->blocks[bid].mutate();
  auto const srcLoc = srcBlk->hhbcs.back().srcLoc;
  blk->hhbcs = {
    bc_with_loc(srcLoc, bc::String { s_unreachable.get() }),
    bc_with_loc(srcLoc, bc::Fatal { FatalOp::Runtime })
  };
  blk->fallthrough = NoBlockId;
  blk->throwExits  = {};
  blk->unwindExits = {};
  blk->exnNodeId = NoExnNodeId;
  return bid;
}
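
// The generated block is simply (an illustrative note, not in the
// original source):
//
//   String "unreachable"
//   Fatal Runtime
//
// with no fall-through or exits, so control that reaches it always fatals.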

void first_pass(const Index& index,
                FuncAnalysis& ainfo,
                CollectedInfo& collect,
                BlockId bid,
                State state) {
  auto const ctx = ainfo.ctx;

  auto interp = Interp {
    index, ctx, collect, bid, ctx.func->blocks[bid].get(), state
  };

  BytecodeVec newBCs;
  newBCs.reserve(interp.blk->hhbcs.size());

  if (options.ConstantProp) collect.propagate_constants = propagate_constants;

  auto peephole = make_peephole(newBCs, index, ctx);
  std::vector<PeepholeStackElem> srcStack(state.stack.size());

  for (size_t idx = 0, end = interp.blk->hhbcs.size(); idx != end; ) {
    // mutate can change blocks[bid], but that should only ever happen
    // on the last bytecode.
    assertx(interp.blk == ctx.func->blocks[bid].get());
    auto const& op = interp.blk->hhbcs[idx++];
    FTRACE(2, "  == {}\n", show(ctx.func, op));

    if (options.Peephole) {
      peephole.prestep(op, srcStack, state.stack);
    }

    auto const flags = step(interp, op);

    auto gen = [&] (const Bytecode& bc) {
      auto newBC = bc;
      newBC.srcLoc = op.srcLoc;
      FTRACE(2, "   + {}\n", show(ctx.func, newBC));
      if (options.Peephole) {
        peephole.append(
          newBC,
          !flags.wasPEI &&
          !any(collect.opts & CollectionOpts::TrackConstantArrays),
          srcStack, state.stack);
      } else {
        newBCs.push_back(newBC);
      }
    };

    // The peephole wants the old values of srcStack, so defer the update to
    // the end of the loop.
    SCOPE_EXIT {
      // If we're on the last bytecode, there's no need to update
      // srcStack, and some opcodes (eg MemoGet) push differently on
      // the taken path vs the non-taken path.
      if (idx == end) return;
      if (op.op == Op::CGetL2) {
        srcStack.emplace(srcStack.end() - 1,
                         op.op, (state.stack.end() - 2)->type.subtypeOf(BStr));
        // The CGetL was the last thing to write this too (even though
        // it just copied it).
        srcStack.back().op = op.op;
      } else {
        FTRACE(2, "   srcStack: pop {} push {}\n", op.numPop(), op.numPush());
        for (int i = 0; i < op.numPop(); i++) {
          srcStack.pop_back();
        }
        for (int i = 0; i < op.numPush(); i++) {
          srcStack.emplace_back(
            op.op, state.stack[srcStack.size()].type.subtypeOf(BStr));
        }
      }
    };

    auto genOut = [&] (const Bytecode* op) -> Op {
      if (options.ConstantProp && flags.canConstProp) {
        if (propagate_constants(*op, state, gen)) {
          assert(!flags.strengthReduced);
          return Op::Nop;
        }
      }

      if (flags.strengthReduced) {
        for (auto const& bc : *flags.strengthReduced) {
          gen(bc);
        }
        return flags.strengthReduced->back().op;
      }

      gen(*op);
      return op->op;
    };

    if (state.unreachable) {
      // We should still perform the requested transformations; we
      // might be part way through converting an FPush/FCall to an
      // FCallBuiltin, for example.
      auto opc = genOut(&op);
      if (interp.blk->fallthrough != NoBlockId ||
          interp.blk->unwindExits.size()) {
        auto const blk = ctx.func->blocks[bid].mutate();
        blk->fallthrough = NoBlockId;
        blk->unwindExits = {};
      }
      if (!(instrFlags(opc) & TF)) {
        gen(bc::BreakTraceHint {});
        gen(bc::String { s_unreachable.get() });
        gen(bc::Fatal { FatalOp::Runtime });
      }
      break;
    }

    if (idx != end || !options.RemoveDeadBlocks) {
      genOut(&op);
      continue;
    }

    assertx(!interp.state.speculatedIsUnconditional ||
            interp.state.speculated != NoBlockId);

    auto speculate = [&] (BlockId new_target, auto fun) {
      auto bc = fun();
      if (bc) {
        if (bc->op != Op::Nop) gen(*bc);
        if (interp.state.speculatedPops) {
          auto const blk = ctx.func->blocks[bid].mutate();
          auto const new_bid = make_block(ainfo, blk, interp.state);
          auto fixer = [&] (BlockId& t) {
            if (t == new_target) t = new_bid;
          };
          forEachTakenEdge(*bc, fixer);
          fixer(blk->fallthrough);
          auto const new_block = ctx.func->blocks[new_bid].mutate();
          new_block->fallthrough = new_target;
          new_block->hhbcs.insert(new_block->hhbcs.end(),
                                  interp.state.speculatedPops,
                                  bc::PopC {});
          auto& newState = ainfo.bdata[new_bid].stateIn;
          newState.speculated = NoBlockId;
          newState.speculatedPops = 0;
          newState.speculatedIsUnconditional = false;
          newState.speculatedIsFallThrough = false;
          return;
        }
      }
      for (auto i = 0; i < interp.state.speculatedPops; i++) {
        gen(bc::PopC {});
      }
    };

    auto const jmpDest =
      interp.state.speculatedIsUnconditional ?
      interp.state.speculated : flags.jmpDest;

    if (jmpDest == NoBlockId) {
      genOut(&op);
      if (interp.state.speculated != NoBlockId &&
          interp.state.speculatedIsFallThrough) {
        assertx(interp.blk->fallthrough != NoBlockId);
        speculate(interp.state.speculated,
                  [&] () -> folly::Optional<Bytecode> {
                    FTRACE(2,
                           "Speculated fallthrough: B{}->B{}({})\n",
                           bid,
                           interp.state.speculated,
                           interp.state.speculatedPops);
                    ctx.func->blocks[bid].mutate()->fallthrough =
                      interp.state.speculated;
                    if (interp.state.speculatedPops) return { bc::Nop{} };
                    return folly::none;
                  });
      }
      break;
    }

    auto const wasFallThrough =
      interp.state.speculatedIsUnconditional ?
      interp.state.speculatedIsFallThrough : jmpDest == interp.blk->fallthrough;

    switch (op.op) {
      /*
       * For jumps, we need to pop the cell that was on the stack
       * for the conditional jump.  Note: for jumps this also
       * conceptually needs to execute any side effects a conversion
       * to bool can have.  (Currently that is none.)
       */
      case Op::JmpNZ:
      case Op::JmpZ:
      case Op::SSwitch:
      case Op::Switch:
        always_assert(!flags.wasPEI);
        speculate(
          jmpDest,
          [&] () -> folly::Optional<Bytecode> {
            gen(bc::PopC {});
            ctx.func->blocks[bid].mutate()->fallthrough = jmpDest;
            return folly::none;
          }
        );
        break;
      case Op::IterInit:
      case Op::LIterInit:
      case Op::IterInitK:
      case Op::LIterInitK:
        speculate(
          jmpDest,
          [&] () -> folly::Optional<Bytecode> {
            auto iterOp = op;
            auto set_target = [&] (BlockId t) {
              if (iterOp.op == Op::IterInit) {
                iterOp.IterInit.target2 = t;
              } else if (iterOp.op == Op::IterInitK) {
                iterOp.IterInitK.target2 = t;
              } else if (iterOp.op == Op::LIterInit) {
                iterOp.LIterInit.target3 = t;
              } else if (iterOp.op == Op::LIterInitK) {
                iterOp.LIterInitK.target3 = t;
              } else {
                assertx(false);
              }
            };
            if (!wasFallThrough) {
              /*
               * For iterators, if we'll always take the taken branch
               * (which means there's nothing to iterate over), and the
               * op cannot raise an exception, we can just pop the input
               * and set the fall-through to the taken branch.  If not,
               * we have to keep the op, but we can make sure we'll
               * fatal if we ever actually take the fall-through.
               */
              if (!flags.wasPEI) {
                if (op.op != Op::LIterInit && op.op != Op::LIterInitK) {
                  gen(bc::PopC {});
                }
                ctx.func->blocks[bid].mutate()->fallthrough = jmpDest;
                return folly::none;
              }
              set_target(jmpDest);
              auto const blk = ctx.func->blocks[bid].mutate();
              blk->fallthrough = make_fatal_block(ainfo, blk, state);
              return iterOp;
            }
            /*
             * We can't ever optimize away iteration initialization if
             * we know we'll always fall-through (which means we enter
             * the loop) because we need to initialize the
             * iterator.  We can ensure, however, that the taken branch
             * is a fatal.
             */
            auto const blk = ctx.func->blocks[bid].mutate();
            set_target(make_fatal_block(ainfo, blk, state));
            blk->fallthrough = jmpDest;
            return iterOp;
          }
        );
        break;
      case Op::IterNext:
      case Op::IterNextK:
      case Op::LIterNext:
      case Op::LIterNextK:
        assertx(wasFallThrough);
        speculate(
          jmpDest,
          [&] () -> folly::Optional<Bytecode> {
            /*
             * If we're nexting an iterator and we know we'll always
             * fall-through (which means the iteration is over), and we
             * can't raise an exception when nexting the iterator, we
             * can just free the iterator and let it fall-through.  If
             * not, we can at least ensure the taken branch is a fatal.
             */
            auto const blk = ctx.func->blocks[bid].mutate();
            blk->fallthrough = jmpDest;
            if (!flags.wasPEI) {
              if (op.op == Op::IterNext) {
                gen(bc::IterFree { op.IterNext.iter1 });
              } else if (op.op == Op::IterNextK) {
                gen(bc::IterFree { op.IterNextK.iter1 });
              } else if (op.op == Op::LIterNext) {
                gen(bc::LIterFree { op.LIterNext.iter1, op.LIterNext.loc2 });
              } else {
                gen(
                  bc::LIterFree { op.LIterNextK.iter1, op.LIterNextK.loc2 }
                );
              }
              blk->multiSucc = false;
              return folly::none;
            }
            auto const fatal = make_fatal_block(ainfo, blk, state);
            auto iterOp = op;
            if (iterOp.op == Op::IterNext) {
              iterOp.IterNext.target2 = fatal;
            } else if (iterOp.op == Op::IterNextK) {
              iterOp.IterNextK.target2 = fatal;
            } else if (iterOp.op == Op::LIterNext) {
              iterOp.LIterNext.target3 = fatal;
            } else {
              iterOp.LIterNextK.target3 = fatal;
            }
            return iterOp;
          }
        );
        break;
      case Op::MemoGet:
      case Op::MemoGetEager:
        speculate(
          jmpDest,
          [&] () -> folly::Optional<Bytecode> {
            auto const blk = ctx.func->blocks[bid].mutate();
            if (!wasFallThrough) {
              if (!flags.wasPEI) {
                blk->fallthrough = jmpDest;
                return folly::none;
              }
              auto iterOp = op;
              if (iterOp.op == Op::MemoGet) {
                iterOp.MemoGet.target1 = jmpDest;
              } else {
                assertx(jmpDest != iterOp.MemoGetEager.target2);
                iterOp.MemoGetEager.target1 = jmpDest;
              }
              blk->fallthrough = make_fatal_block(ainfo, blk, state);
              return iterOp;
            }
            blk->fallthrough = jmpDest;
            return op;
          }
        );
        break;
      default:
        always_assert(0 && "unsupported jmpDest");
    }

    // if we inserted extra blocks, we might have invalidated cblk;
    // even without that, mutating it means our iterator is into the
    // original block, not the new one.
    break;
  }

  if (options.Peephole) {
    peephole.finalize();
  }

  // note cblk might be dead
  if (ctx.func->blocks[bid]->hhbcs != newBCs) {
    ctx.func->blocks[bid].mutate()->hhbcs = std::move(newBCs);
  }

  auto& fpiStack = ainfo.bdata[bid].stateIn.fpiStack;
  auto it = std::remove_if(fpiStack.begin(), fpiStack.end(),
                           [](const ActRec& ar) {
                             return ar.kind == FPIKind::Builtin || ar.foldable;
                           });

  if (it != fpiStack.end()) {
    fpiStack.erase(it, fpiStack.end());
  }
}
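
// An illustrative sketch (not in the original source) of the dead-branch
// handling above: if a block ends in a JmpZ whose input is statically
// known, the jump's direction is decided, so it is rewritten as a PopC
// and the block's fall-through is pointed at the one reachable successor;
// the untaken target is then left for dead-block removal.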

//////////////////////////////////////////////////////////////////////

template<class BlockContainer, class AInfo, class Fun>
void visit_blocks_impl(const char* what,
                       const Index& index,
                       AInfo& ainfo,
                       CollectedInfo& collect,
                       const BlockContainer& rpoBlocks,
                       Fun&& fun) {

  BlockId curBlk = NoBlockId;
  SCOPE_ASSERT_DETAIL(what) {
    if (curBlk == NoBlockId) return std::string{"\nNo block processed\n"};
    return folly::sformat(
      "block #{}\nin-{}", curBlk,
      state_string(*ainfo.ctx.func, ainfo.bdata[curBlk].stateIn, collect)
    );
  };

  FTRACE(1, "|---- {}\n", what);
  for (auto const bid : rpoBlocks) {
    curBlk = bid;
    FTRACE(2, "block #{}\n", bid);
    auto const& state = ainfo.bdata[bid].stateIn;
    if (!state.initialized) {
      FTRACE(2, "   unreachable\n");
      continue;
    }
    // TODO(#3732260): this should probably spend an extra interp pass
    // in debug builds to check that no transformation to the bytecode
    // was made that changes the block output state.
    fun(index, ainfo, collect, bid, state);
  }
  assert(check(*ainfo.ctx.func));
}

template<class Fun>
void visit_blocks_mutable(const char* what,
                          const Index& index,
                          FuncAnalysis& ainfo,
                          CollectedInfo& collect,
                          Fun&& fun) {
  // Make a copy of the block list so it can be mutated by the visitor.
  auto const blocksCopy = ainfo.rpoBlocks;
  visit_blocks_impl(what, index, ainfo, collect,
                    blocksCopy, std::forward<Fun>(fun));
}

template<class Fun>
void visit_blocks(const char* what,
                  const Index& index,
                  const FuncAnalysis& ainfo,
                  CollectedInfo& collect,
                  Fun&& fun) {
  visit_blocks_impl(what, index, ainfo, collect,
                    ainfo.rpoBlocks, std::forward<Fun>(fun));
}

//////////////////////////////////////////////////////////////////////

IterId iterFromInit(const php::Func& func, BlockId initBlock) {
  auto const& op = func.blocks[initBlock]->hhbcs.back();
  if (op.op == Op::IterInit)   return op.IterInit.iter1;
  if (op.op == Op::IterInitK)  return op.IterInitK.iter1;
  if (op.op == Op::LIterInit)  return op.LIterInit.iter1;
  if (op.op == Op::LIterInitK) return op.LIterInitK.iter1;
  always_assert(false);
}

/*
 * Attempt to convert normal iterators into liters. In order for an iterator to
 * be converted to a liter, the following needs to be true:
 *
 * - The iterator is initialized with the value in a local at exactly one block.
 *
 * - That same local is not modified on all possible paths from the
 *   initialization to every usage of that iterator.
 *
 * The first condition is actually more restrictive than necessary, but
 * enforcing that the iterator is initialized at exactly one place simplifies
 * the bookkeeping and is always true with how we currently emit bytecode.
 */
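
// An illustrative sketch (not in the original source): a loop that
// initializes its iterator from an unmodified local, e.g.
//
//   CGetL $arr
//   IterInit 0 L
//
// can be rewritten (by the fixups below) as
//
//   PopC
//   LIterInit 0 $arr L
//
// so the iterator reads its base directly from the local.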

struct OptimizeIterState {
  void operator()(const Index& index,
                  const FuncAnalysis& ainfo,
                  CollectedInfo& collect,
                  BlockId bid,
                  State state) {
    auto const ctx = ainfo.ctx;
    auto const blk = ctx.func->blocks[bid].get();
    auto interp = Interp { index, ctx, collect, bid, blk, state };
    for (uint32_t opIdx = 0; opIdx < blk->hhbcs.size(); ++opIdx) {
      // If we've already determined that nothing is eligible, we can just stop.
      if (!eligible.any()) break;

      auto const& op = blk->hhbcs[opIdx];
      FTRACE(2, "  == {}\n", show(ctx.func, op));

      if (state.unreachable) break;

      // At every op, we check the known state of all live iterators and mark
      // them as ineligible as necessary.
      for (IterId it = 0; it < state.iters.size(); ++it) {
        match<void>(
          state.iters[it],
          [] (DeadIter) {},
          [&] (const LiveIter& ti) {
            FTRACE(4, "   iter {: <2}  :: {}\n",
                   it, show(*ctx.func, state.iters[it]));
            // The init block is unknown. This can only happen if there's more
            // than one block where this iterator was initialized. This makes
            // tracking the iteration loop ambiguous, and can't happen with how
            // we currently emit bytecode, so just pessimize everything.
            if (ti.initBlock == NoBlockId) {
              FTRACE(2, "   - pessimize all\n");
              eligible.clear();
              return;
            }
            // Otherwise, if the iterator doesn't have an equivalent local,
            // either it was never initialized with a local to begin with, or
            // that local got changed within the loop. Either way, this
            // iteration loop isn't eligible.
            if (eligible[ti.initBlock] && ti.baseLocal == NoLocalId) {
              FTRACE(2, "   - blk:{} ineligible\n", ti.initBlock);
              eligible[ti.initBlock] = false;
            }
          }
        );
      }

      auto const fixupForInit = [&] {
        auto const base = topStkLocal(state);
        if (base == NoLocalId && eligible[bid]) {
          FTRACE(2, "   - blk:{} ineligible\n", bid);
          eligible[bid] = false;
        }
        fixups.emplace_back(Fixup{bid, opIdx, bid, base});
        FTRACE(2, "   + fixup ({})\n", fixups.back().show(*ctx.func));
      };

      auto const fixupFromState = [&] (IterId it) {
        match<void>(
          state.iters[it],
          [] (DeadIter) {},
          [&] (const LiveIter& ti) {
            if (ti.initBlock != NoBlockId) {
              assertx(iterFromInit(*ctx.func, ti.initBlock) == it);
              fixups.emplace_back(
                Fixup{bid, opIdx, ti.initBlock, ti.baseLocal}
              );
              FTRACE(2, "   + fixup ({})\n", fixups.back().show(*ctx.func));
            }
          }
        );
      };

      // Record a fixup for this iteration op. This iteration loop may not be
      // ultimately eligible, but we'll check that before actually doing the
      // transformation.
      switch (op.op) {
        case Op::IterInit:
        case Op::IterInitK:
          assertx(opIdx == blk->hhbcs.size() - 1);
          fixupForInit();
          break;
        case Op::IterNext:
          fixupFromState(op.IterNext.iter1);
          break;
        case Op::IterNextK:
          fixupFromState(op.IterNextK.iter1);
          break;
        case Op::IterFree:
          fixupFromState(op.IterFree.iter1);
          break;
        case Op::IterBreak:
          for (auto const& it : op.IterBreak.iterTab) {
            if (it.kind == KindOfIter) fixupFromState(it.id);
          }
          break;
        default:
          break;
      }

      step(interp, op);
    }
  }

  // We identify iteration loops by the block of the initialization op (which we
  // enforce is exactly one block). A fixup describes a transformation to an
  // iteration instruction which must be applied only if its associated loop is
  // eligible.
  struct Fixup {
    BlockId block;  // Block of the op
    uint32_t op;    // Index into the block of the op
    BlockId init;   // Block of the loop's initializer
    LocalId base;   // Invariant base of the iterator

    std::string show(const php::Func& f) const {
      return folly::sformat(
        "blk:{},{},blk:{},{}",
        block, op, init,
        base != NoLocalId ? local_string(f, base) : "-"
      );
    }
  };
  std::vector<Fixup> fixups;
  // All of the associated iterator operations within an iterator loop can be
  // optimized to liter if the iterator's initialization block is eligible.
  boost::dynamic_bitset<> eligible;
};

void optimize_iterators(const Index& index,
                        const FuncAnalysis& ainfo,
                        CollectedInfo& collect) {
  auto const func = ainfo.ctx.func;
  // Quick exit. If there's no iterators, or if no associated local survives to
  // the end of the iterator, there's nothing to do.
  if (!func->numIters || !ainfo.hasInvariantIterBase) return;

  OptimizeIterState state;
  // Everything starts out as eligible. We'll remove initialization blocks as we
  // go.
  state.eligible.resize(func->blocks.size(), true);

  // Visit all the blocks and build up the fixup state.
  visit_blocks("optimize_iterators", index, ainfo, collect, state);
  if (!state.eligible.any()) return;

  FTRACE(2, "Rewrites:\n");
  for (auto const& fixup : state.fixups) {
    auto& cblk = func->blocks[fixup.block];
    auto const& op = cblk->hhbcs[fixup.op];

    if (!state.eligible[fixup.init]) {
      // This iteration loop isn't eligible, so don't apply the fixup
      FTRACE(2, "   * ({}): {}\n", fixup.show(*func), show(func, op));
      continue;
    }

    BytecodeVec newOps;
    assertx(fixup.base != NoLocalId);

    // Rewrite the iteration op to its liter equivalent:
    switch (op.op) {
      case Op::IterInit:
        newOps = {
          bc_with_loc(op.srcLoc, bc::PopC {}),
          bc_with_loc(
            op.srcLoc,
            bc::LIterInit {
              op.IterInit.iter1,
              fixup.base,
              op.IterInit.target2,
              op.IterInit.loc3
            }
          )
        };
        break;
      case Op::IterInitK:
        newOps = {
          bc_with_loc(op.srcLoc, bc::PopC {}),
          bc_with_loc(
            op.srcLoc,
            bc::LIterInitK {
              op.IterInitK.iter1,
              fixup.base,
              op.IterInitK.target2,
              op.IterInitK.loc3,
              op.IterInitK.loc4
            }
          )
        };
        break;
      case Op::IterNext:
        newOps = {
          bc_with_loc(
            op.srcLoc,
            bc::LIterNext {
              op.IterNext.iter1,
              fixup.base,
              op.IterNext.target2,
              op.IterNext.loc3
            }
          )
        };
        break;
      case Op::IterNextK:
        newOps = {
          bc_with_loc(
            op.srcLoc,
            bc::LIterNextK {
              op.IterNextK.iter1,
              fixup.base,
              op.IterNextK.target2,
              op.IterNextK.loc3,
              op.IterNextK.loc4
            }
          )
        };
        break;
      case Op::IterFree:
        newOps = {
          bc_with_loc(
            op.srcLoc,
            bc::LIterFree { op.IterFree.iter1, fixup.base }
          )
        };
        break;
      case Op::IterBreak: {
        auto const iter = iterFromInit(*func, fixup.init);
        newOps = { op };
        for (auto& it : newOps.back().IterBreak.iterTab) {
          if (it.id == iter) {
            assertx(it.kind == KindOfIter);
            it.kind = KindOfLIter;
            it.local = fixup.base;
          }
        }
        break;
      }
      default:
        always_assert(false);
    }

    FTRACE(
      2, "    ({}): {} ==> {}\n",
      fixup.show(*func), show(func, op),
      [&] {
        using namespace folly::gen;
        return from(newOps)
          | map([&] (const Bytecode& bc) { return show(func, bc); })
          | unsplit<std::string>(",");
      }()
    );

    auto const blk = cblk.mutate();
    blk->hhbcs.erase(blk->hhbcs.begin() + fixup.op);
    blk->hhbcs.insert(blk->hhbcs.begin() + fixup.op,
                      newOps.begin(), newOps.end());
  }

  FTRACE(10, "{}", show(*func));
}

//////////////////////////////////////////////////////////////////////

/*
 * Use the information in the index to resolve a type-constraint to its
 * underlying type, if possible.
 */
void fixTypeConstraint(const Index& index, TypeConstraint& tc) {
  if (!tc.isCheckable() || !tc.isObject() || tc.isResolved()) return;

  if (interface_supports_non_objects(tc.typeName())) return;
  auto const resolved = index.resolve_type_name(tc.typeName());

  assertx(!RuntimeOption::EvalHackArrDVArrs ||
          (resolved.type != AnnotType::VArray &&
           resolved.type != AnnotType::DArray));

  if (resolved.type == AnnotType::Object) {
    if (!resolved.value || !resolved.value->resolved()) return;
    // Can't resolve if it resolves to a magic interface. If we mark it as
    // resolved, we'll think it's an object and not do the special magic
    // interface checks at runtime.
    if (interface_supports_non_objects(resolved.value->name())) return;
    if (!resolved.value->couldHaveMockedDerivedClass()) tc.setNoMockObjects();
  }

  tc.resolveType(resolved.type, tc.isNullable() || resolved.nullable);
  FTRACE(1, "Retype tc {} -> {}\n", tc.typeName(), tc.displayName());
}
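
// An illustrative note (not in the original source): for a parameter
// constraint like "Foo $x", if the index resolves Foo to a unique class
// that isn't a magic interface, the constraint is marked resolved and,
// when no mocked subclass can exist, flagged to skip mock-object checks.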

//////////////////////////////////////////////////////////////////////

void do_optimize(const Index& index, FuncAnalysis&& ainfo, bool isFinal) {
  FTRACE(2, "{:-^70} {}\n", "Optimize Func", ainfo.ctx.func->name);

  bool again;
  folly::Optional<CollectedInfo> collect;
  auto collectionOpts = isFinal ?
    CollectionOpts::TrackConstantArrays : CollectionOpts{};

  collect.emplace(
    index, ainfo.ctx, nullptr,
    collectionOpts, &ainfo
  );

  optimize_iterators(index, ainfo, *collect);

  do {
    again = false;
    visit_blocks_mutable("first pass", index, ainfo, *collect, first_pass);

    FTRACE(10, "{}", show(*ainfo.ctx.func));
    /*
     * Note: it's useful to do dead block removal before DCE, so it can remove
     * code relating to the branch to the dead block.
     */
    remove_unreachable_blocks(ainfo);

    if (options.LocalDCE) {
      visit_blocks("local DCE", index, ainfo, *collect, local_dce);
    }
    if (options.GlobalDCE) {
      global_dce(index, ainfo);
      again = control_flow_opts(ainfo);
      assert(check(*ainfo.ctx.func));
      /*
       * Global DCE can change types of locals across blocks.  See
       * dce.cpp for an explanation.
       *
       * We need to perform a final type analysis before we do
       * anything else.
       */
      ainfo = analyze_func(index, ainfo.ctx, collectionOpts);
      collect.emplace(
        index, ainfo.ctx, nullptr,
        collectionOpts, &ainfo
      );
      if (!again) {
        for (auto& bd : ainfo.bdata) {
          if (bd.stateIn.speculated != NoBlockId) {
            again = true;
            break;
          }
        }
      }
    }

    // If we merged blocks, there could be new optimization opportunities
  } while (again);

  if (!isFinal) return;

  auto const func = ainfo.ctx.func;
  if (func->name == s_86pinit.get() ||
      func->name == s_86sinit.get() ||
      func->name == s_86linit.get()) {
    auto const& blk = *func->blocks[func->mainEntry];
    if (blk.hhbcs.size() == 2 &&
        blk.hhbcs[0].op == Op::Null &&
        blk.hhbcs[1].op == Op::RetC) {
      FTRACE(2, "Erasing {}::{}\n", func->cls->name, func->name);
      func->cls->methods.erase(
        std::find_if(func->cls->methods.begin(),
                     func->cls->methods.end(),
                     [&](const std::unique_ptr<php::Func>& f) {
                       return f.get() == func;
                     }));
      return;
    }
  }

  auto pseudomain = is_pseudomain(func);
  func->attrs = (pseudomain ||
                 func->attrs & AttrInterceptable ||
                 ainfo.mayUseVV) ?
    Attr(func->attrs | AttrMayUseVV) : Attr(func->attrs & ~AttrMayUseVV);

  if (pseudomain && func->unit->persistent.load(std::memory_order_relaxed)) {
    auto persistent = true;
    visit_blocks("persistence check", index, ainfo, *collect,
                 [&] (const Index&,
                      const FuncAnalysis&,
                      CollectedInfo&,
                      BlockId bid,
                      const State&) {
                   auto const blk = ainfo.ctx.func->blocks[bid].get();
                   if (persistent && !persistence_check(blk)) {
                     persistent = false;
                   }
                 });
    if (!persistent) {
      func->unit->persistent.store(persistent, std::memory_order_relaxed);
    }
  }

  if (options.InsertAssertions) {
    visit_blocks("insert assertions", index, ainfo, *collect,
                 insert_assertions);
  }

  if (RuntimeOption::EvalHardTypeHints) {
    for (auto& p : func->params) fixTypeConstraint(index, p.typeConstraint);
  }

  if (RuntimeOption::EvalCheckReturnTypeHints >= 3) {
    fixTypeConstraint(index, func->retTypeConstraint);
  }
}
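
// An illustrative note (not in the original source) on why the loop above
// runs to a fixed point: one transformation can expose another, e.g.
// constant propagation can make a conditional jump's direction known,
// first_pass then drops the dead branch, remove_unreachable_blocks
// deletes the target, and control_flow_opts may merge the survivors,
// which can in turn enable more constant propagation.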

//////////////////////////////////////////////////////////////////////

}

//////////////////////////////////////////////////////////////////////

Bytecode gen_constant(const Cell& cell) {
  switch (cell.m_type) {
    case KindOfUninit:
      return bc::NullUninit {};
    case KindOfNull:
      return bc::Null {};
    case KindOfBoolean:
      if (cell.m_data.num) {
        return bc::True {};
      } else {
        return bc::False {};
      }
    case KindOfInt64:
      return bc::Int { cell.m_data.num };
    case KindOfDouble:
      return bc::Double { cell.m_data.dbl };
    case KindOfString:
      assert(cell.m_data.pstr->isStatic());
    case KindOfPersistentString:
      return bc::String { cell.m_data.pstr };
    case KindOfVec:
      assert(cell.m_data.parr->isStatic());
    case KindOfPersistentVec:
      assert(cell.m_data.parr->isVecArray());
      return bc::Vec { cell.m_data.parr };
    case KindOfDict:
      assert(cell.m_data.parr->isStatic());
    case KindOfPersistentDict:
      assert(cell.m_data.parr->isDict());
      return bc::Dict { cell.m_data.parr };
    case KindOfKeyset:
      assert(cell.m_data.parr->isStatic());
    case KindOfPersistentKeyset:
      assert(cell.m_data.parr->isKeyset());
      return bc::Keyset { cell.m_data.parr };
    case KindOfShape:
    case KindOfPersistentShape:
      not_implemented();
    case KindOfArray:
      assert(cell.m_data.parr->isStatic());
    case KindOfPersistentArray:
      assert(cell.m_data.parr->isPHPArray());
      return bc::Array { cell.m_data.parr };

    case KindOfRef:
    case KindOfResource:
    case KindOfObject:
    case KindOfFunc:
    case KindOfClass:
    case KindOfClsMeth:
    case KindOfRecord: // TODO(arnabde)
      always_assert(0 && "invalid constant in propagate_constants");
  }
  not_reached();
}
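
// An illustrative usage note (not in the original source): for an int
// cell with value 5, gen_constant returns bc::Int { 5 }; this is what
// propagate_constants calls to materialize each known output after the
// PopC sequence.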

void optimize_func(const Index& index, FuncAnalysis&& ainfo, bool isFinal) {
  auto const bump = trace_bump_for(ainfo.ctx.cls, ainfo.ctx.func);

  SCOPE_ASSERT_DETAIL("optimize_func") {
    return "Optimizing:" + show(ainfo.ctx);
  };

  Trace::Bump bumper1{Trace::hhbbc, bump};
  Trace::Bump bumper2{Trace::hhbbc_cfg, bump};
  Trace::Bump bumper3{Trace::hhbbc_dce, bump};
  do_optimize(index, std::move(ainfo), isFinal);
}

//////////////////////////////////////////////////////////////////////

void optimize_class_prop_type_hints(const Index& index, Context ctx) {
  assertx(!ctx.func);
  auto const bump = trace_bump_for(ctx.cls, nullptr);
  Trace::Bump bumper{Trace::hhbbc, bump};
  for (auto& prop : ctx.cls->properties) {
    fixTypeConstraint(
      index,
      const_cast<TypeConstraint&>(prop.typeConstraint)
    );
  }
}

//////////////////////////////////////////////////////////////////////

}}