Remove dead includes in hphp/runtime/vm
[hiphop-php.git] / hphp / runtime / vm / jit / ir-builder.cpp
blob95d861ecd0bba623d690d71530939377a4981716
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
17 #include "hphp/runtime/vm/jit/ir-builder.h"
19 #include <algorithm>
20 #include <utility>
22 #include <folly/ScopeGuard.h>
24 #include "hphp/util/assertions.h"
25 #include "hphp/util/trace.h"
27 #include "hphp/runtime/base/rds.h"
28 #include "hphp/runtime/vm/jit/analysis.h"
29 #include "hphp/runtime/vm/jit/guard-constraint.h"
30 #include "hphp/runtime/vm/jit/ir-unit.h"
31 #include "hphp/runtime/vm/jit/mutation.h"
32 #include "hphp/runtime/vm/jit/print.h"
33 #include "hphp/runtime/vm/jit/punt.h"
34 #include "hphp/runtime/vm/jit/simple-propagation.h"
35 #include "hphp/runtime/vm/jit/simplify.h"
36 #include "hphp/runtime/vm/jit/translator.h"
38 namespace HPHP { namespace jit { namespace irgen {
40 namespace {
42 TRACE_SET_MOD(hhir);
43 using Trace::Indent;
45 ///////////////////////////////////////////////////////////////////////////////
/*
 * Look up `key` in `m` and return a reference to the mapped value, asserting
 * that the key is present.  The map must outlive the returned reference.
 *
 * The key is taken by const reference: the original took it by value, which
 * needlessly copies non-trivial key types for a read-only lookup.
 */
template<typename M>
const typename M::mapped_type& get_required(const M& m,
                                            const typename M::key_type& key) {
  auto const it = m.find(key);
  always_assert(it != m.end());
  return it->second;
}
/*
 * Forward the source of a redundant guard.  For value-level guards
 * (AssertType/CheckType) return src(0) so the caller can use it directly;
 * location-level guards (Loc/Stk/MBase variants) have no SSA value to
 * forward, so the instruction is converted to a Nop and nullptr is returned.
 */
SSATmp* fwdGuardSource(IRInstruction* inst) {
  if (inst->is(AssertType, CheckType)) return inst->src(0);

  assertx(inst->is(AssertLoc, CheckLoc,
                   AssertStk, CheckStk,
                   AssertMBase, CheckMBase));
  inst->convertToNop();
  return nullptr;
}
/*
 * Returns true iff `inst` is a LdMem whose address, traced through any
 * passthrough instructions, comes from LdMBase (i.e. a load of the member
 * base).
 */
bool isMBaseLoad(const IRInstruction* inst) {
  if (!inst->is(LdMem)) return false;
  auto src = inst->src(0)->inst();
  while (src->isPassthrough()) src = src->getPassthroughValue()->inst();
  return src->is(LdMBase);
}
72 ///////////////////////////////////////////////////////////////////////////////
76 ///////////////////////////////////////////////////////////////////////////////
// Construct an IRBuilder for `unit`, positioned at the unit's entry block
// with tracked frame state initialized from `initMarker`.
IRBuilder::IRBuilder(IRUnit& unit, const BCMarker& initMarker)
  : m_unit(unit)
  , m_initialMarker(initMarker)
  , m_curBCContext{initMarker, 0}
  , m_state(initMarker)
  , m_curBlock(m_unit.entry())
{
  // Simplification is only honored when HHIR gen-time opts are enabled.
  if (RuntimeOption::EvalHHIRGenOpts) {
    m_enableSimplification = RuntimeOption::EvalHHIRSimplification;
  }
  // Start tracking state at the entry block; it has no unprocessed preds.
  m_state.startBlock(m_curBlock, false);
}
// Turn on guard constraining for this unit.
void IRBuilder::enableConstrainGuards() {
  m_constrainGuards = true;
}

// Whether guard constraining is currently enabled.
bool IRBuilder::shouldConstrainGuards() const {
  return m_constrainGuards;
}
/*
 * Append `inst` to the current block, recording guard-constraining side
 * tables when enabled, lazily starting a new block if the current one already
 * ends in a block-end instruction, and updating tracked frame state.
 */
void IRBuilder::appendInstruction(IRInstruction* inst) {
  FTRACE(1, " append {}\n", inst->toString());
  assertx(inst->marker().valid());
  if (inst->is(Nop, DefConst)) return;

  if (shouldConstrainGuards()) {
    // Map the instruction to the Location it guards/loads, if any.
    auto const l = [&]() -> folly::Optional<Location> {
      switch (inst->op()) {
        case AssertLoc:
        case CheckLoc:
        case LdLoc:
          return loc(inst->extra<LocalId>()->locId);

        case AssertStk:
        case CheckStk:
        case LdStk:
          return stk(inst->extra<IRSPRelOffsetData>()->offset);

        case AssertMBase:
        case CheckMBase:
          return folly::make_optional<Location>(Location::MBase{});

        case LdMem:
          // Only LdMems that load through LdMBase act on a trackable location.
          return isMBaseLoad(inst)
            ? folly::make_optional<Location>(Location::MBase{})
            : folly::none;

        default:
          return folly::none;
      }
      not_reached();
    }();

    // If we're constraining guards, some instructions need certain information
    // to be recorded in side tables.
    if (l) {
      m_constraints.typeSrcs[inst] = m_state.typeSrcsOf(*l);
      if (!inst->is(LdLoc, LdStk) && !isMBaseLoad(inst)) {
        constrainLocation(*l, DataTypeGeneric, "appendInstruction");
        m_constraints.prevTypes[inst] = m_state.typeOf(*l);
      }
    }
  }

  // If the block isn't empty, check if we need to create a new block.
  if (!m_curBlock->empty()) {
    auto& prev = m_curBlock->back();
    if (prev.isBlockEnd()) {
      assertx(!prev.isTerminal());
      // The previous instruction ends the block but doesn't terminate the
      // trace: fall through into a freshly created block.
      m_curBlock = m_unit.defBlock(prev.block()->profCount());
      m_curBlock->setHint(prev.block()->hint());
      prev.setNext(m_curBlock);
      m_state.finishBlock(prev.block());
      FTRACE(2, "lazily appending B{}\n", m_curBlock->id());
      m_state.startBlock(m_curBlock, false);
    }
  }

  assertx((m_curBlock->empty() || !m_curBlock->back().isBlockEnd()) &&
          "Can't append an instruction after a BlockEnd instruction");
  m_curBlock->push_back(inst);
  m_state.update(inst);
  if (inst->isTerminal()) m_state.finishBlock(m_curBlock);
}
164 ///////////////////////////////////////////////////////////////////////////////
/*
 * Pre-optimize a Check on location `l`: check an already-known value
 * directly, or drop the check entirely when the tracked type already implies
 * the checked type.
 */
SSATmp* IRBuilder::preOptimizeCheckLocation(IRInstruction* inst, Location l) {
  // If we have a value for the location, emit a CheckType on the value
  // instead and nop out the location-based check.
  if (auto const prevValue = valueOf(l, DataTypeGeneric)) {
    gen(CheckType, inst->typeParam(), inst->taken(), prevValue);
    inst->convertToNop();
    return nullptr;
  }

  auto const oldType = typeOf(l, DataTypeGeneric);
  auto const newType = oldType & inst->typeParam();

  if (oldType <= newType) {
    // The type of the src is the same or more refined than type, so the guard
    // is unnecessary.
    return fwdGuardSource(inst);
  }

  return nullptr;
}
// Thin wrappers dispatching the three location-flavored checks to
// preOptimizeCheckLocation().
SSATmp* IRBuilder::preOptimizeCheckLoc(IRInstruction* inst) {
  return preOptimizeCheckLocation(inst, loc(inst->extra<CheckLoc>()->locId));
}

SSATmp* IRBuilder::preOptimizeCheckStk(IRInstruction* inst) {
  return preOptimizeCheckLocation(inst, stk(inst->extra<CheckStk>()->offset));
}

SSATmp* IRBuilder::preOptimizeCheckMBase(IRInstruction* inst) {
  return preOptimizeCheckLocation(inst, Location::MBase{});
}
/*
 * Common logic for eliminating redundant AssertType-style instructions.
 * `oldType` is the previously-known type, `oldVal` the known value (may be
 * nullptr), and `srcInst` the instruction defining the source (may be
 * nullptr).
 */
SSATmp* IRBuilder::preOptimizeAssertTypeOp(IRInstruction* inst,
                                           const Type oldType,
                                           SSATmp* oldVal,
                                           const IRInstruction* srcInst) {
  ITRACE(3, "preOptimizeAssertTypeOp({}, {}, {}, {})\n",
         *inst, oldType,
         oldVal ? oldVal->toString() : "nullptr",
         srcInst ? srcInst->toString() : "nullptr");

  auto const newType = oldType & inst->typeParam();

  // Eliminate this AssertTypeOp if:
  // 1) oldType is at least as good as newType and:
  //    a) typeParam == Cell
  //    b) oldVal is from a DefConst
  //    c) oldType.hasConstVal()
  //    The AssertType will never be useful for guard constraining in these
  //    situations.
  // 2) The source instruction is known to be another assert that's at least
  //    as good as this one.
  if ((oldType <= newType &&
       (inst->typeParam() == TCell ||
        (oldVal && oldVal->inst()->is(DefConst)) ||
        oldType.hasConstVal())) ||
      (srcInst &&
       srcInst->is(AssertType, AssertLoc, AssertStk, AssertMBase) &&
       srcInst->typeParam() <= inst->typeParam())) {
    return fwdGuardSource(inst);
  }

  // 3) We're not constraining guards, and the old type is at least as good as
  //    the new type.
  if (!shouldConstrainGuards()) {
    if (newType == TBottom) {
      // The intersection is empty, so this code is unreachable.
      gen(Unreachable, ASSERT_REASON);
      return m_unit.cns(TBottom);
    }

    if (oldType <= newType) return fwdGuardSource(inst);
  }

  return nullptr;
}
/*
 * Pre-optimize an Assert on location `l`: assert on an already-known value
 * directly, otherwise fall through to the shared AssertTypeOp logic.
 */
SSATmp* IRBuilder::preOptimizeAssertLocation(IRInstruction* inst,
                                             Location l) {
  if (auto const prevValue = valueOf(l, DataTypeGeneric)) {
    gen(AssertType, inst->typeParam(), prevValue);
    inst->convertToNop();
    return nullptr;
  }

  return preOptimizeAssertTypeOp(
    inst,
    typeOf(l, DataTypeGeneric),
    valueOf(l, DataTypeGeneric),
    nullptr
  );
}
// Value-level AssertType: delegate with the source's known type/value/def.
SSATmp* IRBuilder::preOptimizeAssertType(IRInstruction* inst) {
  auto const src = inst->src(0);
  return preOptimizeAssertTypeOp(inst, src->type(), src, src->inst());
}

// Location-flavored asserts: dispatch to preOptimizeAssertLocation().
SSATmp* IRBuilder::preOptimizeAssertLoc(IRInstruction* inst) {
  return preOptimizeAssertLocation(inst, loc(inst->extra<AssertLoc>()->locId));
}

SSATmp* IRBuilder::preOptimizeAssertStk(IRInstruction* inst) {
  return preOptimizeAssertLocation(inst, stk(inst->extra<AssertStk>()->offset));
}
/*
 * Pre-optimize a load from location `l`: reuse a tracked value when one
 * exists, otherwise refine the load's type parameter from tracked state.
 */
SSATmp* IRBuilder::preOptimizeLdLocation(IRInstruction* inst, Location l) {
  if (auto tmp = valueOf(l, DataTypeGeneric)) return tmp;

  auto const type = typeOf(l, DataTypeGeneric);

  // The types may not be compatible in the presence of unreachable code.
  // Don't try to optimize the code in this case, and just let dead code
  // elimination take care of it later.
  if (!type.maybe(inst->typeParam())) {
    inst->setTypeParam(TBottom);
    return nullptr;
  }

  if (l.tag() == LTag::Local) {
    // If FrameStateMgr's type for a local isn't as good as the type param,
    // we're missing information in the IR.
    assertx(inst->typeParam() >= type);
  }
  inst->setTypeParam(std::min(type, inst->typeParam()));

  return nullptr;
}
// Location-flavored loads: dispatch to preOptimizeLdLocation().
SSATmp* IRBuilder::preOptimizeLdLoc(IRInstruction* inst) {
  return preOptimizeLdLocation(inst, loc(inst->extra<LdLoc>()->locId));
}

SSATmp* IRBuilder::preOptimizeLdStk(IRInstruction* inst) {
  return preOptimizeLdLocation(inst, stk(inst->extra<LdStk>()->offset));
}

// Reuse a tracked member-base pointer instead of reloading it.
SSATmp* IRBuilder::preOptimizeLdMBase(IRInstruction* inst) {
  if (auto ptr = m_state.mbr().ptr) return ptr;
  return nullptr;
}
/*
 * If the closure's canonical definition is a ConstructClosure, forward the
 * context it was constructed with (asserted to this load's type param).
 */
SSATmp* IRBuilder::preOptimizeLdClosureCtx(IRInstruction* inst) {
  auto const closure = canonical(inst->src(0));
  if (!closure->inst()->is(ConstructClosure)) return nullptr;
  return gen(AssertType, inst->typeParam(), closure->inst()->src(0));
}

// Cls/This flavors share the generic closure-context logic.
SSATmp* IRBuilder::preOptimizeLdClosureCls(IRInstruction* inst) {
  return preOptimizeLdClosureCtx(inst);
}

SSATmp* IRBuilder::preOptimizeLdClosureThis(IRInstruction* inst) {
  return preOptimizeLdClosureCtx(inst);
}
/*
 * Pre-optimize a load of the frame context (class or $this): reuse or
 * constant-fold a tracked context when possible, or fold the class of a
 * non-overridable static method's frame.
 */
SSATmp* IRBuilder::preOptimizeLdFrameCtx(IRInstruction* inst) {
  auto const func = inst->marker().func();
  assertx(func->cls() || func->isClosureBody());

  if (auto ctx = m_state.ctx()) {
    assertx(!inst->is(LdFrameCls) || ctx->type() <= TCls);
    assertx(!inst->is(LdFrameThis) || ctx->type() <= TObj);
    if (ctx->inst()->is(DefConst)) return ctx;

    if (ctx->hasConstVal() ||
        ctx->type().subtypeOfAny(TInitNull, TUninit, TNullptr)) {
      // The context is a known constant; materialize it as a DefConst.
      return m_unit.cns(ctx->type());
    }
    return ctx;
  }

  if (inst->is(LdFrameCls)) {
    // ActRec->m_cls of a static function is always a valid class pointer with
    // the bottom bit set
    assertx(func->cls());
    if (func->cls()->attrs() & AttrNoOverride) {
      return m_unit.cns(func->cls());
    }
  }

  return nullptr;
}
// Fold LdObjClass to a constant when the object's class specialization is
// exact or the class cannot be overridden.
SSATmp* IRBuilder::preOptimizeLdObjClass(IRInstruction* inst) {
  if (auto const spec = inst->src(0)->type().clsSpec()) {
    if (spec.exact() || spec.cls()->attrs() & AttrNoOverride) {
      return m_unit.cns(spec.cls());
    }
  }
  return nullptr;
}

// Frame $this/class loads share the generic frame-context logic.
SSATmp* IRBuilder::preOptimizeLdFrameThis(IRInstruction* inst) {
  return preOptimizeLdFrameCtx(inst);
}

SSATmp* IRBuilder::preOptimizeLdFrameCls(IRInstruction* inst) {
  return preOptimizeLdFrameCtx(inst);
}
/*
 * Dispatch `inst` to its opcode-specific preOptimize helper; returns nullptr
 * for opcodes with no pre-optimization.
 */
SSATmp* IRBuilder::preOptimize(IRInstruction* inst) {
#define X(op) case op: return preOptimize##op(inst);
  switch (inst->op()) {
    X(AssertType)
    X(AssertLoc)
    X(AssertStk)
    X(CheckLoc)
    X(CheckStk)
    X(CheckMBase)
    X(LdLoc)
    X(LdStk)
    X(LdMBase)
    X(LdClosureCls)
    X(LdClosureThis)
    X(LdFrameCls)
    X(LdFrameThis)
    X(LdObjClass)
    default: break;
  }
#undef X
  return nullptr;
}
384 ///////////////////////////////////////////////////////////////////////////////
/*
 * Perform preoptimization and simplification on the input instruction. If the
 * input instruction has a dest, this will return an SSATmp that represents the
 * same value as dst(0) of the input instruction. If the input instruction has
 * no dest, this will return nullptr.
 *
 * The caller never needs to clone or append; all this has been done.
 */
SSATmp* IRBuilder::optimizeInst(IRInstruction* inst, CloneFlag doClone,
                                Block* /*srcBlock*/) {
  static DEBUG_ONLY __thread int instNest = 0;
  if (debug) ++instNest;
  SCOPE_EXIT { if (debug) --instNest; };
  DEBUG_ONLY auto indent = [&] { return std::string(instNest * 2, ' '); };

  FTRACE(1, "optimize: {}\n", inst->toString());

  // Fallback: append the (possibly cloned) instruction unchanged.
  auto cloneAndAppendOriginal = [&] () -> SSATmp* {
    if (inst->op() == Nop) return nullptr;
    if (doClone == CloneFlag::Yes) {
      inst = m_unit.clone(inst);
    }
    appendInstruction(inst);
    return inst->dst(0);
  };

  // Copy propagation on inst source operands. Only perform constant
  // propagation if we're not constraining guards, to avoid breaking the
  // use-def chains it uses to find guards.
  copyProp(inst);
  if (!shouldConstrainGuards()) constProp(m_unit, inst);

  // Since preOptimize can inspect tracked state, we don't
  // perform it on non-main traces.
  if (m_savedBlocks.size() == 0) {
    // First pass of IRBuilder optimizations try to replace an
    // instruction based on tracked state before we do anything else.
    // May mutate the IRInstruction in place (and return nullptr) or
    // return an SSATmp*.
    if (auto const preOpt = preOptimize(inst)) {
      FTRACE(1, " {}preOptimize returned: {}\n",
             indent(), preOpt->inst()->toString());
      return inst->hasDst() ? preOpt : nullptr;
    }
    if (inst->op() == Nop) return cloneAndAppendOriginal();
  }

  // We skip simplification for AssertType when guard constraining is enabled
  // because information that appears to be redundant may allow us to avoid
  // constraining certain guards. preOptimizeAssertType() still eliminates some
  // subset of redundant AssertType instructions.
  if (!m_enableSimplification ||
      (shouldConstrainGuards() && inst->is(AssertType))) {
    return cloneAndAppendOriginal();
  }

  auto const simpResult = simplify(m_unit, inst);

  // These are the possible outputs:
  //
  // ([], nullptr): no optimization possible. Use original inst.
  //
  // ([], non-nullptr): passing through a src.
  //
  // ([X, ...], Y): throw away input instruction, append 'X, ...',
  //                return Y.
  if (!simpResult.instrs.empty()) {
    // New instructions were generated. Append the new ones, filtering out Nops.
    for (auto* newInst : simpResult.instrs) {
      assertx(!newInst->isTransient());
      if (newInst->op() == Nop) continue;
      appendInstruction(newInst);
    }
    return simpResult.dst;
  }

  // No new instructions were generated. Either simplification didn't do
  // anything, or we're using some other instruction's dst instead of our own.
  if (simpResult.dst) {
    // We're using some other instruction's output. Don't append anything.
    assertx(simpResult.dst->inst() != inst);
    return simpResult.dst;
  }

  // No simplification happened.
  return cloneAndAppendOriginal();
}
// Record that the eval stack is synced at this point so the unwinder can see
// everything pushed so far in a catch trace.
void IRBuilder::exceptionStackBoundary() {
  /*
   * If this assert fires, we're trying to put things on the stack in a catch
   * trace that the unwinder won't be able to see.
   */
  FTRACE(2, "exceptionStackBoundary()\n");
  assertx(m_state.bcSPOff() == curMarker().spOff());
  m_exnStack.syncedSpLevel = m_state.bcSPOff();
  m_state.resetStackModified();
}
// Update the current bytecode marker; no-op when unchanged.
void IRBuilder::setCurMarker(const BCMarker& newMarker) {
  if (newMarker == curMarker()) return;
  FTRACE(2, "IRBuilder::setCurMarker:\n old: {}\n new: {}\n",
         curMarker().valid() ? curMarker().show() : "<invalid>",
         newMarker.show());
  assertx(newMarker.valid());
  m_curBCContext.marker = newMarker;
}
496 ///////////////////////////////////////////////////////////////////////////////
497 // Guard relaxation.
/*
 * Merge constraint `gc` into the recorded constraint for guard `inst`.
 * Returns true iff the merged constraint differs from the recorded one;
 * weak constraints report the would-be change without committing it.
 */
bool IRBuilder::constrainGuard(const IRInstruction* inst, GuardConstraint gc) {
  if (!shouldConstrainGuards()) return false;

  auto& guard = m_constraints.guards[inst];
  auto newGc = applyConstraint(guard, gc);
  ITRACE(2, "constrainGuard({}, {}): {} -> {}\n", *inst, gc, guard, newGc);
  Indent _i;

  auto const changed = guard != newGc;
  if (changed && !gc.weak) guard = newGc;

  return changed;
}
/*
 * Trace the definition of `val` back to the guard(s) that established its
 * type, constraining them to `gc`.  Returns true iff any guard's constraint
 * changed.
 */
bool IRBuilder::constrainValue(SSATmp* const val, GuardConstraint gc) {
  if (!shouldConstrainGuards() || gc.empty()) return false;

  if (!val) {
    ITRACE(1, "attempted to constrain nullptr SSATmp*; bailing\n", gc);
    return false;
  }

  auto inst = val->inst();

  ITRACE(1, "constraining {} to {}\n", *inst, gc);
  Indent _i;

  if (inst->is(LdLoc, LdStk) || isMBaseLoad(inst)) {
    // If the value's type source is non-null and not a FramePtr, it's a real
    // value that was killed by a Call. The value won't be live but it's ok to
    // use it to track down the guard.
    always_assert_flog(m_constraints.typeSrcs.count(inst),
                       "no typeSrcs found for {}", *inst);

    bool changed = false;
    auto const typeSrcs = get_required(m_constraints.typeSrcs, inst);

    for (auto typeSrc : typeSrcs) {
      if (typeSrc.isGuard()) {
        if (inst->is(LdLoc)) {
          ITRACE(1, "constraining guard for local[{}]\n",
                 inst->extra<LdLoc>()->locId);
        } else if (inst->is(LdStk)) {
          ITRACE(1, "constraining guard for stack[{}]\n",
                 inst->extra<LdStk>()->offset.offset);
        } else {
          assertx(isMBaseLoad(inst));
          ITRACE(1, "constraining guard for mbase\n");
        }
      }
      changed |= constrainTypeSrc(typeSrc, gc);
    }
    return changed;
  }

  if (inst->is(AssertType)) {
    // Sometimes code in irgen asks for a value with DataTypeSpecific but can
    // tolerate a less specific value. If that happens, there's nothing to
    // constrain.
    if (!typeFitsConstraint(val->type(), gc)) return false;

    return constrainAssert(inst, gc, inst->src(0)->type());
  }

  if (inst->is(CheckType)) {
    // Sometimes code in irgen asks for a value with DataTypeSpecific but can
    // tolerate a less specific value. If that happens, there's nothing to
    // constrain.
    if (!typeFitsConstraint(val->type(), gc)) return false;

    return constrainCheck(inst, gc, inst->src(0)->type());
  }

  if (inst->isPassthrough()) {
    // Look through passthrough instructions to the underlying value.
    return constrainValue(inst->getPassthroughValue(), gc);
  }

  if (inst->is(DefLabel)) {
    // Constrain the corresponding source of every Jmp into this label.
    auto changed = false;
    auto dst = 0;
    for (; dst < inst->numDsts(); dst++) {
      if (val == inst->dst(dst)) break;
    }
    assertx(dst != inst->numDsts());
    for (auto& pred : inst->block()->preds()) {
      assertx(pred.inst()->is(Jmp));
      auto src = pred.inst()->src(dst);
      changed |= constrainValue(src, gc);
    }
    return changed;
  }

  // Any instructions not special cased above produce a new value, so there's
  // no guard for us to constrain.
  ITRACE(2, "value is new in this trace, bailing\n");
  return false;
}
/*
 * Constrain every type source of location `l` to `gc`.  `why` is included in
 * trace output only.  Returns true iff any guard's constraint changed.
 */
bool IRBuilder::constrainLocation(Location l, GuardConstraint gc,
                                  const std::string& why) {
  if (!shouldConstrainGuards() || gc.empty()) return false;

  ITRACE(1, "constraining {} to {} (for {})\n", show(l), gc, why);
  Indent _i;

  bool changed = false;
  for (auto typeSrc : m_state.typeSrcsOf(l)) {
    changed |= constrainTypeSrc(typeSrc, gc);
  }
  return changed;
}

// Convenience overloads forwarding to the three-argument form.
bool IRBuilder::constrainLocation(Location l, GuardConstraint gc) {
  return constrainLocation(l, gc, "");
}

bool IRBuilder::constrainLocal(uint32_t locID, GuardConstraint gc,
                               const std::string& why) {
  return constrainLocation(loc(locID), gc, why);
}

bool IRBuilder::constrainStack(IRSPRelOffset offset, GuardConstraint gc) {
  return constrainLocation(stk(offset), gc);
}
/*
 * Constrain a single type source: either a value (recurse through
 * constrainValue) or a guard instruction (constrain the assert/check).
 * Returns true iff any guard's constraint changed.
 */
bool IRBuilder::constrainTypeSrc(TypeSource typeSrc, GuardConstraint gc) {
  if (!shouldConstrainGuards() || gc.empty()) return false;

  ITRACE(1, "constraining type source {} to {}\n", show(typeSrc), gc);
  Indent _i;

  if (typeSrc.isValue()) return constrainValue(typeSrc.value, gc);

  assertx(typeSrc.isGuard());
  auto const guard = typeSrc.guard;

  always_assert(guard->is(AssertLoc, CheckLoc,
                          AssertStk, CheckStk,
                          AssertMBase, CheckMBase));

  // If the dest of the Assert/Check doesn't fit `gc', there's no point in
  // continuing.
  auto prevType = get_required(m_constraints.prevTypes, guard);
  if (!typeFitsConstraint(prevType & guard->typeParam(), gc)) {
    return false;
  }

  if (guard->is(AssertLoc, AssertStk, AssertMBase)) {
    return constrainAssert(guard, gc, prevType);
  }
  return constrainCheck(guard, gc, prevType);
}
/*
 * Constrain the sources of an Assert instruction.
 *
 * We also have to constrain the sources for Check instructions, and we share
 * this codepath for that purpose. However, for Checks, we first pre-relax the
 * instruction's typeParam, which we pass as `knownType'. (Otherwise, the
 * typeParam will be used as the `knownType'.)
 */
bool IRBuilder::constrainAssert(const IRInstruction* inst,
                                GuardConstraint gc, Type srcType,
                                folly::Optional<Type> knownType) {
  if (!knownType) knownType = inst->typeParam();

  // If the known type fits the constraint, we're done.
  if (typeFitsConstraint(*knownType, gc)) return false;

  auto const newGC = relaxConstraint(gc, *knownType, srcType);
  ITRACE(1, "tracing through {}, orig gc: {}, new gc: {}\n",
         *inst, gc, newGC);

  if (inst->is(AssertType, CheckType)) {
    return constrainValue(inst->src(0), newGC);
  }

  auto changed = false;
  auto const& typeSrcs = get_required(m_constraints.typeSrcs, inst);

  for (auto typeSrc : typeSrcs) {
    changed |= constrainTypeSrc(typeSrc, newGC);
  }
  return changed;
}
/*
 * Constrain the typeParam and sources of a Check instruction.
 */
bool IRBuilder::constrainCheck(const IRInstruction* inst,
                               GuardConstraint gc, Type srcType) {
  assertx(inst->is(CheckType, CheckLoc, CheckStk, CheckMBase));

  auto changed = false;
  auto const typeParam = inst->typeParam();

  // Constrain the guard on the Check instruction, but first relax the
  // constraint based on what's known about `srcType'.
  auto const guardGC = relaxConstraint(gc, srcType, typeParam);
  changed |= constrainGuard(inst, guardGC);

  // Relax typeParam with its current constraint. This is used below to
  // recursively relax the constraint on the source, if needed.
  auto constraint = applyConstraint(m_constraints.guards[inst], guardGC);
  auto const knownType = relaxToConstraint(typeParam, constraint);

  changed |= constrainAssert(inst, gc, srcType, knownType);

  return changed;
}
710 uint32_t IRBuilder::numGuards() const {
711 uint32_t count = 0;
712 for (auto& g : m_constraints.guards) {
713 if (g.second != DataTypeGeneric) count++;
715 return count;
718 ///////////////////////////////////////////////////////////////////////////////
// Constraining accessors: each constrains the queried location to `gc`
// before returning the tracked state for it.
const LocalState& IRBuilder::local(uint32_t id, GuardConstraint gc) {
  constrainLocal(id, gc, "");
  return m_state.local(id);
}

const StackState& IRBuilder::stack(IRSPRelOffset offset, GuardConstraint gc) {
  constrainStack(offset, gc);
  return m_state.stack(offset);
}

SSATmp* IRBuilder::valueOf(Location l, GuardConstraint gc) {
  constrainLocation(l, gc, "");
  return m_state.valueOf(l);
}

Type IRBuilder::typeOf(Location l, GuardConstraint gc) {
  constrainLocation(l, gc, "");
  return m_state.typeOf(l);
}
/*
 * Wrap a local or stack ID into a Location.
 */
Location IRBuilder::loc(uint32_t id) const {
  return Location::Local { id };
}

Location IRBuilder::stk(IRSPRelOffset off) const {
  // Translate the IR-SP-relative offset to an FP-relative one for Location.
  auto const fpRel = off.to<FPInvOffset>(m_state.irSPOff());
  return Location::Stack { fpRel };
}
751 ///////////////////////////////////////////////////////////////////////////////
752 // Bytecode-level control flow.
// A block can be started iff we already track frame state for it.
bool IRBuilder::canStartBlock(Block* block) const {
  return m_state.hasStateFor(block);
}
/*
 * Switch the builder to `block`, finishing the current block first.  Returns
 * false when `block` has no saved FrameState (i.e. isn't reachable yet).
 */
bool IRBuilder::startBlock(Block* block, bool hasUnprocessedPred) {
  assertx(block);
  assertx(m_savedBlocks.empty()); // No bytecode control flow in exits.

  if (block == m_curBlock) return true;

  // Return false if we don't have a FrameState saved for `block' yet
  // -- meaning it isn't reachable from the entry block yet.
  if (!canStartBlock(block)) return false;

  // There's no reason for us to be starting on the entry block when it's not
  // our current block.
  always_assert(!block->isEntry());

  auto& lastInst = m_curBlock->back();
  always_assert(lastInst.isBlockEnd());
  always_assert(lastInst.isTerminal() || m_curBlock->next() != nullptr);

  m_state.finishBlock(m_curBlock);
  m_curBlock = block;

  m_state.startBlock(m_curBlock, hasUnprocessedPred);
  always_assert(m_state.sp() != nullptr);
  always_assert(m_state.fp() != nullptr);

  FTRACE(2, "IRBuilder switching to block B{}: {}\n", block->id(),
         show(m_state));
  return true;
}
// Return the block mapped to `sk`, creating (and caching) one if absent.
Block* IRBuilder::makeBlock(SrcKey sk, uint64_t profCount) {
  auto it = m_skToBlockMap.find(sk);
  if (it == m_skToBlockMap.end()) {
    auto const block = m_unit.defBlock(profCount);
    m_skToBlockMap.emplace(sk, block);
    return block;
  }
  return it->second;
}

// Whether a block has already been mapped for `sk`.
bool IRBuilder::hasBlock(SrcKey sk) const {
  return m_skToBlockMap.count(sk);
}
// Map `sk` to `block`; `sk` must not already be mapped.
void IRBuilder::setBlock(SrcKey sk, Block* block) {
  assertx(!hasBlock(sk));
  m_skToBlockMap[sk] = block;
}
// Finish the current block and continue building into `block`; an appended
// block with no predecessors is marked Unreachable.
void IRBuilder::appendBlock(Block* block) {
  m_state.finishBlock(m_curBlock);
  FTRACE(2, "appending B{}\n", block->id());
  m_state.startBlock(block, false);
  m_curBlock = block;

  if (block->numPreds() == 0) {
    FTRACE(2, "Newly-appended B{} is unreachable!\n", block->id());
    gen(Unreachable, ASSERT_REASON);
  }
}
// Discard `block`'s instructions and reset its tracked state from `pred`.
void IRBuilder::resetBlock(Block* block, Block* pred) {
  block->instrs().clear();
  m_state.resetBlock(block, pred);
}

// Drop all SrcKey-to-block mappings.
void IRBuilder::resetOffsetMapping() {
  m_skToBlockMap.clear();
}
// Move the SrcKey-to-block mapping out to the caller, leaving it empty here.
jit::flat_map<SrcKey, Block*> IRBuilder::saveAndClearOffsetMapping() {
  return std::move(m_skToBlockMap);
}

// Restore a mapping previously taken with saveAndClearOffsetMapping().
void IRBuilder::restoreOffsetMapping(
  jit::flat_map<SrcKey, Block*>&& offsetMapping
) {
  m_skToBlockMap = std::move(offsetMapping);
}
// Accessors for the block that guard failures should branch to; nullptr when
// unset.
Block* IRBuilder::guardFailBlock() const {
  return m_guardFailBlock;
}

void IRBuilder::setGuardFailBlock(Block* block) {
  m_guardFailBlock = block;
}

void IRBuilder::resetGuardFailBlock() {
  m_guardFailBlock = nullptr;
}
/*
 * Save the current block/BC context/exception stack and switch to building
 * into `b` under `marker` (used for side exits); popBlock() restores.
 */
void IRBuilder::pushBlock(const BCMarker& marker, Block* b) {
  FTRACE(2, "IRBuilder::pushBlock:\n saved: B{} @ {}\n pushed: B{} @ {}\n",
         m_curBlock->id(), curMarker().show(), b->id(), marker.show());
  assertx(b);

  m_savedBlocks.push_back(
    BlockState { m_curBlock, m_curBCContext, m_exnStack }
  );
  m_state.pauseBlock(m_curBlock);
  m_state.startBlock(b, false);
  m_curBlock = b;
  m_curBCContext = BCContext { marker, 0 };

  if (debug) {
    for (UNUSED auto const& state : m_savedBlocks) {
      assertx(state.block != b &&
              "Can't push a block that's already in the saved stack");
    }
  }
}
// Finish the pushed block and restore the block/context/exception-stack state
// saved by the matching pushBlock().
void IRBuilder::popBlock() {
  assertx(!m_savedBlocks.empty());

  auto const& top = m_savedBlocks.back();
  FTRACE(2, "IRBuilder::popBlock:\n popped: B{} @ {}\n restored: B{} @ {}\n",
         m_curBlock->id(), curMarker().show(),
         top.block->id(), top.bcctx.marker.show());
  m_state.finishBlock(m_curBlock);
  m_state.unpauseBlock(top.block);
  m_curBlock = top.block;
  m_curBCContext = top.bcctx;
  m_exnStack = top.exnStack;
  m_savedBlocks.pop_back();
}
// The builder is in an unreachable state when the current block already ends
// in a terminal instruction.
bool IRBuilder::inUnreachableState() const {
  return !m_curBlock->empty() && m_curBlock->back().isTerminal();
}
890 ///////////////////////////////////////////////////////////////////////////////