Initial work to get loops through the JIT
hphp/runtime/vm/jit/frame-state.cpp
/*
   +----------------------------------------------------------------------+
   | HipHop for PHP                                                       |
   +----------------------------------------------------------------------+
   | Copyright (c) 2010-2014 Facebook, Inc. (http://www.facebook.com)     |
   +----------------------------------------------------------------------+
   | This source file is subject to version 3.01 of the PHP license,      |
   | that is bundled with this package in the file LICENSE, and is        |
   | available through the world-wide-web at the following url:           |
   | http://www.php.net/license/3_01.txt                                  |
   | If you did not receive a copy of the PHP license and are unable to   |
   | obtain it through the world-wide-web, please send a note to          |
   | license@php.net so we can mail you a copy immediately.               |
   +----------------------------------------------------------------------+
*/
#include "hphp/runtime/vm/jit/frame-state.h"

#include <algorithm>

#include "hphp/util/trace.h"
#include "hphp/runtime/vm/jit/ir-instruction.h"
#include "hphp/runtime/vm/jit/simplifier.h"
#include "hphp/runtime/vm/jit/ssa-tmp.h"

TRACE_SET_MOD(hhir);

namespace HPHP {
namespace JIT {

using Trace::Indent;

FrameState::FrameState(IRUnit& unit, BCMarker marker)
  : FrameState(unit, marker.spOff(), marker.func(), marker.func()->numLocals())
{
  assert(!marker.isDummy());
}

FrameState::FrameState(IRUnit& unit, Offset initialSpOffset, const Func* func,
                       uint32_t numLocals)
  : m_unit(unit)
  , m_curFunc(func)
  , m_spValue(nullptr)
  , m_fpValue(nullptr)
  , m_spOffset(initialSpOffset)
  , m_thisAvailable(false)
  , m_frameSpansCall(false)
  , m_stackDeficit(0)
  , m_evalStack()
  , m_locals(numLocals)
  , m_enableCse(false)
  , m_snapshots()
{
}
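
/*
 * Process a single instruction, updating the tracked frame and stack state
 * (sp/fp values, stack offset, and local effects via getLocalEffects) as well
 * as the CSE table, and saving state for successor blocks where needed.
 */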
void FrameState::update(const IRInstruction* inst) {
  ITRACE(3, "FrameState::update processing {}\n", *inst);
  Indent _i;

  if (auto* taken = inst->taken()) {
    // When we're building the IR, we append a conditional jump after
    // generating its target block: see emitJmpCondHelper, where we
    // call makeExit() before gen(JmpZero). It doesn't make sense to
    // update the target block state at this point, so don't. The
    // state doesn't have this problem during optimization passes,
    // because we'll always process the jump before the target block.
    if (!m_building || taken->empty()) save(taken);
  }

  auto const opc = inst->op();

  getLocalEffects(inst, *this);

  switch (opc) {
  case DefInlineFP:  trackDefInlineFP(inst); break;
  case InlineReturn: trackInlineReturn();    break;

  case Call:
    m_spValue = inst->dst();
    m_frameSpansCall = true;
    // A call pops the ActRec and the arguments, and then pushes a
    // return value.
    m_spOffset -= kNumActRecCells + inst->extra<Call>()->numParams;
    m_spOffset += 1;
    assert(m_spOffset >= 0);
    clearCse();
    break;

  case CallArray:
    m_spValue = inst->dst();
    m_frameSpansCall = true;
    // A CallArray pops the ActRec and an array arg, and pushes a return
    // value; the popped array arg and the pushed return value cancel out.
    m_spOffset -= kNumActRecCells;
    assert(m_spOffset >= 0);
    clearCse();
    break;

  case ContEnter:
    m_spValue = inst->dst();
    m_frameSpansCall = true;
    clearCse();
    break;

  case DefFP:
  case FreeActRec:
    m_fpValue = inst->dst();
    break;

  case ReDefSP:
    m_spValue = inst->dst();
    m_spOffset = inst->extra<ReDefSP>()->spOffset;
    break;

  case DefSP:
    m_spValue = inst->dst();
    m_spOffset = inst->extra<StackOffset>()->offset;
    break;

  case AssertStk:
  case CastStk:
  case CastStkIntToDbl:
  case CoerceStk:
  case CheckStk:
  case GuardStk:
  case ExceptionBarrier:
    m_spValue = inst->dst();
    break;

  case SpillStack: {
    m_spValue = inst->dst();
    // Push the spilled values but adjust for the popped values
    int64_t stackAdjustment = inst->src(1)->intVal();
    m_spOffset -= stackAdjustment;
    m_spOffset += spillValueCells(inst);
    break;
  }

  case SpillFrame:
  case CufIterSpillFrame:
    m_spValue = inst->dst();
    m_spOffset += kNumActRecCells;
    break;

  case InterpOne:
  case InterpOneCF: {
    m_spValue = inst->dst();
    auto const& extra = *inst->extra<InterpOneData>();
    int64_t stackAdjustment = extra.cellsPopped - extra.cellsPushed;
    // push the return value if any and adjust for the popped values
    m_spOffset -= stackAdjustment;
    break;
  }

  case AssertLoc:
  case GuardLoc:
  case CheckLoc:
    m_fpValue = inst->dst();
    break;

  case LdThis:
    m_thisAvailable = true;
    break;

  default:
    break;
  }

  if (inst->modifiesStack()) {
    m_spValue = inst->modifiedStkPtr();
  }

  // update the CSE table
  if (m_enableCse && inst->canCSE()) {
    cseInsert(inst);
  }

  // if the instruction kills any of its sources, remove them from the
  // CSE table
  if (inst->killsSources()) {
    for (int i = 0; i < inst->numSrcs(); ++i) {
      if (inst->killsSource(i)) {
        cseKill(inst->src(i));
      }
    }
  }

  // Save state for each block at the end.
  if (inst->isTerminal()) {
    save(inst->block());
  }
}

static const StaticString s_php_errormsg("php_errormsg");
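
/*
 * Compute the effects inst has on the tracked locals and report them through
 * hook. FrameState passes itself as the hook when updating its own state in
 * update().
 */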
void FrameState::getLocalEffects(const IRInstruction* inst,
                                 LocalStateHook& hook) const {
  auto killIterLocals = [&](const std::initializer_list<uint32_t>& ids) {
    for (auto id : ids) {
      hook.setLocalValue(id, nullptr);
    }
  };

  auto killedCallLocals = false;
  if ((inst->is(CallArray) && inst->extra<CallArray>()->destroyLocals) ||
      (inst->is(Call) && inst->extra<Call>()->destroyLocals) ||
      (inst->is(CallBuiltin) && inst->extra<CallBuiltin>()->destroyLocals)) {
    clearLocals(hook);
    killedCallLocals = true;
  }

  switch (inst->op()) {
  case Call:
  case CallArray:
  case ContEnter:
    killLocalsForCall(hook, killedCallLocals);
    break;

  case StRef: {
    SSATmp* newRef = inst->dst();
    SSATmp* prevRef = inst->src(0);
    // update other tracked locals that also contain prevRef
    updateLocalRefValues(hook, prevRef, newRef);
    break;
  }

  case StLocNT:
  case StLoc:
    hook.setLocalValue(inst->extra<LocalId>()->locId, inst->src(1));
    break;

  case LdGbl: {
    auto const type = inst->typeParam().relaxToGuardable();
    hook.setLocalType(inst->extra<LdGbl>()->locId, type);
    break;
  }

  case StGbl: {
    auto const type = inst->src(1)->type().relaxToGuardable();
    hook.setLocalType(inst->extra<StGbl>()->locId, type);
    break;
  }

  case LdLoc:
    hook.setLocalValue(inst->extra<LdLoc>()->locId, inst->dst());
    break;

  case AssertLoc:
  case GuardLoc:
  case CheckLoc:
    hook.refineLocalType(inst->extra<LocalId>()->locId, inst->typeParam(),
                         inst->dst());
    break;

  case TrackLoc:
    hook.setLocalValue(inst->extra<LocalId>()->locId, inst->src(0));
    break;

  case CheckType:
  case AssertType: {
    SSATmp* newVal = inst->dst();
    SSATmp* oldVal = inst->src(0);
    refineLocalValues(hook, oldVal, newVal);
    break;
  }

  case IterInitK:
  case WIterInitK:
    // kill the locals to which this instruction stores iter's key and value
    killIterLocals({inst->extra<IterData>()->keyId,
                    inst->extra<IterData>()->valId});
    break;

  case IterInit:
  case WIterInit:
    // kill the local to which this instruction stores iter's value
    killIterLocals({inst->extra<IterData>()->valId});
    break;

  case IterNextK:
  case WIterNextK:
    // kill the locals to which this instruction stores iter's key and value
    killIterLocals({inst->extra<IterData>()->keyId,
                    inst->extra<IterData>()->valId});
    break;

  case IterNext:
  case WIterNext:
    // kill the local to which this instruction stores iter's value
    killIterLocals({inst->extra<IterData>()->valId});
    break;

  case InterpOne:
  case InterpOneCF: {
    auto const& id = *inst->extra<InterpOneData>();
    assert(!id.smashesAllLocals || id.nChangedLocals == 0);
    if (id.smashesAllLocals) {
      clearLocals(hook);
    } else {
      auto it = id.changedLocals;
      auto const end = it + id.nChangedLocals;
      for (; it != end; ++it) {
        auto& loc = *it;
        // If changing the inner type of a boxed local, also drop the
        // information about inner types for any other boxed locals.
        if (loc.type.isBoxed()) dropLocalRefsInnerTypes(hook);
        hook.setLocalType(loc.id, loc.type);
      }
    }
    break;
  }

  default:
    break;
  }

  // If this instruction may raise an error and our function has a local named
  // "php_errormsg", we have to clobber it. See
  // http://www.php.net/manual/en/reserved.variables.phperrormsg.php
  if (inst->mayRaiseError()) {
    auto id = m_curFunc->lookupVarId(s_php_errormsg.get());
    if (id != -1) hook.setLocalValue(id, nullptr);
  }

  if (MInstrEffects::supported(inst)) MInstrEffects::get(inst, hook);
}

///// Support helpers for getLocalEffects /////

void FrameState::clearLocals(LocalStateHook& hook) const {
  for (unsigned i = 0; i < m_locals.size(); ++i) {
    hook.setLocalValue(i, nullptr);
  }
}
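
/*
 * Called when newVal was produced from oldVal by a CheckType or AssertType.
 * Any local (in this frame or an inlined frame) currently holding oldVal is
 * updated to hold the refined newVal.
 */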
void FrameState::refineLocalValues(LocalStateHook& hook,
                                   SSATmp* oldVal, SSATmp* newVal) const {
  assert(newVal->inst()->is(CheckType, AssertType));
  assert(newVal->inst()->src(0) == oldVal);

  walkAllInlinedLocals(
    [&](uint32_t i, unsigned inlineIdx, const LocalState& local) {
      if (local.value == oldVal) {
        hook.refineLocalValue(i, inlineIdx, oldVal, newVal);
      }
    });
}
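
/*
 * Apply body to the current (innermost) frame first, then to each enclosing
 * frame saved in m_inlineSavedStates, from innermost to outermost.
 */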
void FrameState::forEachFrame(FrameFunc body) const {
  body(m_fpValue, m_spOffset);

  // We push each new frame onto the end of m_inlineSavedStates, so walk it
  // backwards to go from inner frames to outer frames.
  for (auto it = m_inlineSavedStates.rbegin();
       it != m_inlineSavedStates.rend(); ++it) {
    auto const& state = *it;
    body(state.fpValue, state.spOffset);
  }
}
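
/*
 * Call body(localId, inlineIdx, localState) for every local in the current
 * frame (inlineIdx 0, unless skipThisFrame is set) and in every saved inline
 * frame (inlineIdx i + 1 for m_inlineSavedStates[i]).
 */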
template<typename L>
void FrameState::walkAllInlinedLocals(L body, bool skipThisFrame) const {
  auto doBody = [&](const LocalVec& locals, unsigned inlineIdx) {
    for (uint32_t i = 0, n = locals.size(); i < n; ++i) {
      body(i, inlineIdx, locals[i]);
    }
  };

  if (!skipThisFrame) {
    doBody(m_locals, 0);
  }
  for (int i = 0, n = m_inlineSavedStates.size(); i < n; ++i) {
    doBody(m_inlineSavedStates[i].locals, i + 1);
  }
}
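
/*
 * Call body(localId, value) for every tracked local, in this frame and in
 * all saved inline frames.
 */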
void FrameState::forEachLocal(LocalFunc body) const {
  walkAllInlinedLocals(
    [&](uint32_t i, unsigned inlineIdx, const LocalState& local) {
      body(i, local.value);
    });
}

/*
 * Called to clear out the tracked local values at a call site. Calls kill all
 * registers, so we don't want to keep locals in registers across calls. We do
 * continue tracking the types in locals, however.
 */
void FrameState::killLocalsForCall(LocalStateHook& hook,
                                   bool skipThisFrame) const {
  walkAllInlinedLocals(
    [&](uint32_t i, unsigned inlineIdx, const LocalState& local) {
      auto* value = local.value;
      if (!value || value->inst()->is(DefConst)) return;

      hook.killLocalForCall(i, inlineIdx, value);
    },
    skipThisFrame);
}

// This method updates the tracked values and types of all locals that contain
// oldRef so that they now contain newRef.
// This should only be called for ref/boxed types.
void FrameState::updateLocalRefValues(LocalStateHook& hook,
                                      SSATmp* oldRef, SSATmp* newRef) const {
  assert(oldRef->type().isBoxed());
  assert(newRef->type().isBoxed());

  walkAllInlinedLocals(
    [&](uint32_t i, unsigned inlineIdx, const LocalState& local) {
      if (local.value != oldRef) return;

      hook.updateLocalRefValue(i, inlineIdx, oldRef, newRef);
    });
}

/*
 * This method changes any boxed local into a BoxedInitCell type. It's safe to
 * assume they're init because you can never have a reference to uninit.
 */
void FrameState::dropLocalRefsInnerTypes(LocalStateHook& hook) const {
  walkAllInlinedLocals(
    [&](uint32_t i, unsigned inlineIdx, const LocalState& local) {
      if (local.type.isBoxed()) {
        hook.dropLocalInnerType(i, inlineIdx);
      }
    });
}

///// Methods for managing and merging block state /////
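
/*
 * Begin processing block: if a predecessor saved a snapshot for it, load that
 * state (including its inline frame stack) and drop the snapshot.
 */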
void FrameState::startBlock(Block* block) {
  auto it = m_snapshots.find(block);
  assert(IMPLIES(block->numPreds() > 0,
                 it != m_snapshots.end() || RuntimeOption::EvalJitLoops));
  if (it != m_snapshots.end()) {
    load(it->second);
    ITRACE(4, "Loading state for B{}: {}\n", block->id(), show(*this));
    m_inlineSavedStates = it->second.inlineSavedStates;
    m_snapshots.erase(it);
  }
}
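
/*
 * Finish processing block: propagate the current state to the fall-through
 * successor (if any), and, while initially building the IR, also record a
 * snapshot for block itself.
 */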
void FrameState::finishBlock(Block* block) {
  assert(block->back().isTerminal() == !block->next());

  if (!block->back().isTerminal()) {
    save(block->next());
  }
  if (m_building) {
    save(block);
  }
}

void FrameState::pauseBlock(Block* block) {
  save(block);
}

void FrameState::clearBlock(Block* block) {
  auto it = m_snapshots.find(block);
  if (it != m_snapshots.end()) {
    ITRACE(4, "Clearing state for B{}\n", block->id());
    m_snapshots.erase(it);
  }
}
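
/*
 * Capture the current tracked state as a Snapshot. The inline frame stack is
 * not copied here; save() attaches it when creating a new snapshot.
 */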
FrameState::Snapshot FrameState::createSnapshot() const {
  Snapshot state;
  state.spValue = m_spValue;
  state.fpValue = m_fpValue;
  state.curFunc = m_curFunc;
  state.spOffset = m_spOffset;
  state.thisAvailable = m_thisAvailable;
  state.stackDeficit = m_stackDeficit;
  state.evalStack = m_evalStack;
  state.locals = m_locals;
  state.curMarker = m_marker;
  state.frameSpansCall = m_frameSpansCall;
  assert(state.curMarker.valid());
  return state;
}

/*
 * Save current state for block. If this is the first time saving state for
 * block, create a new snapshot. Otherwise merge the current state into the
 * existing snapshot.
 */
void FrameState::save(Block* block) {
  ITRACE(4, "Saving current state to B{}: {}\n", block->id(), show(*this));
  auto it = m_snapshots.find(block);
  if (it != m_snapshots.end()) {
    merge(it->second);
    ITRACE(4, "Merged state: {}\n", show(*this));
  } else {
    auto& snapshot = m_snapshots[block] = createSnapshot();
    snapshot.inlineSavedStates = m_inlineSavedStates;
  }
}

bool FrameState::compatible(Block* block) {
  auto it = m_snapshots.find(block);
  // If we didn't find a snapshot, it's because we never saved one.
  // Probably because the other incoming edge is unreachable.
  if (it == m_snapshots.end()) return true;
  auto& snapshot = it->second;
  if (m_fpValue != snapshot.fpValue) return false;

  assert(m_locals.size() == snapshot.locals.size());
  for (int i = 0; i < m_locals.size(); ++i) {
    // Enforce strict equality of types for now. Eventually we could
    // relax this depending on downstream operations.
    //
    // TODO(t3729135): We don't bother to check values here because we
    // clear the CSE table at any merge. Eventually we will support
    // phis instead.
    if (m_locals[i].type != snapshot.locals[i].type) {
      return false;
    }
  }

  // TODO(t3730468): We don't check the stack here, because we always
  // spill the stack on all paths leading up to a merge, and insert a
  // DefSP at the merge point to block walking the use-def chain past
  // it. It would be better to do proper type analysis on the stack
  // values flowing in and insert phis or exits as needed.

  return true;
}

const FrameState::LocalVec& FrameState::localsForBlock(Block* b) const {
  auto bit = m_snapshots.find(b);
  assert(bit != m_snapshots.end());
  return bit->second.locals;
}
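
/*
 * Replace the current state with a previously saved snapshot. Note that
 * m_frameSpansCall is sticky: it stays set if either the current state or the
 * loaded snapshot had it set.
 */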
void FrameState::load(Snapshot& state) {
  m_spValue = state.spValue;
  m_fpValue = state.fpValue;
  m_spOffset = state.spOffset;
  m_curFunc = state.curFunc;
  m_thisAvailable = state.thisAvailable;
  m_stackDeficit = state.stackDeficit;
  m_evalStack = std::move(state.evalStack);
  m_locals = std::move(state.locals);
  m_marker = state.curMarker;
  m_frameSpansCall = m_frameSpansCall || state.frameSpansCall;
}

/*
 * Merge current state into state. Frame pointers and stack depth must match.
 * If the stack pointer tmps are different, clear the tracked value (we can
 * make a new one, given fp and spOffset).
 *
 * thisAvailable remains true only if it's true in both states.
 * Local variable values are preserved if they match in both states.
 * Types are combined using Type::unionOf.
 */
void FrameState::merge(Snapshot& state) {
  // cannot merge fp or spOffset state, so assert they match
  assert(state.fpValue == m_fpValue);
  assert(state.spOffset == m_spOffset);
  assert(state.curFunc == m_curFunc);
  if (state.spValue != m_spValue) {
    // we have two different sp definitions but we know they're equal
    // because spOffset matched.
    state.spValue = nullptr;
  }
  // this is available iff it's available in both states
  state.thisAvailable &= m_thisAvailable;

  assert(m_locals.size() == state.locals.size());
  for (unsigned i = 0; i < m_locals.size(); ++i) {
    auto& local = state.locals[i];

    // preserve local values if they're the same in both states
    if (local.value != m_locals[i].value) {
      // try to merge SSATmps for the local if one of them came from
      // a passthrough instruction with the other as the source.
      auto isParent = [](SSATmp* parent, SSATmp* child) -> bool {
        return child && child->inst()->isPassthrough() &&
               child->inst()->getPassthroughValue() == parent;
      };
      if (isParent(m_locals[i].value, local.value)) {
        local.value = m_locals[i].value;
      } else if (!isParent(local.value, m_locals[i].value)) {
        local.value = nullptr;
      }
    }
    if (local.typeSource != m_locals[i].typeSource) local.typeSource = nullptr;
    if (local.value && !local.typeSource) local.typeSource = local.value;

    local.type = Type::unionOf(local.type, m_locals[i].type);
  }

  // TODO(t3729135): If we are merging states from different bytecode
  // paths, we must conservatively clear the CSE table. Since the
  // markers may or may not have been updated, we always clear. What
  // we need is a global CSE algorithm.
  if (RuntimeOption::EvalHHIRBytecodeControlFlow) {
    clearCse();
  }

  // For now, we shouldn't be merging states with different inline states.
  assert(m_inlineSavedStates == state.inlineSavedStates);
}

void FrameState::trackDefInlineFP(const IRInstruction* inst) {
  auto const target     = inst->extra<DefInlineFP>()->target;
  auto const savedSPOff = inst->extra<DefInlineFP>()->retSPOff;
  auto const calleeFP   = inst->dst();
  auto const calleeSP   = inst->src(0);
  auto const savedSP    = inst->src(1);

  // Saved IRBuilder state will include the "return" fp/sp.
  // Whatever the current fpValue is is good enough, but we have to be
  // passed the StkPtr that represents the stack prior to the ActRec
  // being allocated.
  m_spOffset = savedSPOff;
  m_spValue = savedSP;

  auto const stackValues = collectStackValues(m_spValue, m_spOffset);
  for (DEBUG_ONLY auto& val : stackValues) {
    ITRACE(4, "  marking caller stack value available: {}\n",
           val->toString());
  }

  m_inlineSavedStates.emplace_back(createSnapshot());

  /*
   * Set up the callee state.
   *
   * We set m_thisAvailable to true on any object method, because we
   * just don't inline calls to object methods with a null $this.
   */
  m_fpValue = calleeFP;
  m_spValue = calleeSP;
  m_thisAvailable = target->cls() != nullptr && !target->isStatic();
  m_curFunc = target;
  m_frameSpansCall = false;

  m_locals.clear();
  m_locals.resize(target->numLocals());
}
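
/*
 * Pop the innermost inlined frame and restore the caller's saved state.
 */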
void FrameState::trackInlineReturn() {
  assert(m_inlineSavedStates.size());
  assert(m_inlineSavedStates.back().inlineSavedStates.empty());
  load(m_inlineSavedStates.back());
  m_inlineSavedStates.pop_back();
}
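
/*
 * CSE support. Constants (DefConst) live in the unit-wide constant table;
 * everything else goes in this FrameState's own CSE hash table.
 */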
CSEHash* FrameState::cseHashTable(const IRInstruction* inst) {
  return inst->is(DefConst) ? &m_unit.constTable() : &m_cseHash;
}

void FrameState::cseInsert(const IRInstruction* inst) {
  cseHashTable(inst)->insert(inst->dst());
}

void FrameState::cseKill(SSATmp* src) {
  if (src->inst()->canCSE()) {
    cseHashTable(src->inst())->erase(src);
  }
}

void FrameState::clearCse() {
  m_cseHash.clear();
}

SSATmp* FrameState::cseLookup(IRInstruction* inst,
                              Block* srcBlock,
                              const folly::Optional<IdomVector>& idoms) {
  auto tmp = cseHashTable(inst)->lookup(inst);
  if (tmp && idoms) {
    // During a reoptimize pass, we need to make sure that any values
    // we want to reuse for CSE are only reused in blocks dominated by
    // the block that defines it.
    if (!dominates(tmp->inst()->block(), srcBlock, *idoms)) {
      return nullptr;
    }
  }
  return tmp;
}

void FrameState::clear() {
  // A previous run of reoptimize could've legitimately exited the trace in an
  // inlined callee. If that happened, just pop all the saved states to return
  // to the top-level func.
  while (inlineDepth()) {
    trackInlineReturn();
  }
  clearCse();
  clearLocals(*this);
  m_frameSpansCall = false;
  m_spValue = m_fpValue = nullptr;
  m_spOffset = 0;
  m_thisAvailable = false;
  m_marker = BCMarker();
  m_snapshots.clear();
  assert(m_inlineSavedStates.empty());
}
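
/*
 * Read and write the tracked value, type, and type source of locals in the
 * current frame. FrameState uses itself as the LocalStateHook when applying
 * getLocalEffects to its own state.
 */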
SSATmp* FrameState::localValue(uint32_t id) const {
  always_assert(id < m_locals.size());
  return m_locals[id].value;
}

SSATmp* FrameState::localTypeSource(uint32_t id) const {
  always_assert(id < m_locals.size());
  auto const& local = m_locals[id];

  always_assert(!local.value || local.value == local.typeSource ||
                local.typeSource->isA(Type::FramePtr));
  return local.typeSource;
}

Type FrameState::localType(uint32_t id) const {
  always_assert(id < m_locals.size());
  return m_locals[id].type;
}

void FrameState::setLocalValue(uint32_t id, SSATmp* value) {
  always_assert(id < m_locals.size());
  m_locals[id].value = value;
  m_locals[id].type = value ? value->type() : Type::Gen;
  m_locals[id].typeSource = value;
}

void FrameState::refineLocalType(uint32_t id, Type type, SSATmp* typeSource) {
  always_assert(id < m_locals.size());
  auto& local = m_locals[id];
  Type newType = refineType(local.type, type);
  ITRACE(2, "updating local {}'s type: {} -> {}\n",
         id, local.type, newType);
  always_assert_flog(newType != Type::Bottom,
                     "Bad new type for local {}: {} & {} = {}",
                     id, local.type, type, newType);
  local.type = newType;
  local.typeSource = typeSource;
}

void FrameState::setLocalType(uint32_t id, Type type) {
  always_assert(id < m_locals.size());
  m_locals[id].value = nullptr;
  m_locals[id].type = type;
  m_locals[id].typeSource = nullptr;
}

/*
 * Get a reference to the LocalVec from an inline index. 0 means the current
 * frame, otherwise it's index (inlineIdx - 1) in m_inlineSavedStates.
 */
FrameState::LocalVec& FrameState::locals(unsigned inlineIdx) {
  if (inlineIdx == 0) {
    return m_locals;
  } else {
    --inlineIdx;
    assert(inlineIdx < m_inlineSavedStates.size());
    return m_inlineSavedStates[inlineIdx].locals;
  }
}

void FrameState::refineLocalValue(uint32_t id, unsigned inlineIdx,
                                  SSATmp* oldVal, SSATmp* newVal) {
  auto& locs = locals(inlineIdx);
  always_assert(id < locs.size());
  auto& local = locs[id];
  local.value = newVal;
  local.type = newVal->type();
  local.typeSource = newVal;
}

void FrameState::killLocalForCall(uint32_t id, unsigned inlineIdx,
                                  SSATmp* val) {
  auto& locs = locals(inlineIdx);
  always_assert(id < locs.size());
  locs[id].value = nullptr;
}

void FrameState::updateLocalRefValue(uint32_t id, unsigned inlineIdx,
                                     SSATmp* oldRef, SSATmp* newRef) {
  auto& local = locals(inlineIdx)[id];
  assert(local.value == oldRef);
  local.value = newRef;
  local.type = newRef->type();
  local.typeSource = newRef;
}

void FrameState::dropLocalInnerType(uint32_t id, unsigned inlineIdx) {
  auto& local = locals(inlineIdx)[id];
  assert(local.type.isBoxed());
  local.type = Type::BoxedInitCell;
}
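
/*
 * Produce a one-line, human-readable summary of a FrameState for trace
 * output.
 */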
std::string show(const FrameState& state) {
  return folly::format("func: {}, bcOff: {}, spOff: {}{}{}",
                       state.func()->fullName()->data(),
                       state.marker().bcOff(),
                       state.spOffset(),
                       state.thisAvailable() ? ", thisAvailable" : "",
                       state.frameSpansCall() ? ", frameSpansCall" : ""
                      ).str();
}

}}