Fix a bug in preOptimizeStLoc
hphp/runtime/vm/jit/trace-builder.cpp

/*
   +----------------------------------------------------------------------+
   | HipHop for PHP                                                       |
   +----------------------------------------------------------------------+
   | Copyright (c) 2010-2013 Facebook, Inc. (http://www.facebook.com)     |
   +----------------------------------------------------------------------+
   | This source file is subject to version 3.01 of the PHP license,      |
   | that is bundled with this package in the file LICENSE, and is        |
   | available through the world-wide-web at the following url:           |
   | http://www.php.net/license/3_01.txt                                  |
   | If you did not receive a copy of the PHP license and are unable to   |
   | obtain it through the world-wide-web, please send a note to          |
   | license@php.net so we can mail you a copy immediately.               |
   +----------------------------------------------------------------------+
*/

#include "hphp/runtime/vm/jit/trace-builder.h"

#include "folly/ScopeGuard.h"

#include "hphp/util/trace.h"
#include "hphp/runtime/vm/jit/ir-unit.h"
#include "hphp/runtime/vm/jit/guard-relaxation.h"
#include "hphp/runtime/base/rds.h"
#include "hphp/util/assertions.h"

namespace HPHP { namespace JIT {

TRACE_SET_MOD(hhir);

TraceBuilder::TraceBuilder(Offset initialBcOffset,
                           Offset initialSpOffsetFromFp,
                           IRUnit& unit,
                           const Func* func)
  : m_unit(unit)
  , m_simplifier(*this)
  , m_state(unit, initialSpOffsetFromFp, func)
  , m_curTrace(m_unit.main())
  , m_curBlock(nullptr)
  , m_enableSimplification(false)
  , m_inReoptimize(false)
{
  if (RuntimeOption::EvalHHIRGenOpts) {
    m_state.setEnableCse(RuntimeOption::EvalHHIRCse);
    m_enableSimplification = RuntimeOption::EvalHHIRSimplification;
  }
}

TraceBuilder::~TraceBuilder() {
}

/*
 * Returns whether or not the given value might have its type relaxed by guard
 * relaxation. If tmp is null, only conditions that apply to all values are
 * checked.
 */
bool TraceBuilder::typeMightRelax(SSATmp* tmp /* = nullptr */) const {
  if (!RuntimeOption::EvalHHIRRelaxGuards) return false;
  if (inReoptimize()) return false;
  if (tmp && (tmp->isConst() || tmp->isA(Type::Cls))) return false;

  return true;
}

/*
 * To help guard relaxation, there are some situations where we want to keep
 * around an Assert(Type|Stk|Loc) instruction that doesn't provide a more
 * specific type than its source.
 */
bool TraceBuilder::shouldElideAssertType(Type oldType, Type newType,
                                         SSATmp* oldVal) const {
  assert(oldType.maybe(newType));

  if (!typeMightRelax(oldVal)) return newType >= oldType;
  if (oldType == Type::Cls || newType == Type::Gen) return true;

  return newType > oldType;
}

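/*
 * Convenience generators for common constants; each emits a DefConst of the
 * named type.
 */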
SSATmp* TraceBuilder::genDefUninit() {
  return gen(DefConst, Type::Uninit, ConstData(0));
}

SSATmp* TraceBuilder::genDefInitNull() {
  return gen(DefConst, Type::InitNull, ConstData(0));
}

SSATmp* TraceBuilder::genDefNull() {
  return gen(DefConst, Type::Null, ConstData(0));
}

SSATmp* TraceBuilder::genPtrToInitNull() {
  return gen(DefConst, Type::PtrToInitNull, ConstData(&init_null_variant));
}

SSATmp* TraceBuilder::genPtrToUninit() {
  return gen(DefConst, Type::PtrToUninit, ConstData(&null_variant));
}

SSATmp* TraceBuilder::genDefNone() {
  return gen(DefConst, Type::None, ConstData(0));
}

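/*
 * Appends inst to block, except for Nop and DefConst instructions, which
 * take no position in the instruction stream.
 */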
void TraceBuilder::appendInstruction(IRInstruction* inst, Block* block) {
  assert(inst->marker().valid());
  Opcode opc = inst->op();
  if (opc != Nop && opc != DefConst) {
    block->push_back(inst);
  }
}

void TraceBuilder::appendInstruction(IRInstruction* inst) {
  if (m_curWhere) {
    // We have a specific position to insert instructions.
    assert(!inst->isBlockEnd());
    auto& it = m_curWhere.get();
    it = m_curBlock->insert(it, inst);
    ++it;
    return;
  }

  Block* block = m_curTrace->back();
  if (!block->empty()) {
    IRInstruction* prev = &block->back();
    if (prev->isBlockEnd()) {
      // start a new block
      Block* next = m_unit.defBlock();
      FTRACE(2, "lazily adding B{}\n", next->id());
      m_curTrace->push_back(next);
      if (!prev->isTerminal()) {
        // new block is reachable from old block so link it.
        block->setNext(next);
        next->setHint(block->hint());
      }
      block = next;
    }
  }
  appendInstruction(inst, block);
  if (m_savedTraces.empty()) {
    // We don't track state on non-main traces for now. t2982555
    m_state.update(inst);
  }
}

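/*
 * Appends block to the current trace, finishing the state of the trace's
 * current back block and loading up state for the new one.
 */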
void TraceBuilder::appendBlock(Block* block) {
  assert(m_savedTraces.empty()); // TODO(t2982555): Don't require this

  m_state.finishBlock(m_curTrace->back());

  FTRACE(2, "appending B{}\n", block->id());
  // Load up the state for the new block.
  m_state.startBlock(block);
  m_curTrace->push_back(block);
}

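/*
 * Returns the currently known types of stack cells and locals as a list of
 * RegionDesc::TypePred, omitting entries that say nothing more than
 * StackElem (for stack slots) or Gen (for locals).
 */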
std::vector<RegionDesc::TypePred> TraceBuilder::getKnownTypes() const {
  std::vector<RegionDesc::TypePred> result;
  auto const curFunc = m_state.func();
  auto const sp = m_state.sp();
  auto const spOffset = m_state.spOffset();

  for (unsigned i = 0; i < curFunc->maxStackCells(); ++i) {
    auto t = getStackValue(sp, i).knownType;
    if (!t.equals(Type::StackElem)) {
      result.push_back({ RegionDesc::Location::Stack{i, spOffset - i}, t });
    }
  }

  for (unsigned i = 0; i < curFunc->numLocals(); ++i) {
    auto t = m_state.localType(i);
    if (!t.equals(Type::Gen)) {
      result.push_back({ RegionDesc::Location::Local{i}, t });
    }
  }
  return result;
}

//////////////////////////////////////////////////////////////////////

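/*
 * The preOptimize* methods below inspect tracked state to try to replace an
 * instruction before anything else happens to it. Each one may return an
 * existing SSATmp to use in place of the instruction's dest, mutate the
 * instruction in place, or return nullptr to leave it alone.
 */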
SSATmp* TraceBuilder::preOptimizeCheckLoc(IRInstruction* inst) {
  auto const locId = inst->extra<CheckLoc>()->locId;
  Type typeParam = inst->typeParam();

  if (auto const prevValue = localValue(locId, DataTypeGeneric)) {
    return gen(CheckType, typeParam, inst->taken(), prevValue);
  }

  auto const prevType = localType(locId, DataTypeSpecific);

  if (prevType <= typeParam) {
    return inst->src(0);
  } else {
    //
    // Normally, it doesn't make sense to be checking something that's
    // deemed to fail. Incompatible boxed types are ok though, since
    // we don't track them precisely, but instead check them at every
    // use.
    //
    // However, in JitPGO mode right now, this pathological case can
    // happen, because profile counters are not accurate and we
    // currently don't analyze a block's post-conditions when picking
    // its successors during region selection. This can lead to
    // incompatible types in blocks selected for the same region.
    //
    if (!typeParam.isBoxed() || !prevType.isBoxed()) {
      if ((typeParam & prevType) == Type::Bottom) {
        assert(RuntimeOption::EvalJitPGO);
        return gen(Jmp, inst->taken());
      }
    }
  }

  return nullptr;
}

SSATmp* TraceBuilder::preOptimizeAssertLoc(IRInstruction* inst) {
  auto const locId = inst->extra<AssertLoc>()->locId;

  auto const prevType = localType(locId, DataTypeGeneric);
  auto const typeParam = inst->typeParam();

  if (prevType.not(typeParam)) {
    TRACE_PUNT("Invalid AssertLoc");
  }

  if (shouldElideAssertType(prevType, typeParam, nullptr)) {
    return inst->src(0);
  }

  if (filterAssertType(inst, prevType)) {
    constrainLocal(locId, categoryForType(prevType), "AssertLoc");
  }

  return nullptr;
}

SSATmp* TraceBuilder::preOptimizeLdThis(IRInstruction* inst) {
  if (m_state.thisAvailable()) {
    auto fpInst = frameRoot(inst->src(0)->inst());

    if (fpInst->is(DefInlineFP)) {
      if (!m_state.frameSpansCall()) { // check that we haven't nuked the SSATmp
        auto spInst = findSpillFrame(fpInst->src(0));
        // In an inlined call, we should always be able to find our SpillFrame.
        always_assert(spInst && spInst->src(0) == fpInst->src(1));
        if (spInst->src(3)->isA(Type::Obj)) {
          return spInst->src(3);
        }
      }
    }
    inst->setTaken(nullptr);
  }
  return nullptr;
}

SSATmp* TraceBuilder::preOptimizeLdCtx(IRInstruction* inst) {
  if (m_state.thisAvailable()) return gen(LdThis, m_state.fp());
  return nullptr;
}

SSATmp* TraceBuilder::preOptimizeDecRefThis(IRInstruction* inst) {
  /*
   * If $this is available, convert to an instruction sequence that
   * doesn't need to test if it's already live.
   */
  if (thisAvailable()) {
    auto const thiss = gen(LdThis, m_state.fp());
    gen(DecRef, thiss);
    inst->convertToNop();
  }

  return nullptr;
}

SSATmp* TraceBuilder::preOptimizeDecRefLoc(IRInstruction* inst) {
  auto const locId = inst->extra<DecRefLoc>()->locId;

  /*
   * Refine the type if we can.
   *
   * We can't really rely on the types held in the boxed values since aliasing
   * stores may change them, and we only guard during LdRef. So we have to
   * change any boxed type to BoxedCell.
   *
   * DataTypeGeneric is used because we don't want a DecRef to be the only
   * thing keeping a guard around. This code is designed to tolerate the
   * incoming type being relaxed.
   */
  auto knownType = localType(locId, DataTypeGeneric);
  if (knownType.isBoxed()) {
    knownType = Type::BoxedCell;
  }

  /*
   * If we have the local value in flight, use a DecRef on it instead of doing
   * it in memory.
   */
  if (auto tmp = localValue(locId, DataTypeGeneric)) {
    gen(DecRef, tmp);
    inst->convertToNop();
    return nullptr;
  }

  if (!typeMightRelax()) {
    inst->setTypeParam(
      Type::mostRefined(knownType, inst->typeParam())
    );
  }

  return nullptr;
}

SSATmp* TraceBuilder::preOptimizeLdLoc(IRInstruction* inst) {
  auto const locId = inst->extra<LdLoc>()->locId;
  if (auto tmp = localValue(locId, DataTypeGeneric)) {
    return tmp;
  }

  auto const type = localType(locId, DataTypeGeneric);
  // If FrameState's type isn't as good as the type param, we're missing
  // information in the IR.
  assert(inst->typeParam() >= type);
  inst->setTypeParam(Type::mostRefined(type, inst->typeParam()));
  return nullptr;
}

SSATmp* TraceBuilder::preOptimizeLdLocAddr(IRInstruction* inst) {
  auto const locId = inst->extra<LdLocAddr>()->locId;
  auto const type = localType(locId, DataTypeGeneric);
  assert(inst->typeParam().deref() >= type);
  inst->setTypeParam(Type::mostRefined(type.ptr(), inst->typeParam()));
  return nullptr;
}

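/*
 * A sketch of the StLoc -> StLocNT rewrite performed below, assuming the
 * usual reading of StLocNT as "store local, no type tag": if the local is
 * already known to hold an Int and the incoming value is also an Int, the
 * KindOf in memory is already correct, so only the value itself needs to
 * be rewritten.
 */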
SSATmp* TraceBuilder::preOptimizeStLoc(IRInstruction* inst) {
  // Guard relaxation might change the current local type, so don't try to
  // change to StLocNT until after relaxation happens.
  if (!inReoptimize()) return nullptr;

  auto locId = inst->extra<StLoc>()->locId;
  auto const curType = localType(locId, DataTypeGeneric);
  auto const newType = inst->src(1)->type();

  assert(inst->typeParam() == Type::None);

  /*
   * There's no need to store the type if it's going to be the same
   * KindOfFoo. We'll still have to store string types because we
   * aren't specific about storing KindOfStaticString
   * vs. KindOfString, and a Type::Null might mean KindOfUninit or
   * KindOfNull.
   */
  auto const bothBoxed = curType.isBoxed() && newType.isBoxed();
  auto const sameUnboxed = [&] {
    auto avoidable = { Type::Uninit,
                       Type::InitNull,
                       Type::Int,
                       Type::Dbl,
                       // No strings.
                       Type::Arr,
                       Type::Obj,
                       Type::Res };
    for (auto& t : avoidable) {
      if (curType.subtypeOf(t) && newType.subtypeOf(t)) return true;
    }
    return false;
  };
  if (bothBoxed || sameUnboxed()) {
    inst->setOpcode(StLocNT);
  }

  return nullptr;
}

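// Each X(op) below expands to a case of the form:
//   case CheckLoc: return preOptimizeCheckLoc(inst);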
SSATmp* TraceBuilder::preOptimize(IRInstruction* inst) {
#define X(op) case op: return preOptimize##op(inst)
  switch (inst->op()) {
    X(CheckLoc);
    X(AssertLoc);
    X(LdThis);
    X(LdCtx);
    X(DecRefThis);
    X(DecRefLoc);
    X(LdLoc);
    X(LdLocAddr);
    X(StLoc);
  default:
    break;
  }
#undef X
  return nullptr;
}

//////////////////////////////////////////////////////////////////////

SSATmp* TraceBuilder::optimizeWork(IRInstruction* inst,
                                   const folly::Optional<IdomVector>& idoms) {
  // Since some of these optimizations inspect tracked state, we don't
  // perform any of them on non-main traces.
  if (m_savedTraces.size() > 0) return nullptr;

  static DEBUG_ONLY __thread int instNest = 0;
  if (debug) ++instNest;
  SCOPE_EXIT { if (debug) --instNest; };
  DEBUG_ONLY auto indent = [&] { return std::string(instNest * 2, ' '); };

  FTRACE(1, "optimizing {}{}\n", indent(), inst->toString());

  // The first pass of tracebuilder optimizations tries to replace an
  // instruction based on tracked state before we do anything else.
  // May mutate the IRInstruction in place (and return nullptr) or
  // return an SSATmp*.
  if (SSATmp* preOpt = preOptimize(inst)) {
    FTRACE(1, "  {}preOptimize returned: {}\n",
           indent(), preOpt->inst()->toString());
    return preOpt;
  }
  if (inst->op() == Nop) return nullptr;

  // copy propagation on inst source operands
  copyProp(inst);

  SSATmp* result = nullptr;

  if (m_enableSimplification) {
    result = m_simplifier.simplify(inst);
    if (result) {
      inst = result->inst();
      if (inst->producesReference(0)) {
        // This effectively prevents CSE from kicking in below, which
        // would replace the instruction with an IncRef. That is
        // correct if the simplifier morphed the instruction, but it's
        // incorrect if the simplifier returned one of the original
        // instruction's sources. We currently have no way to
        // distinguish the two cases, so we prevent CSE completely for
        // now.
        return result;
      }
    }
  }

  if (m_state.enableCse() && inst->canCSE()) {
    SSATmp* cseResult = m_state.cseLookup(inst, idoms);
    if (cseResult) {
      // Found a dominating instruction that can be used instead of inst
      FTRACE(1, "  {}cse found: {}\n",
             indent(), cseResult->inst()->toString());

      assert(!inst->consumesReferences());
      if (inst->producesReference(0)) {
        // Replace with an IncRef
        FTRACE(1, "  {}cse of refcount-producing instruction\n", indent());
        gen(IncRef, cseResult);
      }
      return cseResult;
    }
  }

  return result;
}

SSATmp* TraceBuilder::optimizeInst(IRInstruction* inst, CloneFlag doClone) {
  if (auto const tmp = optimizeWork(inst, folly::none)) {
    return tmp;
  }

  // Couldn't CSE or simplify the instruction; clone it and append.
  if (inst->op() != Nop) {
    if (doClone == CloneFlag::Yes) inst = m_unit.cloneInstruction(inst);
    appendInstruction(inst);
    // returns nullptr if instruction has no dest, returns the first
    // (possibly only) dest otherwise
    return inst->dst(0);
  }
  return nullptr;
}

/*
 * reoptimize() runs a trace through a second pass of TraceBuilder
 * optimizations, like this:
 *
 *   reset state.
 *   move all blocks to a temporary list.
 *   compute immediate dominators.
 *   for each block in trace order:
 *     if we have a snapshot state for this block:
 *       clear cse entries that don't dominate this block.
 *       use snapshot state.
 *     move all instructions to a temporary list.
 *     for each instruction:
 *       optimizeWork - do CSE and simplify again
 *       if not simplified:
 *         append existing instruction and update state.
 *       else:
 *         if the instruction has a result, insert a mov from the
 *         simplified tmp to the original tmp and discard the instruction.
 *     if the last conditional branch was turned into a jump, remove the
 *     fall-through edge to the next block.
 */
void TraceBuilder::reoptimize() {
  FTRACE(5, "ReOptimize:vvvvvvvvvvvvvvvvvvvv\n");
  SCOPE_EXIT { FTRACE(5, "ReOptimize:^^^^^^^^^^^^^^^^^^^^\n"); };
  assert(m_curTrace->isMain());
  assert(m_savedTraces.empty());

  m_state.setEnableCse(RuntimeOption::EvalHHIRCse);
  m_enableSimplification = RuntimeOption::EvalHHIRSimplification;
  if (!m_state.enableCse() && !m_enableSimplification) return;
  always_assert(!m_inReoptimize);
  m_inReoptimize = true;

  BlockList sortedBlocks = rpoSortCfg(m_unit);
  auto const idoms = findDominators(m_unit, sortedBlocks);
  m_state.clear();

  auto& traceBlocks = m_curTrace->blocks();
  BlockList blocks(traceBlocks.begin(), traceBlocks.end());
  traceBlocks.clear();
  for (auto* block : blocks) {
    assert(block->trace() == m_curTrace);
    FTRACE(5, "Block: {}\n", block->id());

    assert(m_curTrace->isMain());
    m_state.startBlock(block);
    m_curTrace->push_back(block);

    auto instructions = std::move(block->instrs());
    assert(block->empty());
    while (!instructions.empty()) {
      auto* inst = &instructions.front();
      instructions.pop_front();
      m_state.setMarker(inst->marker());

      // Merging state looks at the current marker, and optimizeWork
      // below may create new instructions. Use the marker from this
      // instruction.
      assert(inst->marker().valid());
      setMarker(inst->marker());

      auto const tmp = optimizeWork(inst, idoms); // Can generate new instrs!
      if (!tmp) {
        // Could not optimize; keep the old instruction
        appendInstruction(inst, block);
        m_state.update(inst);
        continue;
      }
      SSATmp* dst = inst->dst();
      if (dst->type() != Type::None && dst != tmp) {
        // The result of optimization has a different destination than the
        // inst. Generate a mov(tmp->dst) to get the result into dst. If we
        // get here then assume the last instruction in the block isn't a
        // guard. If it was, we would have to insert the mov on the
        // fall-through edge.
        assert(block->empty() || !block->back().isBlockEnd());
        IRInstruction* mov = m_unit.mov(dst, tmp, inst->marker());
        appendInstruction(mov, block);
        m_state.update(mov);
      }
      // Not re-adding inst; remove the inst->taken edge
      if (inst->taken()) inst->setTaken(nullptr);
    }

    if (block->empty()) {
      // If all the instructions in the block were optimized away, remove it
      // from the trace.
      auto it = traceBlocks.end();
      --it;
      assert(*it == block);
      m_curTrace->unlink(it);
    } else {
      if (block->back().isTerminal()) {
        // Could have converted a conditional branch to Jmp; clear next.
        block->setNext(nullptr);
      }
      m_state.finishBlock(block);
    }
  }
}

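/*
 * Guards are only constrained while the initial trace is built; by the time
 * reoptimize() runs, relaxation has already been applied.
 */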
bool TraceBuilder::shouldConstrainGuards() const {
  return RuntimeOption::EvalHHIRRelaxGuards &&
         !inReoptimize();
}

/*
 * Returns true iff a guard to constrain was found, and tc was more specific
 * than the guard's existing constraint. Note that this doesn't necessarily
 * mean that the guard was constrained: tc.weak might be true.
 */
bool TraceBuilder::constrainGuard(IRInstruction* inst,
                                  TypeConstraint tc) {
  if (!shouldConstrainGuards()) return false;

  auto& guard = m_guardConstraints[inst];
  auto changed = false;

  if (tc.innerCat) {
    // If the constraint is for the inner type and is better than what guard
    // has, update it.
    auto cat = tc.innerCat.get();
    if (guard.innerCat && guard.innerCat >= cat) return false;
    if (!tc.weak) {
      FTRACE(1, "constraining inner type of {}: {} -> {}\n",
             *inst, guard.innerCat ? guard.innerCat.get() : DataTypeGeneric,
             cat);
      guard.innerCat = cat;
    }
    return true;
  }

  if (tc.category > guard.category) {
    if (!tc.weak) {
      FTRACE(1, "constraining {}: {} -> {}\n",
             *inst, guard.category, tc.category);
      guard.category = tc.category;
    }
    changed = true;
  }

  assert(tc.knownType.maybe(guard.knownType));
  if (tc.knownType < guard.knownType) {
    // We don't check tc.weak here because knownType is supposed to be
    // statically known type information.
    FTRACE(1, "refining knownType of {}: {} -> {}\n",
           *inst, guard.knownType, tc.knownType);
    guard.knownType = tc.knownType;
  }

  return changed;
}

/*
 * Trace back to the guard that provided the type of val, if
 * any. Constrain it so its type will not be relaxed beyond the given
 * DataTypeCategory. Returns true iff one or more guard instructions
 * were constrained.
 */
bool TraceBuilder::constrainValue(SSATmp* const val,
                                  TypeConstraint tc) {
  if (!shouldConstrainGuards()) return false;

  if (!val) {
    FTRACE(1, "constrainValue(nullptr, {}), bailing\n", tc);
    return false;
  }

  FTRACE(1, "constrainValue({}, {})\n", *val->inst(), tc);

  auto inst = val->inst();
  if (inst->is(LdLoc, LdLocAddr)) {
    // We've hit a LdLoc(Addr). If the source of the value is non-null and not
    // a FramePtr, it's a real value that was killed by a Call. The value won't
    // be live but it's ok to use it to track down the guard.

    auto source = inst->extra<LocalData>()->valSrc;
    if (!source) {
      // val was newly created in this trace. Nothing to constrain.
      FTRACE(2, "  - valSrc is null, bailing\n");
      return false;
    }

    // If valSrc is a FramePtr, it represents the frame the value was
    // originally loaded from. Look for the guard for this local.
    if (source->isA(Type::FramePtr)) {
      return constrainLocal(inst->extra<LocalId>()->locId, source, tc,
                            "constrainValue");
    }

    // Otherwise, keep chasing down the source of val.
    return constrainValue(source, tc);
  } else if (inst->is(LdStack, LdStackAddr)) {
    return constrainStack(inst->src(0), inst->extra<StackOffset>()->offset,
                          tc);
  } else if (inst->is(CheckType, AssertType)) {
    // If the dest type of the instruction fits the constraint we want, we can
    // stop here without constraining any further. Otherwise, continue through
    // to the source.
    auto changed = false;
    if (inst->is(CheckType)) changed = constrainGuard(inst, tc) || changed;

    auto dstType = inst->typeParam();
    if (!typeFitsConstraint(dstType, tc.category)) {
      changed = constrainValue(inst->src(0), tc) || changed;
    }
    return changed;
  } else if (inst->is(StRef, StRefNT, Box, BoxPtr)) {
    // If our caller cares about the inner type, propagate that through.
    // Otherwise we're done.
    if (tc.innerCat) {
      auto src = inst->src(inst->is(StRef, StRefNT) ? 1 : 0);
      tc.innerCat.reset();
      return constrainValue(src, tc);
    }
    return false;
  } else if (inst->is(LdRef, Unbox, UnboxPtr)) {
    // Pass through to the source of the box, remembering that we care about
    // the inner type of the box.
    assert(!tc.innerCat);
    tc.innerCat = tc.category;
    return constrainValue(inst->src(0), tc);
  } else if (inst->isPassthrough()) {
    return constrainValue(inst->getPassthroughValue(), tc);
  } else {
    // Any instructions not special cased above produce a new value, so
    // there's no guard for us to constrain.
    FTRACE(2, "  - value is new in this trace, bailing\n");
    return false;
  }
  // TODO(t2598894): Should be able to do something with LdMem<T> here.
}

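/*
 * Convenience overload: constrains the guard for locId using the local's
 * current value source from tracked state.
 */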
bool TraceBuilder::constrainLocal(uint32_t locId, TypeConstraint tc,
                                  const std::string& why) {
  return constrainLocal(locId, localValueSource(locId), tc, why);
}

bool TraceBuilder::constrainLocal(uint32_t locId, SSATmp* valSrc,
                                  TypeConstraint tc,
                                  const std::string& why) {
  if (!shouldConstrainGuards()) return false;

  FTRACE(1, "constrainLocal({}, {}, {}, {})\n",
         locId, valSrc ? valSrc->inst()->toString() : "null", tc, why);

  if (!valSrc) return false;
  if (!valSrc->isA(Type::FramePtr)) {
    return constrainValue(valSrc, tc);
  }

  // When valSrc is a FramePtr, that means we loaded the value the local had
  // coming into the trace. Trace through the FramePtr chain, looking for a
  // guard for this local id. If we find it, constrain the guard. If we don't
  // find it, there wasn't a guard for this local so there's nothing to
  // constrain.
  auto guard = guardForLocal(locId, valSrc);
  while (guard) {
    if (guard->is(AssertLoc)) {
      // If the refined type of the local satisfies the constraint we're
      // trying to apply, we can stop here. This can happen if we assert a
      // more general type than what we already know. Otherwise we need to
      // keep tracing back to the guard.
      if (typeFitsConstraint(guard->typeParam(), tc.category)) return false;
      guard = guardForLocal(locId, guard->src(0));
    } else {
      assert(guard->is(GuardLoc, AssertLoc));
      FTRACE(2, "  - found guard to constrain\n");
      return constrainGuard(guard, tc);
    }
  }

  FTRACE(2, "  - no guard to constrain\n");
  return false;
}

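/*
 * Stack analogue of constrainLocal: constrains the guard that provided the
 * type of the cell at stack offset idx.
 */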
bool TraceBuilder::constrainStack(int32_t idx, TypeConstraint tc) {
  return constrainStack(sp(), idx, tc);
}

bool TraceBuilder::constrainStack(SSATmp* sp, int32_t idx,
                                  TypeConstraint tc) {
  if (!shouldConstrainGuards()) return false;

  FTRACE(1, "constrainStack({}, {}, {})\n", *sp->inst(), idx, tc);
  assert(sp->isA(Type::StkPtr));

  // We've hit a LdStack. If getStackValue gives us a value, recurse on
  // that. Otherwise, look at the instruction that gave us the type of the
  // stack element. If it's a GuardStk or CheckStk, it's our target. If it's
  // anything else, the value is new so there's no guard to relax.
  auto stackInfo = getStackValue(sp, idx);
  if (stackInfo.value) {
    FTRACE(1, "  - value = {}\n", *stackInfo.value->inst());
    return constrainValue(stackInfo.value, tc);
  } else {
    auto typeSrc = stackInfo.typeSrc;
    FTRACE(1, "  - typeSrc = {}\n", *typeSrc);
    return typeSrc->is(GuardStk, CheckStk) && constrainGuard(typeSrc, tc);
  }
}

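/*
 * Wrappers around FrameState's local accessors that also apply the given
 * constraint to the local's guard.
 */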
Type TraceBuilder::localType(uint32_t id, TypeConstraint tc) {
  constrainLocal(id, tc, "localType");
  return m_state.localType(id);
}

SSATmp* TraceBuilder::localValue(uint32_t id, TypeConstraint tc) {
  constrainLocal(id, tc, "localValue");
  return m_state.localValue(id);
}

void TraceBuilder::setMarker(BCMarker marker) {
  auto const oldMarker = m_state.marker();

  if (marker == oldMarker) return;
  FTRACE(2, "TraceBuilder changing current marker from {} to {}\n",
         oldMarker.func ? oldMarker.show() : "<invalid>", marker.show());
  assert(marker.valid());
  m_state.setMarker(marker);
}

void TraceBuilder::pushTrace(IRTrace* t, BCMarker marker, Block* b,
                             const boost::optional<Block::iterator>& where) {
  FTRACE(2, "TraceBuilder saving {}@{} and using {}@{}\n",
         m_curTrace, m_state.marker().show(), t, marker.show());
  assert(t);
  assert(bool(b) == bool(where));
  assert(IMPLIES(b, b->trace() == t));

  m_savedTraces.push(
    TraceState{ m_curTrace, m_curBlock, m_state.marker(), m_curWhere });
  m_curTrace = t;
  m_curBlock = b;
  setMarker(marker);
  m_curWhere = where;
}

void TraceBuilder::popTrace() {
  assert(!m_savedTraces.empty());

  auto const& top = m_savedTraces.top();
  FTRACE(2, "TraceBuilder popping {}@{} to restore {}@{}\n",
         m_curTrace, m_state.marker().show(), top.trace, top.marker.show());
  m_curTrace = top.trace;
  m_curBlock = top.block;
  setMarker(top.marker);
  m_curWhere = top.where;
  m_savedTraces.pop();
}

}}