hphp/runtime/vm/jit/memory-effects.cpp
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
16 #include "hphp/runtime/vm/jit/memory-effects.h"
18 #include "hphp/util/match.h"
19 #include "hphp/util/safe-cast.h"
20 #include "hphp/util/assertions.h"
22 #include "hphp/runtime/base/implicit-context.h"
24 #include "hphp/runtime/vm/bytecode.h"
25 #include "hphp/runtime/vm/jit/analysis.h"
26 #include "hphp/runtime/vm/jit/ir-instruction.h"
27 #include "hphp/runtime/vm/jit/ssa-tmp.h"
28 #include "hphp/runtime/vm/jit/type-array-elem.h"
30 namespace HPHP::jit {
32 namespace {
34 const StaticString s_GLOBALS("GLOBALS");
36 uint32_t iterId(const IRInstruction& inst) {
37 return inst.extra<IterId>()->iterId;
40 //////////////////////////////////////////////////////////////////////
42 AliasClass all_pointees(folly::Range<SSATmp**> srcs) {
43 auto ret = AliasClass{AEmpty};
44 for (auto const& src : srcs) {
45 if (src->isA(TMem)) ret = ret | pointee(src);
47 return ret;
50 // Return an AliasClass containing all locations pointed to by any MemToCell
51 // sources to an instruction.
52 AliasClass all_pointees(const IRInstruction& inst) {
53 return all_pointees(inst.srcs());
56 // Return an AliasClass representing a range of the eval stack that contains
57 // everything below a logical depth.
58 AliasClass stack_below(IRSPRelOffset offset) {
59 return AStack::below(offset);
62 //////////////////////////////////////////////////////////////////////
64 // Return an AliasClass representing an entire ActRec at base + offset.
65 AliasClass actrec(SSATmp* base, IRSPRelOffset offset) {
66 return AStack::range(offset, offset + int32_t{kNumActRecCells});
70 * AliasClass that can be used to represent effects on liveFrame().
72 AliasClass livefp(SSATmp* fp) {
73 return AFBasePtr | AActRec { fp };
76 AliasClass livefp(const IRInstruction& inst) {
77 return livefp(inst.marker().fixupFP());
80 //////////////////////////////////////////////////////////////////////
82 // Determine an AliasClass representing any locals in the instruction's frame
83 // which might be accessed via debug_backtrace().
85 std::pair<const Func*, uint32_t> func_and_depth_from_fp(SSATmp* fp) {
86 if (!fp) return {nullptr, 0};
87 auto fpInst = fp->inst();
88 if (fpInst->is(DefFP)) return {fpInst->marker().func(), 0};
89 if (fpInst->is(DefFuncEntryFP)) {
90 return {fpInst->extra<DefFuncEntryFP>()->func, 0};
92 if (fpInst->is(BeginInlining)) {
93 auto const extra = fpInst->extra<BeginInlining>();
94 return {extra->func, extra->depth};
96 always_assert(false);
99 AliasClass backtrace_locals(const IRInstruction& inst) {
100 auto eachFunc = [&] (auto fn) {
101 auto ac = AEmpty;
102 for (auto fp = inst.marker().fp(); fp; ) {
103 auto const [func, depth] = func_and_depth_from_fp(fp);
104 ac |= fn(func, depth);
105 if (fp->inst()->is(BeginInlining)) {
106 // Walking the marker fp chain in this manner is suspect, but here we
107 // are careful to only materialize the (func, depth) pair using it.
108 fp = fp->inst()->marker().fp();
109 } else {
110 fp = nullptr;
113 return ac;
116 auto const addInspectable =
117 [&] (const Func* func, uint32_t depth) -> AliasClass {
118 auto const meta = func->lookupVarId(s_86metadata.get());
119 return meta != kInvalidId ? ALocal { depth, (uint32_t)meta } : AEmpty;
122 // Either there's no func or no frame-pointer. Either way, be conservative and
123 // assume anything can be read. This can happen in test code, for instance.
124 if (!inst.marker().fp()) return ALocalAny;
126 if (!RuntimeOption::EnableArgsInBacktraces) return eachFunc(addInspectable);
128 return eachFunc([&] (const Func* func, uint32_t depth) {
129 auto ac = AEmpty;
130 auto const numParams = func->numParams();
132 if (func->hasReifiedGenerics()) {
133 // The first non-param local contains the reified generics
134 AliasIdSet reifiedgenerics{ AliasIdSet::IdRange{numParams, numParams + 1} };
135 ac |= ALocal { depth, reifiedgenerics };
138 if (func->cls() && func->cls()->hasReifiedGenerics()) {
139 // There is no way to access the SSATmp for ObjectData of `this` here,
140 // so be very pessimistic
141 ac |= APropAny;
144 if (!numParams) return addInspectable(func, depth) | ac;
146 AliasIdSet params{ AliasIdSet::IdRange{0, numParams} };
147 return addInspectable(func, depth) | ac | ALocal { depth, params };
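// In summary: without EnableArgsInBacktraces only the 86metadata local of
// each frame in the marker fp chain is included; with it, we also include
// the parameter locals, the reified-generics local (the first non-param
// local) when the func has reified generics, and APropAny when the class
// has reified generics (since `this' is not reachable here).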
151 /////////////////////////////////////////////////////////////////////
154 * Modify a GeneralEffects to take potential VM re-entry into account. This
155 * affects may-load, may-store, and kills information for the instruction. The
156 * GeneralEffects should already contain AHeapAny in both loads and stores if
157 * it affects those locations for reasons other than re-entry, but does not
158 * need to if it doesn't.
160 * For loads, we need to take into account any locals potentially accessed by
161 * debug_backtrace().
163 * For kills, locations on the eval stack below the re-entry depth should all
164 * be added.
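 *
 * In rough terms (this is only a summary of the body below, not a change in
 * behavior), given x = { loads, stores, moves, kills, inout, backtrace } the
 * result is:
 *   loads  | AHeapAny | ARdsAny | AVMRegAny | AVMRegState,
 *   stores | AHeapAny | ARdsAny | AVMRegAny,
 *   moves  unchanged,
 *   kills  unioned with the eval stack below the re-entry offset (or just
 *          that stack range if the union is not representable),
 *   inout  unchanged,
 *   backtrace replaced by backtrace_locals(inst).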
166 GeneralEffects may_reenter(const IRInstruction& inst, GeneralEffects x) {
167 auto const may_reenter_is_ok =
168 inst.taken() && inst.taken()->isCatch();
169 always_assert_flog(
170 may_reenter_is_ok,
171 "instruction {} claimed may_reenter, but it isn't allowed to say that",
172 inst
176 * We want to union `killed_stack' into whatever else the instruction already
177 * said it must kill, but if we end up with an unrepresentable AliasClass we
178 * can't return a set that's too big (the `kills' set is unlike the other
179 * AliasClasses in GeneralEffects in that it means it kills /everything/ in the
180 * set, since it's must-information).
182 * If we can't represent the union, just take the stack, in part because we
183 * have some debugging asserts about this right now---but also nothing
184 * actually uses may_reenter with a non-AEmpty kills at the time of this
185 * writing anyway.
187 auto const new_kills = [&] {
188 if (inst.marker().fp() == nullptr) return AEmpty;
190 auto const offset = [&]() -> IRSPRelOffset {
191 auto const fp = canonical(inst.marker().fp());
192 if (fp->inst()->is(BeginInlining)) {
193 auto const extra = fp->inst()->extra<BeginInlining>();
194 auto const fpOffset = extra->spOffset;
195 auto const numSlotsInFrame = extra->func->numSlotsInFrame();
196 auto const numStackElems = inst.marker().bcSPOff() - SBInvOffset{0};
197 return fpOffset - numSlotsInFrame - numStackElems;
200 assertx(fp->inst()->is(DefFP, DefFuncEntryFP));
201 auto const sp = inst.marker().sp();
202 auto const irSPOff = sp->inst()->extra<DefStackData>()->irSPOff;
203 return inst.marker().bcSPOff().to<IRSPRelOffset>(irSPOff);
204 }();
206 auto const killed_stack = stack_below(offset);
207 auto const kills_union = x.kills.precise_union(killed_stack);
208 return kills_union ? *kills_union : killed_stack;
209 }();
211 return GeneralEffects {
212 x.loads | AHeapAny | ARdsAny | AVMRegAny | AVMRegState,
213 x.stores | AHeapAny | ARdsAny | AVMRegAny,
214 x.moves,
215 new_kills,
216 x.inout,
217 backtrace_locals(inst)
221 //////////////////////////////////////////////////////////////////////
223 GeneralEffects may_load_store(AliasClass loads, AliasClass stores) {
224 return GeneralEffects { loads, stores, AEmpty, AEmpty, AEmpty, AEmpty };
227 GeneralEffects may_load_store_kill(AliasClass loads,
228 AliasClass stores,
229 AliasClass kill) {
230 return GeneralEffects { loads, stores, AEmpty, kill, AEmpty, AEmpty };
233 GeneralEffects may_load_store_move(AliasClass loads,
234 AliasClass stores,
235 AliasClass move) {
236 assertx(move <= loads);
237 return GeneralEffects { loads, stores, move, AEmpty, AEmpty, AEmpty };
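// Usage sketch (not tied to any particular opcode; `fp' and `id' are just
// placeholders): an instruction that may read a frame local and read/write
// arbitrary heap locations could be described as
//   may_load_store(ALocal { fp, id } | AHeapAny, AHeapAny);
// one that additionally invalidates the member-instruction scratch state
// would use may_load_store_kill(..., ..., AMIStateAny), as many of the cases
// below do.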
240 //////////////////////////////////////////////////////////////////////
243 * Helper for iterator instructions. They all affect some locals, but are
244 * otherwise the same. Value iters touch one local; key-value iters touch two.
246 GeneralEffects iter_effects(const IRInstruction& inst,
247 SSATmp* fp,
248 AliasClass locals) {
249 auto const iters =
250 AliasClass { aiter_all(fp, inst.extra<IterData>()->args.iterId) };
251 return may_load_store_kill(
252 iters | locals | AHeapAny,
253 iters | locals | AHeapAny,
254 AMIStateAny
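// For example, the value-iter cases below pass ALocal { fp, valId } as
// `locals', while the key-value cases pass
// ALocal { fp, keyId } | ALocal { fp, valId }.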
259 * Construct effects for InterpOne, using the information in its extra data.
261 * We always consider an InterpOne as potentially doing anything to the heap,
262 * potentially re-entering, potentially raising warnings in the current frame,
263 * potentially reading any locals, and potentially reading/writing any stack
264 * location that isn't below the bottom of the stack.
266 * The extra data for the InterpOne comes with some additional information
267 * about which local(s) it may modify, which is all we try to be more precise
268 * about right now.
270 GeneralEffects interp_one_effects(const IRInstruction& inst) {
271 auto const extra = inst.extra<InterpOne>();
272 auto loads = AHeapAny | AStackAny | ALocalAny | ARdsAny | livefp(inst);
273 auto stores = AHeapAny | AStackAny | ARdsAny | AVMRegAny | AVMRegState;
274 if (extra->smashesAllLocals) {
275 stores = stores | ALocalAny;
276 } else {
277 for (auto i = uint32_t{0}; i < extra->nChangedLocals; ++i) {
278 stores = stores | ALocal { inst.src(1), extra->changedLocals[i].id };
282 auto kills = AEmpty;
283 if (isMemberBaseOp(extra->opcode)) {
284 stores = stores | AMIStateAny;
285 kills = kills | AMIStateAny;
286 } else if (isMemberDimOp(extra->opcode) || isMemberFinalOp(extra->opcode)) {
287 stores = stores | AMIStateAny;
288 loads = loads | AMIStateAny;
289 } else {
290 kills = kills | AMIStateAny;
293 return may_load_store_kill(loads, stores, kills);
296 ////////////////////////////////////////////////////////////////////////////////
299 * Construct effects for member instructions that take &tvRef as their last
300 * argument.
302 * These instructions never load tvRef or roProp, but they might store to them.
304 MemEffects minstr_with_tvref(const IRInstruction& inst) {
305 auto const tvRef = inst.src(2);
306 assertx(tvRef->isA(TMemToMISTemp) || tvRef->isA(TNullptr));
307 auto stores = AHeapAny | AMIStateROProp;
308 if (!tvRef->isA(TNullptr)) stores |= pointee(tvRef);
309 return may_load_store(AHeapAny, stores);
312 //////////////////////////////////////////////////////////////////////
314 MemEffects memory_effects_impl(const IRInstruction& inst) {
315 switch (inst.op()) {
317 //////////////////////////////////////////////////////////////////////
318 // Region exits
320 // These exits don't leave the current php function, and could head to code
321 // that could read or write anything as far as we know (including frame
322 // locals).
323 case ReqBindJmp:
324 return ExitEffects {
325 *AUnknown.exclude_vm_reg(),
326 stack_below(inst.extra<ReqBindJmp>()->irSPOff)
328 case ReqInterpBBNoTranslate:
329 return ExitEffects {
330 *AUnknown.exclude_vm_reg(),
331 stack_below(inst.extra<ReqInterpBBNoTranslate>()->irSPOff)
333 case ReqRetranslate:
334 return ExitEffects {
335 *AUnknown.exclude_vm_reg(),
336 stack_below(inst.extra<ReqRetranslate>()->offset)
338 case ReqRetranslateOpt:
339 return ExitEffects {
340 *AUnknown.exclude_vm_reg(),
341 stack_below(inst.extra<ReqRetranslateOpt>()->offset)
343 case JmpSwitchDest:
344 return ExitEffects {
345 *AUnknown.exclude_vm_reg(),
346 *stack_below(inst.extra<JmpSwitchDest>()->spOffBCFromIRSP).
347 precise_union(AMIStateAny)
349 case JmpSSwitchDest:
350 return ExitEffects {
351 *AUnknown.exclude_vm_reg(),
352 *stack_below(inst.extra<JmpSSwitchDest>()->offset).
353 precise_union(AMIStateAny)
356 //////////////////////////////////////////////////////////////////////
357 // Unusual instructions
360 * The ReturnHook sets up the ActRec so the unwinder knows everything is
361 * already released (i.e. it calls ar->setLocalsDecRefd()).
363 * The eval stack is also dead at this point (the return value is passed to
364 * ReturnHook as src(1), and the ReturnHook may not access the stack).
366 case ReturnHook:
367 return may_load_store_kill(
368 AHeapAny | AActRec {inst.src(0)}, AHeapAny,
369 [&] {
370 auto const u =
371 AStackAny.precise_union(ALocalAny)->precise_union(AMIStateAny);
372 // The return hook trashes the frame context in debug builds
373 if (debug) return *u->precise_union(AFContext { inst.src(0) });
374 return *u;
378 // The suspend hooks can load anything (re-entering the VM), but can't write
379 // to frame locals.
380 case SuspendHookAwaitEF:
381 case SuspendHookAwaitEG:
382 case SuspendHookAwaitR:
383 case SuspendHookCreateCont:
384 case SuspendHookYield:
385 // TODO: may-load here probably doesn't need to include ALocalAny normally.
386 return may_load_store_kill(AUnknown, AHeapAny, AMIStateAny);
389 * If we're returning from a function, it's ReturnEffects. The RetCtrl
390 * opcode also suspends resumables, which we model as having any possible
391 * effects.
393 case RetCtrl:
394 if (inst.extra<RetCtrl>()->suspendingResumed) {
395 // Suspending can go anywhere, and doesn't even kill locals.
396 return UnknownEffects {};
398 return ReturnEffects {
399 AStackAny | ALocalAny | AMIStateAny | AFBasePtr
402 case AsyncFuncRetPrefetch:
403 return IrrelevantEffects {};
405 case AsyncFuncRet:
406 case AsyncFuncRetSlow:
407 case AsyncGenRetR:
408 return ReturnEffects { AStackAny | AMIStateAny | livefp(inst.src(1)) };
410 case AsyncGenYieldR:
411 case AsyncSwitchFast:
412 // Suspending can go anywhere, and doesn't even kill locals.
413 return UnknownEffects {};
415 case GenericRetDecRefs:
417 * The may-store information here is ALocalAny: even though we
418 * know it doesn't really "store" to the frame locals, the values
419 * that used to be there are no longer available because they are
420 * DecRef'd, which we are required to report as may-store
421 * information to make it visible to reference count
422 * optimizations. It's conceptually the same as if it was storing
423 * an Uninit over each of the locals, but the stores of uninits
424 * would be dead so we're not actually doing that.
426 return may_load_store_kill(
427 ALocalAny | AHeapAny,
428 ALocalAny | AHeapAny,
429 AMIStateAny
432 case EndCatch: {
433 auto const stack_kills = stack_below(inst.extra<EndCatch>()->offset);
434 return ExitEffects {
435 AUnknown,
436 stack_kills | AMIStateAny
440 case EnterTCUnwind: {
441 auto const stack_kills = stack_below(inst.extra<EnterTCUnwind>()->offset);
442 return ExitEffects {
443 AUnknown,
444 stack_kills | AMIStateAny
449 * BeginInlining must always be the first instruction in the inlined call. It
450 * defines a new FP for the callee but does not perform any stores or
451 * otherwise initialize the FP.
453 case BeginInlining: {
455 * SP relative offset of the first cell in the inlined call.
457 auto inlineStackOff =
458 inst.extra<BeginInlining>()->spOffset + kNumActRecCells;
459 return may_load_store_kill(
460 AEmpty,
461 AEmpty,
463 * This prevents stack slots from the caller from being sunk into the
464 * callee. Note that some of these stack slots overlap with the frame
465 * locals of the callee -- those slots are inaccessible in the inlined
466 * call as frame and stack locations may not alias.
468 stack_below(inlineStackOff)
472 case EndInlining: {
473 assertx(inst.src(0)->inst()->is(BeginInlining));
474 auto const fp = inst.src(0);
475 auto const callee = inst.src(0)->inst()->extra<BeginInlining>()->func;
476 const AliasClass ar = AActRec { inst.src(0) };
477 auto const locals = [&] () -> AliasClass {
478 if (!callee->numLocals()) return AEmpty;
479 return ALocal {fp, AliasIdSet::IdRange(0, callee->numLocals())};
480 }();
482 // NB: It's okay if the AliasIdSet for locals cannot be precise. We want to
483 // kill *every* local in the frame so there's nothing else that can
484 // accidentally be included in the set.
485 return may_load_store_kill(AEmpty, AEmpty, ar | locals | AMIStateAny);
488 case InlineCall:
489 return PureInlineCall {
490 AFBasePtr,
491 inst.src(0),
493 // Right now when we "publish" a frame by storing it in rvmfp() we
494 // implicitly depend on the AFFunc and AFMeta bits being stored. In the
495 // future we may want to track this explicitly.
497 // We also need to ensure that all of our parent frames have stored
498 // this information. To achieve this we also register a load on AFBasePtr,
499 // forcing them to also be published. Notice that we don't actually
500 // depend on this load to properly initialize m_sfp or rvmfp().
501 AliasClass(AFFunc { inst.src(0) }) | AFMeta { inst.src(0) } | AFBasePtr
504 case InterpOne:
505 return interp_one_effects(inst);
506 case InterpOneCF: {
507 auto const extra = inst.extra<InterpOneData>();
508 return ExitEffects {
509 *AUnknown.exclude_vm_reg(),
510 stack_below(extra->spOffset) | AMIStateAny
514 case NativeImpl:
515 return UnknownEffects {};
518 // These C++ helpers can invoke the user error handler and go do whatever
519 // they want to non-frame locations.
520 case VerifyParamCallable:
521 case VerifyParamCls:
522 case VerifyParamFail:
523 case VerifyParamFailHard:
524 case VerifyPropCls:
525 case VerifyPropFail:
526 case VerifyPropFailHard:
527 case VerifyProp:
528 case VerifyPropAll:
529 case VerifyPropCoerce:
530 case VerifyPropCoerceAll:
531 case VerifyReifiedLocalType:
532 case VerifyReifiedReturnType:
533 case VerifyRetCallable:
534 case VerifyRetCls:
535 case VerifyRetFail:
536 case VerifyRetFailHard:
537 return may_load_store(AHeapAny, AHeapAny);
539 case ContEnter:
541 auto const extra = inst.extra<ContEnter>();
542 return CallEffects {
543 // Kills. Everything on the stack.
544 stack_below(extra->spOffset) | AMIStateAny | AVMRegAny,
545 // No inputs. The value being sent is passed explicitly.
546 AEmpty,
547 // ActRec. It is on the heap and we already implicitly assume that
548 // CallEffects can perform arbitrary heap operations.
549 AEmpty,
550 // No outputs.
551 AEmpty,
552 // Locals.
553 backtrace_locals(inst)
557 case Call:
559 auto const extra = inst.extra<Call>();
560 return CallEffects {
561 // Kills. Everything on the stack below the incoming parameters.
562 stack_below(extra->spOffset) | AMIStateAny | AVMRegAny,
563 // Input arguments.
564 extra->numInputs() == 0 ? AEmpty : AStack::range(
565 extra->spOffset,
566 extra->spOffset + extra->numInputs()
568 // ActRec.
569 actrec(inst.src(0), extra->spOffset + extra->numInputs()),
570 // Inout outputs.
571 extra->numOut == 0 ? AEmpty : AStack::range(
572 extra->spOffset + extra->numInputs() + kNumActRecCells,
573 extra->spOffset + extra->numInputs() + kNumActRecCells +
574 extra->numOut
576 // Locals.
577 backtrace_locals(inst)
581 case CallBuiltin:
583 auto const extra = inst.extra<CallBuiltin>();
584 auto const callee = extra->callee;
585 auto const [inout, read] = [&] {
586 auto read = AEmpty;
587 auto inout = AEmpty;
588 auto const paramOff = callee->isMethod() ? 3 : 2;
589 for (auto i = paramOff; i < inst.numSrcs(); ++i) {
590 if (inst.src(i)->type() <= TPtr) {
591 auto const cls = pointee(inst.src(i));
592 if (callee->isInOut(i - paramOff)) {
593 inout = inout | cls;
594 } else {
595 read = read | cls;
599 return std::make_pair(inout, read);
600 }();
601 auto const foldable = callee->isFoldable() ? AEmpty : ARdsAny;
602 return GeneralEffects {
603 read | AHeapAny | foldable | AVMRegAny | AVMRegState,
604 AHeapAny | foldable | AVMRegAny,
605 AEmpty,
606 stack_below(extra->spOffset) | AMIStateAny,
607 inout,
608 AEmpty
612 // Resumable suspension takes everything from the frame and moves it into the
613 // heap.
614 case CreateGen:
615 case CreateAGen:
616 case CreateAFWH: {
617 auto const fp = canonical(inst.src(0));
618 auto fpInst = fp->inst();
619 auto const frame = [&] () -> AliasClass {
620 if (fpInst->is(DefFP, DefFuncEntryFP)) return ALocalAny;
621 assertx(fpInst->is(BeginInlining));
622 auto const nlocals = fpInst->extra<BeginInlining>()->func->numLocals();
623 return nlocals
624 ? ALocal { fp, AliasIdSet::IdRange(0, nlocals)}
625 : AEmpty;
626 }();
627 return may_load_store_move(
628 frame | AActRec { fp },
629 AHeapAny,
630 frame
634 // AGWH construction updates the AsyncGenerator object.
635 case CreateAGWH:
636 return may_load_store(AHeapAny | AActRec { inst.src(0) }, AHeapAny);
638 case CreateAAWH:
640 auto const extra = inst.extra<CreateAAWH>();
641 auto const frame = ALocal {
642 inst.src(0),
643 AliasIdSet {
644 AliasIdSet::IdRange{ extra->first, extra->first + extra->count }
647 return may_load_store(frame, AHeapAny);
650 case CountWHNotDone:
652 auto const extra = inst.extra<CountWHNotDone>();
653 auto const frame = ALocal {
654 inst.src(0),
655 AliasIdSet {
656 AliasIdSet::IdRange{ extra->first, extra->first + extra->count }
659 return may_load_store(frame, AEmpty);
662 // This re-enters to call extension-defined instance constructors.
663 case ConstructInstance:
664 return may_load_store(AHeapAny, AHeapAny);
666 // Closures don't ever throw or reenter on construction
667 case ConstructClosure:
668 return IrrelevantEffects{};
670 case CheckStackOverflow:
671 case CheckSurpriseFlagsEnter:
672 case CheckSurpriseAndStack:
673 return may_load_store(AEmpty, AEmpty);
675 //////////////////////////////////////////////////////////////////////
676 // Iterator instructions
678 case IterInit:
679 case LIterInit:
680 case IterNext:
681 case LIterNext: {
682 auto const& args = inst.extra<IterData>()->args;
683 assertx(!args.hasKey());
684 auto const fp = inst.src(inst.op() == IterNext ? 0 : 1);
685 AliasClass val = ALocal { fp, safe_cast<uint32_t>(args.valId) };
686 return iter_effects(inst, fp, val);
689 case IterInitK:
690 case LIterInitK:
691 case IterNextK:
692 case LIterNextK: {
693 auto const& args = inst.extra<IterData>()->args;
694 assertx(args.hasKey());
695 auto const fp = inst.src(inst.op() == IterNextK ? 0 : 1);
696 AliasClass key = ALocal { fp, safe_cast<uint32_t>(args.keyId) };
697 AliasClass val = ALocal { fp, safe_cast<uint32_t>(args.valId) };
698 return iter_effects(inst, fp, key | val);
701 case IterFree: {
702 auto const base = aiter_base(inst.src(0), iterId(inst));
703 return may_load_store(AHeapAny | base, AHeapAny);
706 case CheckIter: {
707 auto const iter = inst.extra<CheckIter>()->iterId;
708 return may_load_store(aiter_type(inst.src(0), iter), AEmpty);
711 case LdIterBase:
712 return PureLoad { aiter_base(inst.src(0), iterId(inst)) };
714 case LdIterPos:
715 return PureLoad { aiter_pos(inst.src(0), iterId(inst)) };
717 case LdIterEnd:
718 return PureLoad { aiter_end(inst.src(0), iterId(inst)) };
720 case StIterBase:
721 return PureStore { aiter_base(inst.src(0), iterId(inst)), inst.src(1) };
723 case StIterType: {
724 auto const iter = inst.extra<StIterType>()->iterId;
725 return PureStore { aiter_type(inst.src(0), iter), nullptr };
728 case StIterPos:
729 return PureStore { aiter_pos(inst.src(0), iterId(inst)), inst.src(1) };
731 case StIterEnd:
732 return PureStore { aiter_end(inst.src(0), iterId(inst)), inst.src(1) };
734 case KillActRec:
735 return may_load_store_kill(AEmpty, AEmpty, AActRec { inst.src(0) });
737 case KillLoc: {
738 auto const local = inst.extra<LocalId>()->locId;
739 return may_load_store_kill(AEmpty, AEmpty, ALocal { inst.src(0), local });
742 case KillIter: {
743 auto const iters = aiter_all(inst.src(0), iterId(inst));
744 return may_load_store_kill(AEmpty, AEmpty, iters);
747 //////////////////////////////////////////////////////////////////////
748 // Instructions that explicitly manipulate locals
750 case StLoc:
751 case StLocMeta:
752 return PureStore {
753 ALocal { inst.src(0), inst.extra<LocalId>()->locId },
754 inst.src(1),
755 nullptr
758 case StLocRange:
760 auto const extra = inst.extra<StLocRange>();
761 auto acls = AEmpty;
763 for (auto locId = extra->start; locId < extra->end; ++locId) {
764 acls = acls | ALocal { inst.src(0), locId };
766 return PureStore { acls, inst.src(1), nullptr };
769 case LdLoc:
770 return PureLoad { ALocal { inst.src(0), inst.extra<LocalId>()->locId } };
772 case LdLocForeign:
773 return may_load_store(ALocalAny, AEmpty);
775 case CheckLoc:
776 return may_load_store(
777 ALocal { inst.src(0), inst.extra<LocalId>()->locId },
778 AEmpty
781 //////////////////////////////////////////////////////////////////////
782 // Pointer-based loads and stores
784 case LdMem:
785 return PureLoad { pointee(inst.src(0)) };
786 case StMem:
787 case StMemMeta:
788 return PureStore { pointee(inst.src(0)), inst.src(1), inst.src(0) };
790 case LdClsInitElem:
791 return PureLoad { AHeapAny };
793 case StClsInitElem:
794 return PureStore { AHeapAny };
796 case LdPairElem:
797 return PureLoad { AHeapAny };
799 case LdMBase:
800 return PureLoad { AMIStateBase };
802 case StMBase:
803 return PureStore { AMIStateBase, inst.src(0), nullptr };
805 case StMROProp:
806 return PureStore { AMIStateROProp, inst.src(0), nullptr };
808 case CheckMROProp:
809 return may_load_store(AMIStateROProp, AEmpty);
811 case FinishMemberOp:
812 return may_load_store_kill(AEmpty, AEmpty, AMIStateAny);
814 case IsNTypeMem:
815 case IsTypeMem:
816 case CheckTypeMem:
817 case CheckInitMem:
818 return may_load_store(pointee(inst.src(0)), AEmpty);
820 case CheckRDSInitialized:
821 return may_load_store(
822 ARds { inst.extra<CheckRDSInitialized>()->handle },
823 AEmpty
825 case MarkRDSInitialized:
826 return may_load_store(
827 AEmpty,
828 ARds { inst.extra<MarkRDSInitialized>()->handle }
830 case MarkRDSAccess:
831 return IrrelevantEffects{};
832 // LdTVFromRDS and StTVInRDS load/store the aux bit, so they cannot be
833 // PureLoad/PureStore -- load/store elim do not track aux bit accesses.
834 case LdTVFromRDS:
835 return may_load_store(
836 ARds { inst.extra<LdTVFromRDS>()->handle },
837 AEmpty
839 case StTVInRDS:
840 return may_load_store(
841 AEmpty,
842 ARds { inst.extra<StTVInRDS>()->handle }
845 case InitProps:
846 return may_load_store(
847 AHeapAny,
848 AHeapAny | ARds { inst.extra<InitProps>()->cls->propHandle() }
851 case InitSProps:
852 return may_load_store(
853 AHeapAny,
854 AHeapAny | ARds { inst.extra<InitSProps>()->cls->sPropInitHandle() }
857 case LdARFunc:
858 case LdClsFromClsMeth:
859 case LdFuncFromClsMeth:
860 case LdFuncFromRFunc:
861 case LdGenericsFromRFunc:
862 case LdClsFromRClsMeth:
863 case LdFuncFromRClsMeth:
864 case LdGenericsFromRClsMeth:
865 return may_load_store(AEmpty, AEmpty);
867 //////////////////////////////////////////////////////////////////////
868 // Object/Ref loads/stores
870 case InitObjProps:
871 return may_load_store(AEmpty, APropAny);
873 case InitObjMemoSlots:
874 // Writes to memo slots, but these are not modeled.
875 return IrrelevantEffects {};
877 case LockObj:
878 // Writes object attributes, but these are not modeled.
879 return IrrelevantEffects {};
881 // Loads $obj->trace, stores $obj->file and $obj->line.
882 case InitThrowableFileAndLine:
883 return may_load_store(AHeapAny, APropAny);
885 //////////////////////////////////////////////////////////////////////
886 // Array loads and stores
888 case InitVecElem: {
889 auto const arr = inst.src(0);
890 auto const val = inst.src(1);
891 auto const idx = inst.extra<InitVecElem>()->index;
892 return PureStore { AElemI { arr, idx }, val, arr };
895 case InitDictElem: {
896 auto const arr = inst.src(0);
897 auto const val = inst.src(1);
898 auto const key = inst.extra<InitDictElem>()->key;
899 return PureStore { AElemS { arr, key }, val, arr };
902 case InitStructElem: {
903 auto const arr = inst.src(0);
904 auto const val = inst.src(1);
905 auto const key = inst.extra<InitStructElem>()->key;
906 return PureStore { AElemS { arr, key }, val, arr };
909 case LdMonotypeDictVal: {
910 // TODO(mcolavita): When we have a type-array-elem method to get the key
911 // of an arbitrary array-like type, use that to narrow this load.
912 return PureLoad { AElemAny };
915 case LdMonotypeVecElem:
916 case LdVecElem: {
917 auto const base = inst.src(0);
918 auto const key = inst.src(1);
919 return PureLoad {
920 key->hasConstVal() ? AElemI { base, key->intVal() } : AElemIAny
924 case DictGetK:
925 case KeysetGetK:
926 case BespokeGet:
927 case KeysetGetQuiet:
928 case DictGetQuiet: {
929 auto const base = inst.src(0);
930 auto const key = inst.src(1);
931 assertx(key->type().subtypeOfAny(TInt, TStr));
932 if (key->isA(TInt)) {
933 return PureLoad {
934 key->hasConstVal() ? AElemI { base, key->intVal() } : AElemIAny,
936 } else {
937 return PureLoad {
938 key->hasConstVal() ? AElemS { base, key->strVal() } : AElemSAny,
943 case DictIsset:
944 case DictIdx:
945 case KeysetIsset:
946 case KeysetIdx:
947 case AKExistsDict:
948 case AKExistsKeyset:
949 case BespokeGetThrow: {
950 auto const base = inst.src(0);
951 auto const key = inst.src(1);
952 assertx(key->type().subtypeOfAny(TInt, TStr));
953 auto const elem = [&] {
954 if (key->isA(TInt)) {
955 return key->hasConstVal() ? AElemI { base, key->intVal() } : AElemIAny;
956 } else {
957 return key->hasConstVal() ? AElemS { base, key->strVal() } : AElemSAny;
959 }();
960 return may_load_store(elem, AEmpty);
963 case InitVecElemLoop:
965 auto const extra = inst.extra<InitVecElemLoop>();
966 auto const stack_in = AStack::range(
967 extra->offset,
968 extra->offset + static_cast<int32_t>(extra->size)
970 return may_load_store_move(stack_in, AElemIAny, stack_in);
973 // These ops may read anything referenced by the input array or object,
974 // but not any of the locals or stack frame slots.
975 case NewLoggingArray:
976 case ProfileArrLikeProps:
977 return may_load_store(AHeapAny, AEmpty);
979 case NewKeysetArray:
981 // NewKeysetArray is reading elements from the stack, but writes to a
982 // completely new array, so we can treat the store set as empty.
983 auto const extra = inst.extra<NewKeysetArray>();
984 auto const stack_in = AStack::range(
985 extra->offset,
986 extra->offset + static_cast<int32_t>(extra->size)
988 return may_load_store_move(stack_in, AEmpty, stack_in);
991 case NewStructDict:
993 // NewStructDict reads elements from the stack, but writes to a
994 // completely new array, so we can treat the store set as empty.
995 auto const extra = inst.extra<NewStructData>();
996 auto const stack_in = AStack::range(
997 extra->offset,
998 extra->offset + static_cast<int32_t>(extra->numKeys)
1000 return may_load_store_move(stack_in, AEmpty, stack_in);
1003 case NewBespokeStructDict:
1005 // NewBespokeStructDict reads elements from the stack, but writes to
1006 // a completely new array, so we can treat the stores as empty.
1007 auto const extra = inst.extra<NewBespokeStructDict>();
1008 auto const stack_in = AStack::range(
1009 extra->offset,
1010 extra->offset + static_cast<int32_t>(extra->numSlots)
1012 return may_load_store_move(stack_in, AEmpty, stack_in);
1015 case MemoGetStaticValue:
1016 case MemoGetLSBValue:
1017 case MemoGetInstanceValue:
1018 // Only reads the memo value (which isn't modeled here).
1019 return may_load_store(AEmpty, AEmpty);
1021 case MemoSetStaticValue:
1022 case MemoSetLSBValue:
1023 case MemoSetInstanceValue:
1024 // Writes to the memo value (which isn't modeled)
1025 return may_load_store(AEmpty, AEmpty);
1027 case MemoGetStaticCache:
1028 case MemoGetLSBCache:
1029 case MemoSetStaticCache:
1030 case MemoSetLSBCache: {
1031 // Reads some (non-zero) set of locals for keys, and reads/writes from the
1032 // memo cache (which isn't modeled).
1033 auto const extra = inst.extra<MemoCacheStaticData>();
1034 auto const frame = ALocal {
1035 inst.src(0),
1036 AliasIdSet{
1037 AliasIdSet::IdRange{
1038 extra->keys.first,
1039 extra->keys.first + extra->keys.count
1044 return may_load_store(frame, AEmpty);
1047 case MemoGetInstanceCache:
1048 case MemoSetInstanceCache: {
1049 // Reads some set of locals for keys, and reads/writes from the memo cache
1050 // (which isn't modeled).
1051 auto const extra = inst.extra<MemoCacheInstanceData>();
1052 auto const frame = [&]() -> AliasClass {
1053 // Unlike MemoGet/SetStaticCache, we can have an empty key range here.
1054 if (extra->keys.count == 0) return AEmpty;
1056 return ALocal {
1057 inst.src(0),
1058 AliasIdSet{
1059 AliasIdSet::IdRange{
1060 extra->keys.first,
1061 extra->keys.first + extra->keys.count
1065 }();
1066 return may_load_store(frame, AEmpty);
1069 case BespokeIterGetKey:
1070 case LdPtrIterKey:
1071 // Array element keys are not tracked by memory effects right
1072 // now. Be conservative and use AElemAny.
1073 return may_load_store(AElemAny, AEmpty);
1075 case LdPtrIterVal:
1076 return PureLoad { AElemAny };
1078 case BespokeIterGetVal:
1079 return may_load_store(AElemAny, AEmpty);
1081 case ElemDictK:
1082 return IrrelevantEffects {};
1084 case VecFirst: {
1085 auto const base = inst.src(0);
1086 return may_load_store(AElemI { base, 0 }, AEmpty);
1088 case VecLast: {
1089 auto const base = inst.src(0);
1090 if (base->hasConstVal(TArrLike)) {
1091 auto const index = static_cast<int64_t>(base->arrLikeVal()->size() - 1);
1092 return may_load_store(AElemI { base, index }, AEmpty);
1094 return may_load_store(AElemIAny, AEmpty);
1096 case DictFirst:
1097 case DictLast:
1098 case KeysetFirst:
1099 case KeysetLast:
1100 return may_load_store(AElemAny, AEmpty);
1102 case DictFirstKey:
1103 case DictLastKey:
1104 case LdMonotypeDictTombstones:
1105 case LdMonotypeDictKey:
1106 return may_load_store(AEmpty, AEmpty);
1108 case CheckDictKeys:
1109 case CheckDictOffset:
1110 case CheckKeysetOffset:
1111 case CheckMissingKeyInArrLike:
1112 case ProfileDictAccess:
1113 case ProfileKeysetAccess:
1114 case CheckArrayCOW:
1115 case ProfileArrayCOW:
1116 return may_load_store(AHeapAny, AEmpty);
1118 case SameArrLike:
1119 case NSameArrLike:
1120 return may_load_store(AElemAny, AEmpty);
1122 case EqArrLike:
1123 case NeqArrLike: {
1124 if (inst.src(0)->type() <= TKeyset && inst.src(1)->type() <= TKeyset) {
1125 return may_load_store(AElemAny, AEmpty);
1126 } else {
1127 return may_load_store(AHeapAny, AHeapAny);
1131 case AKExistsObj:
1132 return may_load_store(AHeapAny, AHeapAny);
1134 //////////////////////////////////////////////////////////////////////
1135 // Member instructions
1137 case CheckMBase:
1138 return may_load_store(pointee(inst.src(0)), AEmpty);
1141 * Various minstr opcodes that take a Lval in src 0, which may or may not
1142 * point to a frame local or the evaluation stack. Some may read or write to
1143 * that pointer while some only read. They can all re-enter the VM and access
1144 * arbitrary heap locations.
1146 case IncDecElem:
1147 case SetElem:
1148 case SetNewElem:
1149 case SetOpElem:
1150 case SetNewElemDict:
1151 case SetNewElemVec:
1152 case SetNewElemKeyset:
1153 case UnsetElem:
1154 case ElemDictD:
1155 case ElemDictU:
1156 case BespokeElem:
1157 case ElemDX:
1158 case ElemUX:
1159 case SetRange:
1160 case SetRangeRev:
1161 // These member ops will load and store from the base lval which they
1162 // take as their first argument, which may point anywhere in the heap.
1163 return may_load_store(
1164 AHeapAny | all_pointees(inst),
1165 AHeapAny | all_pointees(inst)
1168 case CGetElem:
1169 case IssetElem:
1170 case ElemX:
1171 case CGetProp:
1172 case CGetPropQ:
1173 case SetProp:
1174 case UnsetProp:
1175 case IssetProp:
1176 case IncDecProp:
1177 case SetOpProp:
1178 case ReserveVecNewElem:
1179 return may_load_store(AHeapAny, AHeapAny);
1182 * Intermediate minstr operations. In addition to a base pointer like the
1183 * operations above, these may take a pointer to MInstrState::tvRef
1184 * which they may store to (but not read from).
1186 case PropX:
1187 case PropDX:
1188 case PropQ:
1189 return minstr_with_tvref(inst);
1192 * Collection accessors can read from their inner array buffer, but stores
1193 * COW and behave as if they only affect collection memory locations. We
1194 * don't track those, so we return AEmpty for now.
1196 case MapIsset:
1197 case PairIsset:
1198 case VectorIsset:
1199 return may_load_store(AHeapAny, AEmpty /* Note */);
1200 case MapGet:
1201 case MapSet:
1202 case VectorSet:
1203 return may_load_store(AHeapAny, AEmpty /* Note */);
1205 case LdInitPropAddr:
1206 return may_load_store(
1207 AProp {
1208 inst.src(0),
1209 safe_cast<uint16_t>(inst.extra<LdInitPropAddr>()->index)
1211 AEmpty
1213 case LdInitRDSAddr:
1214 return may_load_store(
1215 ARds { inst.extra<LdInitRDSAddr>()->handle },
1216 AEmpty
1219 //////////////////////////////////////////////////////////////////////
1220 // Instructions that allocate new objects without reading any other memory
1221 // at all; any effects they have on some types of memory locations we
1222 // track are isolated from anything else we care about.
1224 case NewClsMeth:
1225 case NewRClsMeth:
1226 case NewCol:
1227 case NewColFromArray:
1228 case NewPair:
1229 case NewInstanceRaw:
1230 case NewDictArray:
1231 case NewRFunc:
1232 case FuncCred:
1233 case AllocVec:
1234 case AllocStructDict:
1235 case AllocBespokeStructDict:
1236 case ConvDblToStr:
1237 case ConvIntToStr:
1238 case InitStructPositions:
1239 case AllocInitROM:
1240 case StPtrAt:
1241 case StTypeAt:
1242 case VoidPtrAsDataType:
1243 return IrrelevantEffects {};
1245 case AllocObj:
1246 // AllocObj re-enters to call constructors, but if it weren't for that we
1247 // could ignore its loads and stores since it's a new object.
1248 return may_load_store(AEmpty, AEmpty);
1249 case AllocObjReified:
1250 // Similar to AllocObj but also stores the reification
1251 return may_load_store(AEmpty, AHeapAny);
1253 //////////////////////////////////////////////////////////////////////
1254 // Instructions that explicitly manipulate the stack.
1256 case LdStk:
1257 return PureLoad { AStack::at(inst.extra<LdStk>()->offset) };
1259 case StStk:
1260 case StStkMeta:
1261 return PureStore {
1262 AStack::at(inst.extra<IRSPRelOffsetData>()->offset),
1263 inst.src(1),
1264 nullptr
1267 case StStkRange: {
1268 auto const extra = inst.extra<StStkRange>();
1269 auto const startOff = extra->start;
1270 auto const count = extra->count;
1271 return PureStore {
1272 AStack::range(startOff, startOff + static_cast<int32_t>(count)),
1273 inst.src(1),
1274 nullptr
1278 case StOutValue:
1279 // Technically these writes affect the caller's stack, but there is no way
1280 // to actually observe them from within the callee. They can also only
1281 // occur once on any exit path from a function.
1282 return may_load_store(AEmpty, AEmpty);
1284 case LdOutAddr:
1285 return IrrelevantEffects{};
1287 case CheckStk:
1288 return may_load_store(
1289 AStack::at(inst.extra<CheckStk>()->offset),
1290 AEmpty
1293 case DbgTraceCall:
1294 return may_load_store(AStackAny | ALocalAny, AEmpty);
1296 case Unreachable:
1297 // Unreachable code kills every memory location.
1298 return may_load_store_kill(AEmpty, AEmpty, AUnknown);
1300 case ResolveTypeStruct: {
1301 auto const extra = inst.extra<ResolveTypeStructData>();
1302 auto const stack_in = AStack::range(
1303 extra->offset,
1304 extra->offset + static_cast<int32_t>(extra->size)
1306 return may_load_store(AliasClass(stack_in)|AHeapAny, AHeapAny);
1309 case DefFP:
1310 return may_load_store(AFBasePtr, AFBasePtr);
1312 case DefFuncEntryFP:
1313 return may_load_store(livefp(inst.src(0)), livefp(inst.dst()));
1315 case LdARFlags:
1316 return PureLoad { AFMeta { inst.src(0) }};
1318 case LdUnitPerRequestFilepath:
1319 return PureLoad {
1320 ARds { inst.extra<LdUnitPerRequestFilepath>()->handle },
1323 case LdImplicitContext:
1324 return PureLoad { ARds { ImplicitContext::activeCtx.handle() } };
1326 case StImplicitContext:
1327 return PureStore {
1328 ARds { ImplicitContext::activeCtx.handle() }, inst.src(0), nullptr
1331 case StImplicitContextWH:
1332 return may_load_store(
1333 ARds { ImplicitContext::activeCtx.handle() },
1334 AEmpty
1337 //////////////////////////////////////////////////////////////////////
1338 // Instructions that never read or write memory locations tracked by this
1339 // module.
1341 case AbsDbl:
1342 case AddDbl:
1343 case AddInt:
1344 case AddIntO:
1345 case AddOffset:
1346 case AdvanceDictPtrIter:
1347 case AdvanceVecPtrIter:
1348 case AndInt:
1349 case AssertType:
1350 case AssertLoc:
1351 case AssertStk:
1352 case AssertMBase:
1353 case BespokeIterEnd:
1354 case BespokeIterFirstPos:
1355 case BespokeIterLastPos:
1356 case ConvFuncPrologueFlagsToARFlags:
1357 case DefFrameRelSP:
1358 case DefFuncPrologueCallee:
1359 case DefFuncPrologueCtx:
1360 case DefFuncPrologueFlags:
1361 case DefFuncPrologueNumArgs:
1362 case DefRegSP:
1363 case EndGuards:
1364 case EnterPrologue:
1365 case EqBool:
1366 case EqCls:
1367 case EqLazyCls:
1368 case EqFunc:
1369 case EqStrPtr:
1370 case EqArrayDataPtr:
1371 case EqDbl:
1372 case EqInt:
1373 case EqPtrIter:
1374 case GetDictPtrIter:
1375 case GetVecPtrIter:
1376 case GteBool:
1377 case GteInt:
1378 case GtBool:
1379 case GtInt:
1380 case Jmp:
1381 case JmpNZero:
1382 case JmpZero:
1383 case LdPropAddr:
1384 case LdStkAddr:
1385 case LdVecElemAddr:
1386 case LteBool:
1387 case LteDbl:
1388 case LteInt:
1389 case LtBool:
1390 case LtInt:
1391 case GtDbl:
1392 case GteDbl:
1393 case LtDbl:
1394 case DivDbl:
1395 case DivInt:
1396 case MulDbl:
1397 case MulInt:
1398 case MulIntO:
1399 case NeqBool:
1400 case NeqDbl:
1401 case NeqInt:
1402 case SameObj:
1403 case NSameObj:
1404 case EqRes:
1405 case NeqRes:
1406 case CmpBool:
1407 case CmpInt:
1408 case CmpDbl:
1409 case SubDbl:
1410 case SubInt:
1411 case SubIntO:
1412 case XorBool:
1413 case XorInt:
1414 case OrInt:
1415 case AssertNonNull:
1416 case CheckNonNull:
1417 case CheckNullptr:
1418 case CheckSmashableClass:
1419 case Ceil:
1420 case Floor:
1421 case DefLabel:
1422 case CheckInit:
1423 case Nop:
1424 case Mod:
1425 case Conjure:
1426 case ConjureUse:
1427 case EndBlock:
1428 case ConvBoolToInt:
1429 case ConvBoolToDbl:
1430 case DefConst:
1431 case LdLocAddr:
1432 case Sqrt:
1433 case Shl:
1434 case Shr:
1435 case Lshr:
1436 case IsNType:
1437 case IsType:
1438 case Mov:
1439 case ConvDblToBool:
1440 case ConvDblToInt:
1441 case DblAsBits:
1442 case LdMIStateTempBaseAddr:
1443 case LdClsCns:
1444 case LdSubClsCns:
1445 case LdResolvedTypeCns:
1446 case LdResolvedTypeCnsClsName:
1447 case LdResolvedTypeCnsNoCheck:
1448 case CheckSubClsCns:
1449 case LdClsCnsVecLen:
1450 case FuncHasAttr:
1451 case ClassHasAttr:
1452 case LdFuncRequiredCoeffects:
1453 case IsFunReifiedGenericsMatched:
1454 case JmpPlaceholder:
1455 case LdSmashable:
1456 case LdSmashableFunc:
1457 case LdRDSAddr:
1458 case CheckRange:
1459 case ProfileType:
1460 case LdIfaceMethod:
1461 case InstanceOfIfaceVtable:
1462 case IsTypeStructCached:
1463 case LdTVAux:
1464 case MethodExists:
1465 case GetTime:
1466 case GetTimeNs:
1467 case ProfileInstanceCheck:
1468 case Select:
1469 case LookupSPropSlot:
1470 case ConvPtrToLval:
1471 case ProfileProp:
1472 case ProfileIsTypeStruct:
1473 case LdLazyClsName:
1474 case DirFromFilepath:
1475 case CheckFuncNeedsCoverage:
1476 case RecordFuncCall:
1477 case LoadBCSP:
1478 case StructDictSlot:
1479 case StructDictElemAddr:
1480 case StructDictAddNextSlot:
1481 case StructDictTypeBoundCheck:
1482 return IrrelevantEffects {};
1484 case LookupClsCns:
1485 case LookupClsCtxCns:
1486 return may_load_store(AEmpty, AEmpty);
1488 case StClosureArg:
1489 return PureStore {
1490 AProp {
1491 inst.src(0),
1492 safe_cast<uint16_t>(inst.extra<StClosureArg>()->index)
1494 inst.src(1),
1495 inst.src(0)
1498 case StArResumeAddr:
1499 return PureStore { AFMeta { inst.src(0) }, nullptr };
1501 case LdFrameThis:
1502 case LdFrameCls:
1503 return PureLoad { AFContext { inst.src(0) }};
1505 case StFrameCtx:
1506 return PureStore { AFContext { inst.src(0) }, inst.src(1) };
1508 case StFrameFunc:
1509 return PureStore { AFFunc { inst.src(0) }, nullptr };
1511 case StFrameMeta:
1512 return PureStore { AFMeta { inst.src(0) }, nullptr };
1514 case StVMFP:
1515 return PureStore { AVMFP, inst.src(0) };
1517 case StVMSP:
1518 return PureStore { AVMSP, inst.src(0) };
1520 case StVMPC:
1521 return PureStore { AVMPC, nullptr };
1523 case StVMReturnAddr:
1524 return PureStore { AVMRetAddr, inst.src(0) };
1526 case StVMRegState:
1527 return PureStore { AVMRegState, inst.src(0) };
1529 //////////////////////////////////////////////////////////////////////
1530 // Instructions that technically do some things w/ memory, but not in any way
1531 // we currently care about. They however don't return IrrelevantEffects
1532 // because we assume (in refcount-opts) that IrrelevantEffects instructions
1533 // can't even inspect Countable reference count fields, and several of these
1534 // can. All GeneralEffects instructions are assumed to possibly do so.
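// (Hence the cases below return may_load_store(AEmpty, AEmpty), i.e. a
// GeneralEffects with empty sets, rather than IrrelevantEffects {}.)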
1536 case DecRefNZ:
1537 case ProfileDecRef:
1538 case AFWHBlockOn:
1539 case AFWHPushTailFrame:
1540 case IncRef:
1541 case LdClosureCls:
1542 case LdClosureThis:
1543 case LdRetVal:
1544 case ConvStrToInt:
1545 case ConvResToInt:
1546 case OrdStr:
1547 case ChrInt:
1548 case CreateSSWH:
1549 case CheckSurpriseFlags:
1550 case CheckType:
1551 case ZeroErrorLevel:
1552 case RestoreErrorLevel:
1553 case CheckCold:
1554 case ContValid:
1555 case IncProfCounter:
1556 case IncCallCounter:
1557 case IncStat:
1558 case ContCheckNext:
1559 case CountVec:
1560 case CountDict:
1561 case CountKeyset:
1562 case HasReifiedGenerics:
1563 case InstanceOf:
1564 case InstanceOfBitmask:
1565 case NInstanceOfBitmask:
1566 case InstanceOfIface:
1567 case InterfaceSupportsArrLike:
1568 case InterfaceSupportsDbl:
1569 case InterfaceSupportsInt:
1570 case InterfaceSupportsStr:
1571 case IsLegacyArrLike:
1572 case IsWaitHandle:
1573 case IsCol:
1574 case HasToString:
1575 case DbgAssertRefCount:
1576 case DbgCheckLocalsDecRefd:
1577 case GtStr:
1578 case GteStr:
1579 case LtStr:
1580 case LteStr:
1581 case EqStr:
1582 case NeqStr:
1583 case SameStr:
1584 case NSameStr:
1585 case CmpStr:
1586 case GtRes:
1587 case GteRes:
1588 case LtRes:
1589 case LteRes:
1590 case CmpRes:
1591 case LdBindAddr:
1592 case LdSSwitchDest:
1593 case RBTraceEntry:
1594 case RBTraceMsg:
1595 case ConvIntToBool:
1596 case ConvIntToDbl:
1597 case ConvStrToBool:
1598 case ConvStrToDbl:
1599 case ConvResToDbl:
1600 case ExtendsClass:
1601 case LdUnwinderValue:
1602 case LdClsName:
1603 case LdLazyCls:
1604 case LdAFWHActRec:
1605 case LdContActRec:
1606 case LdContArKey:
1607 case LdContArValue:
1608 case LdContField:
1609 case LdContResumeAddr:
1610 case StContArKey:
1611 case StContArValue:
1612 case StContArState:
1613 case ContArIncIdx:
1614 case ContArIncKey:
1615 case ContArUpdateIdx:
1616 case LdClsCachedSafe:
1617 case LdClsInitData:
1618 case UnwindCheckSideExit:
1619 case LdCns:
1620 case LdFuncVecLen:
1621 case LdClsMethod:
1622 case LdClsMethodCacheCls:
1623 case LdClsMethodCacheFunc:
1624 case LdClsMethodFCacheFunc:
1625 case LdTypeCns:
1626 case LdTypeCnsNoThrow:
1627 case LdTypeCnsClsName:
1628 case ProfileSwitchDest:
1629 case LdFuncCls:
1630 case LdFuncInOutBits:
1631 case LdFuncNumParams:
1632 case LdFuncName:
1633 case LdMethCallerName:
1634 case LdObjClass:
1635 case LdObjInvoke:
1636 case LdObjMethodD:
1637 case LdObjMethodS:
1638 case LdStrLen:
1639 case StringIsset:
1640 case LdWHResult:
1641 case LdWHState:
1642 case LdWHNotDone:
1643 case LookupClsMethod:
1644 case LookupClsRDS:
1645 case StrictlyIntegerConv:
1646 case DbgAssertFunc:
1647 case ProfileCall:
1648 case ProfileMethod:
1649 case DecReleaseCheck:
1650 return may_load_store(AEmpty, AEmpty);
1652 case BeginCatch:
1653 return may_load_store(AEmpty, AVMRegAny | AVMRegState);
1655 case LogArrayReach:
1656 case LogGuardFailure:
1657 return may_load_store(AHeapAny, AEmpty);
1659 // Some that touch memory we might care about later, but currently don't:
1660 case ColIsEmpty:
1661 case ColIsNEmpty:
1662 case ConvTVToBool:
1663 case ConvObjToBool:
1664 case CountCollection:
1665 case LdVectorSize:
1666 case CheckVecBounds:
1667 case LdColVec:
1668 case LdColDict:
1669 return may_load_store(AEmpty, AEmpty);
1671 //////////////////////////////////////////////////////////////////////
1672 // Instructions that can re-enter the VM and touch most heap things. They
1673 // also may generally write to the eval stack below an offset (see
1674 // alias-class.h above AStack for more).
1676 case DecRef:
1677 return may_load_store(AHeapAny, AHeapAny);
1679 case ReleaseShallow:
1680 return may_load_store(AHeapAny, AHeapAny);
1682 case GetMemoKey:
1683 return may_load_store(AHeapAny, AHeapAny);
1685 case GetMemoKeyScalar:
1686 return IrrelevantEffects{};
1688 case ProfileGlobal:
1689 case LdGblAddr:
1690 case LdGblAddrDef:
1691 case BaseG:
1692 return may_load_store(AEmpty, AEmpty);
1694 case LdClsCtor:
1695 return may_load_store(AEmpty, AEmpty);
1697 case RaiseCoeffectsCallViolation:
1698 case RaiseCoeffectsFunParamCoeffectRulesViolation:
1699 case RaiseCoeffectsFunParamTypeViolation:
1700 return may_load_store(AEmpty, AEmpty);
1702 case LdClsPropAddrOrNull: // may run 86{s,p}init, which can autoload
1703 case LdClsPropAddrOrRaise: // raises errors, and 86{s,p}init
1704 return may_load_store(
1705 AHeapAny,
1706 AHeapAny | all_pointees(inst) | AMIStateROProp
1708 case Clone:
1709 case ThrowArrayIndexException:
1710 case ThrowArrayKeyException:
1711 case ThrowUninitLoc:
1712 case ThrowUndefPropException:
1713 case RaiseTooManyArg:
1714 case RaiseError:
1715 case RaiseNotice:
1716 case RaiseWarning:
1717 case RaiseForbiddenDynCall:
1718 case RaiseForbiddenDynConstruct:
1719 case RaiseStrToClassNotice:
1720 case CheckClsMethFunc:
1721 case CheckClsReifiedGenericMismatch:
1722 case CheckFunReifiedGenericMismatch:
1723 case CheckInOutMismatch:
1724 case CheckReadonlyMismatch:
1725 case ConvTVToStr:
1726 case ConvObjToStr:
1727 case Count: // re-enters on CountableClass
1728 case GtObj:
1729 case GteObj:
1730 case LtObj:
1731 case LteObj:
1732 case EqObj:
1733 case NeqObj:
1734 case CmpObj:
1735 case GtArrLike:
1736 case GteArrLike:
1737 case LtArrLike:
1738 case LteArrLike:
1739 case CmpArrLike:
1740 case OODeclExists:
1741 case LdCls: // autoload
1742 case LdClsCached: // autoload
1743 case LdFunc: // autoload
1744 case LdFuncCached: // autoload
1745 case InitClsCns: // autoload
1746 case InitSubClsCns: // May run 86cinit
1747 case ProfileSubClsCns: // May run 86cinit
1748 case LookupClsMethodCache: // autoload
1749 case LookupClsMethodFCache: // autoload
1750 case LookupCnsE:
1751 case LookupFuncCached: // autoload
1752 case StringGet: // raise_notice
1753 case OrdStrIdx: // raise_notice
1754 case AddNewElemKeyset: // can re-enter
1755 case DictGet:
1756 case KeysetGet:
1757 case DictSet:
1758 case BespokeSet:
1759 case BespokeAppend:
1760 case BespokeUnset:
1761 case StructDictUnset:
1762 case ConcatStrStr:
1763 case PrintStr:
1764 case PrintBool:
1765 case PrintInt:
1766 case ConcatIntStr:
1767 case ConcatStrInt:
1768 case ConvObjToDbl:
1769 case ConvObjToInt:
1770 case ConvTVToInt:
1771 case ConcatStr3:
1772 case ConcatStr4:
1773 case ConvTVToDbl:
1774 case ConvObjToVec:
1775 case ConvObjToDict:
1776 case ConvObjToKeyset:
1777 case ThrowOutOfBounds:
1778 case ThrowInvalidArrayKey:
1779 case ThrowInvalidOperation:
1780 case ThrowCallReifiedFunctionWithoutGenerics:
1781 case ThrowDivisionByZeroException:
1782 case ThrowHasThisNeedStatic:
1783 case ThrowInOutMismatch:
1784 case ThrowReadonlyMismatch:
1785 case ThrowLateInitPropError:
1786 case ThrowMissingArg:
1787 case ThrowMissingThis:
1788 case ThrowParameterWrongType:
1789 case ArrayMarkLegacyShallow:
1790 case ArrayMarkLegacyRecursive:
1791 case ThrowCannotModifyReadonlyCollection:
1792 case ThrowLocalMustBeValueTypeException:
1793 case ThrowMustBeEnclosedInReadonly:
1794 case ThrowMustBeMutableException:
1795 case ThrowMustBeReadonlyException:
1796 case ThrowMustBeValueTypeException:
1797 case ArrayUnmarkLegacyShallow:
1798 case ArrayUnmarkLegacyRecursive:
1799 case SetOpTV:
1800 case OutlineSetOp:
1801 case ThrowAsTypeStructException:
1802 case PropTypeRedefineCheck: // Can raise and autoload
1803 case HandleRequestSurprise:
1804 case BespokeEscalateToVanilla:
1805 return may_load_store(AHeapAny, AHeapAny);
1807 case AddNewElemVec:
1808 case RaiseErrorOnInvalidIsAsExpressionType:
1809 case IsTypeStruct:
1810 case RecordReifiedGenericsAndGetTSList:
1811 case CopyArray:
1812 return may_load_store(AElemAny, AEmpty);
1814 case ConvArrLikeToVec:
1815 case ConvArrLikeToDict:
1816 case ConvArrLikeToKeyset: // Decrefs input values
1817 return may_load_store(AElemAny, AEmpty);
1819 // debug_backtrace() traverses stack and WaitHandles on the heap.
1820 case DebugBacktrace:
1821 return may_load_store(AHeapAny|ALocalAny|AStackAny, AHeapAny);
1823 // This instruction doesn't touch memory we track, except that it may
1824 // re-enter to construct php Exception objects. During this re-entry anything
1825 // can happen (e.g. a surprise flag check could cause a php signal handler to
1826 // run arbitrary code).
1827 case AFWHPrepareChild:
1828 return may_load_store(AActRec { inst.src(0) }, AEmpty);
1830 //////////////////////////////////////////////////////////////////////
1831 // The following instructions are used for debugging memory optimizations.
1832 // We can't ignore them, because they can prevent future optimizations;
1833 // eg t1 = LdStk<N>; DbgTrashStk<N>; StStk<N> t1
1834 // If we ignore the DbgTrashStk it looks like the StStk is redundant
1836 case DbgTrashStk:
1837 return may_load_store_kill(
1838 AEmpty, AEmpty, AStack::at(inst.extra<DbgTrashStk>()->offset));
1839 case DbgTrashFrame:
1840 return may_load_store_kill(
1841 AEmpty, AEmpty, actrec(inst.src(0), inst.extra<DbgTrashFrame>()->offset));
1842 case DbgTrashMem:
1843 return may_load_store_kill(AEmpty, AEmpty, pointee(inst.src(0)));
1845 //////////////////////////////////////////////////////////////////////
1849 not_reached();
1852 //////////////////////////////////////////////////////////////////////
1854 DEBUG_ONLY bool check_effects(const IRInstruction& inst, MemEffects me) {
1855 SCOPE_ASSERT_DETAIL("Memory Effects") {
1856 return folly::sformat(" inst: {}\n effects: {}\n", inst, show(me));
1859 auto const check_obj = [&] (SSATmp* obj) {
1860 // canonicalize() may have replaced the SSATmp with a less refined
1861 // one, so we cannot assert <= TObj.
1862 always_assert_flog(
1863 obj->type() <= TBottom ||
1864 obj->type().maybe(TObj),
1865 "Non obj pointer in memory effects"
1869 auto const check = [&] (AliasClass a) {
1870 if (auto const pr = a.prop()) check_obj(pr->obj);
1873 match<void>(
1875 [&] (GeneralEffects x) {
1876 check(x.loads);
1877 check(x.stores);
1878 check(x.moves);
1879 check(x.kills);
1880 check(x.inout);
1881 check(x.backtrace);
1883 // Locations that are may-moved should always also count as may-loads.
1884 always_assert(x.moves <= x.loads);
1886 if (inst.mayRaiseErrorWithSources()) {
1887 // Any instruction that can raise an error can run a user error handler
1888 // and have arbitrary effects on the heap.
1889 always_assert(AHeapAny <= x.loads);
1890 always_assert(AHeapAny <= x.stores);
1892 * They also ought to kill /something/ on the stack, because of
1893 * possible re-entry. It's not incorrect to leave things out of the
1894 * kills set, but this assertion is here because we shouldn't do it on
1895 * purpose, so this is here until we have a reason not to assert it.
1897 * The mayRaiseError instructions should all be going through
1898 * may_reenter right now, which will kill the stack below the re-entry
1899 * depth---unless the marker for `inst' doesn't have an fp set.
1901 always_assert(inst.marker().fixupFP() == nullptr ||
1902 AStackAny.maybe(x.kills));
1905 [&] (PureLoad x) { check(x.src); },
1906 [&] (PureStore x) { check(x.dst); },
1907 [&] (ExitEffects x) { check(x.live); check(x.kills); },
1908 [&] (IrrelevantEffects) {},
1909 [&] (UnknownEffects) {},
1910 [&] (CallEffects x) { check(x.kills);
1911 check(x.inputs);
1912 check(x.actrec);
1913 check(x.outputs);
1914 check(x.locals); },
1915 [&] (PureInlineCall x) { check(x.base);
1916 check(x.actrec); },
1917 [&] (ReturnEffects x) { check(x.kills); }
1920 return true;
1923 //////////////////////////////////////////////////////////////////////
1927 MemEffects memory_effects(const IRInstruction& inst) {
1928 auto const inner = memory_effects_impl(inst);
1929 auto const ret = [&] () -> MemEffects {
1930 if (!inst.mayRaiseErrorWithSources()) {
1931 if (inst.maySyncVMRegsWithSources()) {
1932 auto fail = [&] {
1933 always_assert_flog(
1934 false,
1935 "Instruction {} has effects {} but has been marked as MaySyncVMRegs "
1936 "without MayRaiseError.",
1937 inst,
1938 show(inner)
1939 );
1940 return may_load_store(AUnknown, AUnknown);
1941 };
1942 return match<MemEffects>(
1943 inner,
1944 [&] (GeneralEffects x) {
1945 return GeneralEffects {
1946 x.loads | AVMRegAny | AVMRegState,
1947 x.stores | AVMRegAny,
1948 x.moves, x.kills, x.inout, x.backtrace,
1949 };
1950 },
1951 [&] (CallEffects x) { return fail(); },
1952 [&] (UnknownEffects x) { return fail(); },
1953 [&] (PureLoad x) {
1954 return may_load_store(
1955 x.src | AVMRegAny | AVMRegState,
1956 AVMRegAny
1957 );
1958 },
1959 [&] (PureStore) { return fail(); },
1960 [&] (ExitEffects) { return fail(); },
1961 [&] (PureInlineCall) { return fail(); },
1962 [&] (IrrelevantEffects) { return fail(); },
1963 [&] (ReturnEffects) { return fail(); }
1964 );
1965 }
1966 return inner;
1967 }
1969 auto fail = [&] {
1970 always_assert_flog(
1971 false,
1972 "Instruction {} has effects {}, but has been marked as MayRaiseError "
1973 "and must use a UnknownEffects, GeneralEffects, or CallEffects type.",
1974 inst,
1975 show(inner)
1976 );
1977 return may_load_store(AUnknown, AUnknown);
1978 };
1980 // Calls are implicitly MayRaise; all other instructions must use the
1981 // GeneralEffects or UnknownEffects class of memory effects.
1982 return match<MemEffects>(
1983 inner,
1984 [&] (GeneralEffects x) { return may_reenter(inst, x); },
1985 [&] (CallEffects x) { return x; },
1986 [&] (UnknownEffects x) { return x; },
1987 [&] (PureLoad) { return fail(); },
1988 [&] (PureStore) { return fail(); },
1989 [&] (ExitEffects) { return fail(); },
1990 [&] (PureInlineCall) { return fail(); },
1991 [&] (IrrelevantEffects) { return fail(); },
1992 [&] (ReturnEffects) { return fail(); }
1993 );
1994 }();
1995 assertx(check_effects(inst, ret));
1996 return ret;
1997 }
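/*
 * Resolve the locations a TMem SSATmp may point to by inspecting its
 * defining instructions.  Illustrative example (standard IR ops, not taken
 * from this diff): for t1 = LdLocAddr<5>(fp), pointee(t1) narrows to the
 * single local ALocal{fp, 5}; an address with unknown provenance falls back
 * to the type-based union computed by pointee(const Type&) further down.
 */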
1999 AliasClass pointee(const SSATmp* tmp) {
2000 auto acls = AEmpty;
2001 auto const visit = [&] (const IRInstruction* sinst, const SSATmp* ptr) {
2002 acls |= [&] () -> AliasClass {
2003 auto const type = ptr->type() & TMem;
2004 always_assert(type != TBottom);
2006 if (sinst->is(LdMBase)) return sinst->extra<LdMBase>()->acls;
2007 if (sinst->is(LdRDSAddr, LdInitRDSAddr)) {
2008 return ARds { sinst->extra<RDSHandleData>()->handle };
2009 }
2011 auto const specific = [&] () -> Optional<AliasClass> {
2012 if (type <= TMemToFrame) {
2013 if (sinst->is(LdLocAddr)) {
2014 return AliasClass {
2015 ALocal { sinst->src(0), sinst->extra<LdLocAddr>()->locId }
2016 };
2017 }
2018 return ALocalAny;
2019 }
2021 if (type <= TMemToStk) {
2022 if (sinst->is(LdStkAddr)) {
2023 return AliasClass {
2024 AStack::at(sinst->extra<LdStkAddr>()->offset)
2025 };
2026 }
2027 return AStackAny;
2028 }
2030 if (type <= TMemToProp) {
2031 if (sinst->is(LdPropAddr, LdInitPropAddr)) {
2032 return AliasClass {
2033 AProp {
2034 sinst->src(0),
2035 safe_cast<uint16_t>(sinst->extra<IndexData>()->index)
2036 }
2037 };
2038 }
2039 return APropAny;
2040 }
2042 auto const elem = [&] () -> AliasClass {
2043 auto const base = sinst->src(0);
2044 auto const key = sinst->src(1);
2045 always_assert(base->isA(TArrLike));
2047 if (key->isA(TInt)) {
2048 if (key->hasConstVal()) return AElemI { base, key->intVal() };
2049 return AElemIAny;
2050 }
2051 if (key->isA(TStr)) {
2052 assertx(base->isA(TBottom) || !base->isA(TVec));
2053 if (key->hasConstVal()) return AElemS { base, key->strVal() };
2054 return AElemSAny;
2055 }
2056 return AElemAny;
2057 };
2059 if (type <= TMemToElem) {
2060 if (sinst->is(LdVecElemAddr, ElemDictK, StructDictElemAddr)) {
2061 return elem();
2062 }
2063 return AElemAny;
2064 }
2066 return std::nullopt;
2067 }();
2069 if (specific) {
2070 // A pointer has to point at *something*, so we should not get
2071 // AEmpty here.
2072 assertx(*specific != AEmpty);
2073 // We don't currently ever form pointers to something that's not a
2074 // TypedValue.
2075 assertx(*specific <= AUnknownTV);
2076 return *specific;
2077 }
2079 /*
2080 * None of the above worked, so try to make the smallest union
2081 * we can based on the pointer type.
2082 */
2083 return pointee(type);
2084 }();
2085 return true;
2086 };
2087 visitEveryDefiningInst(tmp, visit);
2088 return acls;
2089 }
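/*
 * Conservative fallback used when only the pointer's type is known, not its
 * defining instruction.  Each TMemTo* component widens to the corresponding
 * "any" class; e.g. a pointer typed TMemToStk|TMemToFrame yields
 * AStackAny | ALocalAny.
 */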
2091 AliasClass pointee(const Type& type) {
2092 assertx(type.maybe(TMem));
2094 auto ret = AEmpty;
2095 if (type.maybe(TMemToStk)) ret = ret | AStackAny;
2096 if (type.maybe(TMemToFrame)) ret = ret | ALocalAny;
2097 if (type.maybe(TMemToProp)) ret = ret | APropAny;
2098 if (type.maybe(TMemToElem)) ret = ret | AElemAny;
2099 if (type.maybe(TMemToMISTemp)) ret = ret | AMIStateTempBase;
2100 if (type.maybe(TMemToClsInit)) ret = ret | AHeapAny;
2101 if (type.maybe(TMemToSProp)) ret = ret | ARdsAny;
2102 if (type.maybe(TMemToGbl)) ret = ret | AOther | ARdsAny;
2103 if (type.maybe(TMemToOther)) ret = ret | AOther | ARdsAny;
2104 if (type.maybe(TMemToConst)) ret = ret | AOther;
2106 // The pointer type should lie completely within the above
2107 // locations.
2108 assertx(type <= (TMemToStk|TMemToFrame|TMemToProp|TMemToElem|
2109 TMemToMISTemp|TMemToClsInit|TMemToSProp|TMemToGbl|
2110 TMemToOther|TMemToConst));
2111 assertx(ret != AEmpty);
2112 assertx(ret <= AUnknownTV);
2113 return ret;
2114 }
2116 //////////////////////////////////////////////////////////////////////
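/*
 * Rewrite every AliasClass inside a MemEffects through
 * canonicalize(AliasClass), so effects described in terms of different but
 * equivalent SSATmps compare consistently.  (A summary of the per-variant
 * cases below, not new behavior.)
 */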
2118 MemEffects canonicalize(MemEffects me) {
2119 using R = MemEffects;
2120 return match<R>(
2121 me,
2122 [&] (GeneralEffects x) -> R {
2123 return GeneralEffects {
2124 canonicalize(x.loads),
2125 canonicalize(x.stores),
2126 canonicalize(x.moves),
2127 canonicalize(x.kills),
2128 canonicalize(x.inout),
2129 canonicalize(x.backtrace),
2130 };
2131 },
2132 [&] (PureLoad x) -> R {
2133 return PureLoad { canonicalize(x.src) };
2134 },
2135 [&] (PureStore x) -> R {
2136 return PureStore { canonicalize(x.dst), x.value, x.dep };
2137 },
2138 [&] (ExitEffects x) -> R {
2139 return ExitEffects { canonicalize(x.live), canonicalize(x.kills) };
2140 },
2141 [&] (PureInlineCall x) -> R {
2142 return PureInlineCall {
2143 canonicalize(x.base),
2144 x.fp,
2145 canonicalize(x.actrec)
2146 };
2147 },
2148 [&] (CallEffects x) -> R {
2149 return CallEffects {
2150 canonicalize(x.kills),
2151 canonicalize(x.inputs),
2152 canonicalize(x.actrec),
2153 canonicalize(x.outputs),
2154 canonicalize(x.locals)
2155 };
2156 },
2157 [&] (ReturnEffects x) -> R {
2158 return ReturnEffects { canonicalize(x.kills) };
2159 },
2160 [&] (IrrelevantEffects x) -> R { return x; },
2161 [&] (UnknownEffects x) -> R { return x; }
2162 );
2163 }
2165 //////////////////////////////////////////////////////////////////////
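/*
 * Human-readable rendering of a MemEffects value, used in debug output.
 * For example, a GeneralEffects value prints as
 *   mlsmkib(<loads> ; <stores> ; <moves> ; <kills> ; <inout> ; <backtrace>)
 * where each field is the show() of the corresponding AliasClass.
 */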
2167 std::string show(MemEffects effects) {
2168 using folly::sformat;
2169 return match<std::string>(
2170 effects,
2171 [&] (GeneralEffects x) {
2172 return sformat("mlsmkib({} ; {} ; {} ; {} ; {} ; {})",
2173 show(x.loads),
2174 show(x.stores),
2175 show(x.moves),
2176 show(x.kills),
2177 show(x.inout),
2178 show(x.backtrace)
2179 );
2180 },
2181 [&] (ExitEffects x) {
2182 return sformat("exit({} ; {})", show(x.live), show(x.kills));
2183 },
2184 [&] (PureInlineCall x) {
2185 return sformat("inline_call({} ; {})",
2186 show(x.base),
2187 show(x.actrec)
2188 );
2189 },
2190 [&] (CallEffects x) {
2191 return sformat("call({} ; {} ; {} ; {} ; {})",
2192 show(x.kills),
2193 show(x.inputs),
2194 show(x.actrec),
2195 show(x.outputs),
2196 show(x.locals)
2197 );
2198 },
2199 [&] (PureLoad x) { return sformat("ld({})", show(x.src)); },
2200 [&] (PureStore x) { return sformat("st({})", show(x.dst)); },
2201 [&] (ReturnEffects x) { return sformat("return({})", show(x.kills)); },
2202 [&] (IrrelevantEffects) { return "IrrelevantEffects"; },
2203 [&] (UnknownEffects) { return "UnknownEffects"; }
2204 );
2205 }
2207 //////////////////////////////////////////////////////////////////////
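/*
 * Refine a GeneralEffects value using what is known about the in-memory VM
 * register state at this point: the VM register locations are dropped from
 * the load set when the registers are known dead, and from the store set
 * when they are known live.
 */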
2209 GeneralEffects general_effects_for_vmreg_liveness(
2210 GeneralEffects l, KnownRegState liveness) {
2211 auto ret = GeneralEffects { l.loads, l.stores, l.moves, l.kills, l.inout, l.backtrace };
2213 if (liveness == KnownRegState::Dead) {
2214 ret.loads = l.loads.exclude_vm_reg().value_or(AEmpty);
2215 } else if (liveness == KnownRegState::Live) {
2216 ret.stores = l.stores.exclude_vm_reg().value_or(AEmpty);
2217 }
2219 return ret;
2220 }
2222 //////////////////////////////////////////////////////////////////////
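/*
 * Whether `inst' is a member instruction that may change the type of its
 * base (typically by COWing an array or string).  Only opcodes accepted
 * here are legal inputs to mInstrBaseEffects() below.
 */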
2224 bool hasMInstrBaseEffects(const IRInstruction& inst) {
2225 switch (inst.op()) {
2226 case ElemDictD:
2227 case ElemDX:
2228 case BespokeElem:
2229 case ElemDictU:
2230 case ElemUX:
2231 case SetElem:
2232 case UnsetElem:
2233 case SetOpElem:
2234 case IncDecElem:
2235 case SetNewElem:
2236 case SetNewElemVec:
2237 case SetNewElemDict:
2238 case SetNewElemKeyset:
2239 case SetRange:
2240 case SetRangeRev:
2241 return true;
2242 default:
2243 return false;
2244 }
2245 }
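/*
 * Given the base's type before `inst', return the type the base may have
 * afterwards, or std::nullopt if the instruction cannot change it.  A rough
 * reading of the cases below: the part of `old' the op can COW is mapped to
 * its modified, counted counterpart, and any part the op cannot touch is
 * carried through unchanged.
 */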
2247 Optional<Type> mInstrBaseEffects(const IRInstruction& inst, Type old) {
2248 assertx(hasMInstrBaseEffects(inst));
2250 switch (inst.op()) {
2251 case ElemDictD:
2252 case ElemDX:
2253 case SetOpElem:
2254 case IncDecElem:
2255 // Always COWs arrays, leaves strings alone
2256 return old.maybe(TArrLike)
2257 ? make_optional(
2258 ((old & TArrLike).modified() & TCounted) | (old - TArrLike)
2259 )
2260 : std::nullopt;
2261 case ElemDictU:
2262 case ElemUX:
2263 case UnsetElem:
2264 // Might COW arrays (depending on whether the key is present); leaves strings alone
2265 return old.maybe(TArrLike)
2266 ? make_optional(((old & TArrLike).modified() & TCounted) | old)
2267 : std::nullopt;
2268 case SetElem:
2269 // COWs both arrays and strings
2270 return old.maybe(TArrLike | TStr)
2271 ? make_optional(old.modified() & TCounted)
2272 : std::nullopt;
2273 case SetNewElem:
2274 case SetNewElemVec:
2275 case SetNewElemDict:
2276 case SetNewElemKeyset: {
2277 // Vecs and keysets will always COW. Dicts will COW in almost
2278 // all situations except if the "next key" hits the limit.
2279 if (!old.maybe(TArrLike)) return std::nullopt;
2280 return
2281 ((old & TArrLike).modified() & TCounted) | (old - (TVec | TKeyset));
2282 }
2283 case SetRange:
2284 case SetRangeRev:
2285 // Always COWs strings
2286 return old.maybe(TStr)
2287 ? make_optional(((old & TStr).modified() & TCounted) | (old - TStr))
2288 : std::nullopt;
2289 case BespokeElem: {
2290 // Behaves like define if S2 is true, unset if false
2291 if (!old.maybe(TArrLike)) return std::nullopt;
2292 auto const t = (old & TArrLike).modified() & TCounted;
2293 return inst.src(2)->boolVal()
2294 ? t | (old - TArrLike)
2295 : t | old;
2296 }
2297 default:
2298 not_reached();
2299 }
2300 }
2302 //////////////////////////////////////////////////////////////////////