Bug 1837620 - Part 1: Remove baseline ICs that guard shapes when the shape becomes...
[gecko.git] / js / src / jit / JitScript.cpp
blobdbdf5381036b464935ef15f320e4d2a9dc909f2e
1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
2 * vim: set ts=8 sts=2 et sw=2 tw=80:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
7 #include "jit/JitScript-inl.h"
9 #include "mozilla/BinarySearch.h"
10 #include "mozilla/CheckedInt.h"
12 #include <utility>
14 #include "jit/BaselineIC.h"
15 #include "jit/BaselineJIT.h"
16 #include "jit/BytecodeAnalysis.h"
17 #include "jit/IonScript.h"
18 #include "jit/JitFrames.h"
19 #include "jit/JitSpewer.h"
20 #include "jit/ScriptFromCalleeToken.h"
21 #include "jit/TrialInlining.h"
22 #include "vm/BytecodeUtil.h"
23 #include "vm/Compartment.h"
24 #include "vm/FrameIter.h" // js::OnlyJSJitFrameIter
25 #include "vm/JitActivation.h"
26 #include "vm/JSScript.h"
28 #include "gc/GCContext-inl.h"
29 #include "jit/JSJitFrameIter-inl.h"
30 #include "vm/JSContext-inl.h"
31 #include "vm/JSScript-inl.h"
33 using namespace js;
34 using namespace js::jit;
36 using mozilla::CheckedInt;
38 JitScript::JitScript(JSScript* script, Offset fallbackStubsOffset,
39 Offset endOffset, const char* profileString)
40 : profileString_(profileString),
41 endOffset_(endOffset),
42 icScript_(script->getWarmUpCount(),
43 fallbackStubsOffset - offsetOfICScript(),
44 endOffset - offsetOfICScript(),
45 /*depth=*/0) {
46 // Ensure the baselineScript_ and ionScript_ fields match the BaselineDisabled
47 // and IonDisabled script flags.
48 if (!script->canBaselineCompile()) {
49 setBaselineScriptImpl(script, BaselineDisabledScriptPtr);
51 if (!script->canIonCompile()) {
52 setIonScriptImpl(script, IonDisabledScriptPtr);
56 #ifdef DEBUG
57 JitScript::~JitScript() {
58 // The contents of the stub space are removed and freed separately after the
59 // next minor GC. See prepareForDestruction.
60 MOZ_ASSERT(jitScriptStubSpace_.isEmpty());
62 // BaselineScript and IonScript must have been destroyed at this point.
63 MOZ_ASSERT(!hasBaselineScript());
64 MOZ_ASSERT(!hasIonScript());
66 #else
67 JitScript::~JitScript() = default;
68 #endif
70 bool JSScript::createJitScript(JSContext* cx) {
71 MOZ_ASSERT(!hasJitScript());
72 cx->check(this);
74 // Scripts with a JitScript can run in the Baseline Interpreter. Make sure
75 // we don't create a JitScript for scripts we shouldn't Baseline interpret.
76 MOZ_ASSERT_IF(IsBaselineInterpreterEnabled(),
77 CanBaselineInterpretScript(this));
79 // Store the profile string in the JitScript if the profiler is enabled.
80 const char* profileString = nullptr;
81 if (cx->runtime()->geckoProfiler().enabled()) {
82 profileString = cx->runtime()->geckoProfiler().profileString(cx, this);
83 if (!profileString) {
84 return false;
88 static_assert(sizeof(JitScript) % sizeof(uintptr_t) == 0,
89 "Trailing arrays must be aligned properly");
90 static_assert(sizeof(ICEntry) % sizeof(uintptr_t) == 0,
91 "Trailing arrays must be aligned properly");
93 static_assert(
94 sizeof(JitScript) == offsetof(JitScript, icScript_) + sizeof(ICScript),
95 "icScript_ must be the last field");
97 // Calculate allocation size.
98 CheckedInt<uint32_t> allocSize = sizeof(JitScript);
99 allocSize += CheckedInt<uint32_t>(numICEntries()) * sizeof(ICEntry);
100 allocSize += CheckedInt<uint32_t>(numICEntries()) * sizeof(ICFallbackStub);
101 if (!allocSize.isValid()) {
102 ReportAllocationOverflow(cx);
103 return false;
106 void* raw = cx->pod_malloc<uint8_t>(allocSize.value());
107 MOZ_ASSERT(uintptr_t(raw) % alignof(JitScript) == 0);
108 if (!raw) {
109 return false;
112 size_t fallbackStubsOffset =
113 sizeof(JitScript) + numICEntries() * sizeof(ICEntry);
115 UniquePtr<JitScript> jitScript(new (raw) JitScript(
116 this, fallbackStubsOffset, allocSize.value(), profileString));
118 // Sanity check the length computation.
119 MOZ_ASSERT(jitScript->numICEntries() == numICEntries());
121 jitScript->icScript()->initICEntries(cx, this);
123 warmUpData_.initJitScript(jitScript.release());
124 AddCellMemory(this, allocSize.value(), MemoryUse::JitScript);
126 // We have a JitScript so we can set the script's jitCodeRaw pointer to the
127 // Baseline Interpreter code.
128 updateJitCodeRaw(cx->runtime());
130 return true;
133 void JSScript::maybeReleaseJitScript(JS::GCContext* gcx) {
134 MOZ_ASSERT(hasJitScript());
136 if (zone()->jitZone()->keepJitScripts() || jitScript()->hasBaselineScript() ||
137 jitScript()->active()) {
138 return;
141 releaseJitScript(gcx);
144 void JSScript::releaseJitScript(JS::GCContext* gcx) {
145 MOZ_ASSERT(hasJitScript());
146 MOZ_ASSERT(!hasBaselineScript());
147 MOZ_ASSERT(!hasIonScript());
149 gcx->removeCellMemory(this, jitScript()->allocBytes(), MemoryUse::JitScript);
151 JitScript::Destroy(zone(), jitScript());
152 warmUpData_.clearJitScript();
153 updateJitCodeRaw(gcx->runtime());
156 void JSScript::releaseJitScriptOnFinalize(JS::GCContext* gcx) {
157 MOZ_ASSERT(hasJitScript());
159 if (hasIonScript()) {
160 IonScript* ion = jitScript()->clearIonScript(gcx, this);
161 jit::IonScript::Destroy(gcx, ion);
164 if (hasBaselineScript()) {
165 BaselineScript* baseline = jitScript()->clearBaselineScript(gcx, this);
166 jit::BaselineScript::Destroy(gcx, baseline);
169 releaseJitScript(gcx);
172 void JitScript::trace(JSTracer* trc) {
173 icScript_.trace(trc);
175 if (hasBaselineScript()) {
176 baselineScript()->trace(trc);
179 if (hasIonScript()) {
180 ionScript()->trace(trc);
183 if (templateEnv_.isSome()) {
184 TraceNullableEdge(trc, templateEnv_.ptr(), "jitscript-template-env");
187 if (hasInliningRoot()) {
188 inliningRoot()->trace(trc);
192 void JitScript::traceWeak(JSTracer* trc) {
193 if (!icScript_.traceWeak(trc)) {
194 #ifdef DEBUG
195 hasPurgedStubs_ = true;
196 #endif
199 if (hasInliningRoot()) {
200 inliningRoot()->traceWeak(trc);
203 if (hasIonScript()) {
204 ionScript()->traceWeak(trc);
208 void ICScript::trace(JSTracer* trc) {
209 // Mark all IC stub codes hanging off the IC stub entries.
210 for (size_t i = 0; i < numICEntries(); i++) {
211 ICEntry& ent = icEntry(i);
212 ent.trace(trc);
216 bool ICScript::traceWeak(JSTracer* trc) {
217 // Mark all IC stub codes hanging off the IC stub entries.
218 bool allSurvived = true;
219 for (size_t i = 0; i < numICEntries(); i++) {
220 ICEntry& ent = icEntry(i);
221 if (!ent.traceWeak(trc)) {
222 allSurvived = false;
226 return allSurvived;
229 bool ICScript::addInlinedChild(JSContext* cx, UniquePtr<ICScript> child,
230 uint32_t pcOffset) {
231 MOZ_ASSERT(!hasInlinedChild(pcOffset));
233 if (!inlinedChildren_) {
234 inlinedChildren_ = cx->make_unique<Vector<CallSite>>(cx);
235 if (!inlinedChildren_) {
236 return false;
240 // First reserve space in inlinedChildren_ to ensure that if the ICScript is
241 // added to the inlining root, it can also be added to inlinedChildren_.
242 CallSite callsite(child.get(), pcOffset);
243 if (!inlinedChildren_->reserve(inlinedChildren_->length() + 1)) {
244 return false;
246 if (!inliningRoot()->addInlinedScript(std::move(child))) {
247 return false;
249 inlinedChildren_->infallibleAppend(callsite);
250 return true;
253 ICScript* ICScript::findInlinedChild(uint32_t pcOffset) {
254 for (auto& callsite : *inlinedChildren_) {
255 if (callsite.pcOffset_ == pcOffset) {
256 return callsite.callee_;
259 MOZ_CRASH("Inlined child expected at pcOffset");
262 void ICScript::removeInlinedChild(uint32_t pcOffset) {
263 MOZ_ASSERT(inliningRoot());
264 inlinedChildren_->eraseIf([pcOffset](const CallSite& callsite) -> bool {
265 return callsite.pcOffset_ == pcOffset;
269 bool ICScript::hasInlinedChild(uint32_t pcOffset) {
270 if (!inlinedChildren_) {
271 return false;
273 for (auto& callsite : *inlinedChildren_) {
274 if (callsite.pcOffset_ == pcOffset) {
275 return true;
278 return false;
281 void JitScript::resetWarmUpCount(uint32_t count) {
282 icScript_.resetWarmUpCount(count);
283 if (hasInliningRoot()) {
284 inliningRoot()->resetWarmUpCounts(count);
288 void JitScript::ensureProfileString(JSContext* cx, JSScript* script) {
289 MOZ_ASSERT(cx->runtime()->geckoProfiler().enabled());
291 if (profileString_) {
292 return;
295 AutoEnterOOMUnsafeRegion oomUnsafe;
296 profileString_ = cx->runtime()->geckoProfiler().profileString(cx, script);
297 if (!profileString_) {
298 oomUnsafe.crash("Failed to allocate profile string");
302 /* static */
303 void JitScript::Destroy(Zone* zone, JitScript* script) {
304 script->prepareForDestruction(zone);
306 js_delete(script);
309 void JitScript::prepareForDestruction(Zone* zone) {
310 // When the script contains pointers to nursery things, the store buffer can
311 // contain entries that point into the fallback stub space. Since we can
312 // destroy scripts outside the context of a GC, this situation could result
313 // in us trying to mark invalid store buffer entries.
315 // Defer freeing any allocated blocks until after the next minor GC.
316 jitScriptStubSpace_.freeAllAfterMinorGC(zone);
318 // Trigger write barriers.
319 baselineScript_.set(zone, nullptr);
320 ionScript_.set(zone, nullptr);
323 struct FallbackStubs {
324 ICScript* const icScript_;
326 explicit FallbackStubs(ICScript* icScript) : icScript_(icScript) {}
328 size_t numEntries() const { return icScript_->numICEntries(); }
329 ICFallbackStub* operator[](size_t index) const {
330 return icScript_->fallbackStub(index);
334 static bool ComputeBinarySearchMid(FallbackStubs stubs, uint32_t pcOffset,
335 size_t* loc) {
336 return mozilla::BinarySearchIf(
337 stubs, 0, stubs.numEntries(),
338 [pcOffset](const ICFallbackStub* stub) {
339 if (pcOffset < stub->pcOffset()) {
340 return -1;
342 if (stub->pcOffset() < pcOffset) {
343 return 1;
345 return 0;
347 loc);
350 ICEntry& ICScript::icEntryFromPCOffset(uint32_t pcOffset) {
351 size_t mid;
352 MOZ_ALWAYS_TRUE(ComputeBinarySearchMid(FallbackStubs(this), pcOffset, &mid));
354 MOZ_ASSERT(mid < numICEntries());
356 ICEntry& entry = icEntry(mid);
357 MOZ_ASSERT(fallbackStubForICEntry(&entry)->pcOffset() == pcOffset);
358 return entry;
361 ICEntry* ICScript::interpreterICEntryFromPCOffset(uint32_t pcOffset) {
362 // We have to return the entry to store in BaselineFrame::interpreterICEntry
363 // when resuming in the Baseline Interpreter at pcOffset. The bytecode op at
364 // pcOffset does not necessarily have an ICEntry, so we want to return the
365 // first ICEntry for which the following is true:
367 // entry.pcOffset() >= pcOffset
369 // Fortunately, ComputeBinarySearchMid returns exactly this entry.
371 size_t mid;
372 ComputeBinarySearchMid(FallbackStubs(this), pcOffset, &mid);
374 if (mid < numICEntries()) {
375 ICEntry& entry = icEntry(mid);
376 MOZ_ASSERT(fallbackStubForICEntry(&entry)->pcOffset() >= pcOffset);
377 return &entry;
380 // Resuming at a pc after the last ICEntry. Just return nullptr:
381 // BaselineFrame::interpreterICEntry will never be used in this case.
382 return nullptr;
385 void JitScript::purgeOptimizedStubs(JSScript* script) {
386 MOZ_ASSERT(script->jitScript() == this);
388 Zone* zone = script->zone();
389 if (IsAboutToBeFinalizedUnbarriered(script)) {
390 // We're sweeping and the script is dead. Don't purge optimized stubs
391 // because (1) accessing CacheIRStubInfo pointers in ICStubs is invalid
392 // because we may have swept them already when we started (incremental)
393 // sweeping and (2) it's unnecessary because this script will be finalized
394 // soon anyway.
395 return;
398 JitSpew(JitSpew_BaselineIC, "Purging optimized stubs");
400 icScript()->purgeOptimizedStubs(zone);
401 if (hasInliningRoot()) {
402 inliningRoot()->purgeOptimizedStubs(zone);
404 #ifdef DEBUG
405 failedICHash_.reset();
406 hasPurgedStubs_ = true;
407 #endif
410 void ICScript::purgeOptimizedStubs(Zone* zone) {
411 for (size_t i = 0; i < numICEntries(); i++) {
412 ICEntry& entry = icEntry(i);
413 ICStub* lastStub = entry.firstStub();
414 while (!lastStub->isFallback()) {
415 lastStub = lastStub->toCacheIRStub()->next();
418 // Unlink all stubs allocated in the optimized space.
419 ICStub* stub = entry.firstStub();
420 ICCacheIRStub* prev = nullptr;
422 while (stub != lastStub) {
423 if (!stub->toCacheIRStub()->allocatedInFallbackSpace()) {
424 lastStub->toFallbackStub()->unlinkStub(zone, &entry, prev,
425 stub->toCacheIRStub());
426 stub = stub->toCacheIRStub()->next();
427 continue;
430 prev = stub->toCacheIRStub();
431 stub = stub->toCacheIRStub()->next();
434 lastStub->toFallbackStub()->clearHasFoldedStub();
437 #ifdef DEBUG
438 // All remaining stubs must be allocated in the fallback space.
439 for (size_t i = 0; i < numICEntries(); i++) {
440 ICEntry& entry = icEntry(i);
441 ICStub* stub = entry.firstStub();
442 while (!stub->isFallback()) {
443 MOZ_ASSERT(stub->toCacheIRStub()->allocatedInFallbackSpace());
444 stub = stub->toCacheIRStub()->next();
447 #endif
450 bool JitScript::ensureHasCachedBaselineJitData(JSContext* cx,
451 HandleScript script) {
452 if (templateEnv_.isSome()) {
453 return true;
456 if (!script->function() ||
457 !script->function()->needsFunctionEnvironmentObjects()) {
458 templateEnv_.emplace();
459 return true;
462 Rooted<EnvironmentObject*> templateEnv(cx);
463 Rooted<JSFunction*> fun(cx, script->function());
465 if (fun->needsNamedLambdaEnvironment()) {
466 templateEnv = NamedLambdaObject::createTemplateObject(cx, fun);
467 if (!templateEnv) {
468 return false;
472 if (fun->needsCallObject()) {
473 templateEnv = CallObject::createTemplateObject(cx, script, templateEnv);
474 if (!templateEnv) {
475 return false;
479 templateEnv_.emplace(templateEnv);
480 return true;
483 bool JitScript::ensureHasCachedIonData(JSContext* cx, HandleScript script) {
484 MOZ_ASSERT(script->jitScript() == this);
486 if (usesEnvironmentChain_.isSome()) {
487 return true;
490 if (!ensureHasCachedBaselineJitData(cx, script)) {
491 return false;
494 usesEnvironmentChain_.emplace(ScriptUsesEnvironmentChain(script));
495 return true;
498 void JitScript::setBaselineScriptImpl(JSScript* script,
499 BaselineScript* baselineScript) {
500 JSRuntime* rt = script->runtimeFromMainThread();
501 setBaselineScriptImpl(rt->gcContext(), script, baselineScript);
504 void JitScript::setBaselineScriptImpl(JS::GCContext* gcx, JSScript* script,
505 BaselineScript* baselineScript) {
506 if (hasBaselineScript()) {
507 gcx->removeCellMemory(script, baselineScript_->allocBytes(),
508 MemoryUse::BaselineScript);
509 baselineScript_.set(script->zone(), nullptr);
512 MOZ_ASSERT(ionScript_ == nullptr || ionScript_ == IonDisabledScriptPtr);
514 baselineScript_.set(script->zone(), baselineScript);
515 if (hasBaselineScript()) {
516 AddCellMemory(script, baselineScript_->allocBytes(),
517 MemoryUse::BaselineScript);
520 script->resetWarmUpResetCounter();
521 script->updateJitCodeRaw(gcx->runtime());
524 void JitScript::setIonScriptImpl(JSScript* script, IonScript* ionScript) {
525 JSRuntime* rt = script->runtimeFromMainThread();
526 setIonScriptImpl(rt->gcContext(), script, ionScript);
529 void JitScript::setIonScriptImpl(JS::GCContext* gcx, JSScript* script,
530 IonScript* ionScript) {
531 MOZ_ASSERT_IF(ionScript != IonDisabledScriptPtr,
532 !baselineScript()->hasPendingIonCompileTask());
534 JS::Zone* zone = script->zone();
535 if (hasIonScript()) {
536 gcx->removeCellMemory(script, ionScript_->allocBytes(),
537 MemoryUse::IonScript);
538 ionScript_.set(zone, nullptr);
541 ionScript_.set(zone, ionScript);
542 MOZ_ASSERT_IF(hasIonScript(), hasBaselineScript());
543 if (hasIonScript()) {
544 AddCellMemory(script, ionScript_->allocBytes(), MemoryUse::IonScript);
547 script->updateJitCodeRaw(gcx->runtime());
#ifdef JS_STRUCTURED_SPEW
// Return true if the entry's chain starts with at least one non-fallback
// (CacheIR) stub, i.e. there are per-stub entered counters worth dumping.
static bool HasEnteredCounters(ICEntry& entry) {
  ICStub* stub = entry.firstStub();
  if (stub && !stub->isFallback()) {
    return true;
  }
  return false;
}

// Dump per-IC entered/fallback counters for |script| to the structured-spew
// channel as a JSON list, one object per IC entry that has counters.
void jit::JitSpewBaselineICStats(JSScript* script, const char* dumpReason) {
  MOZ_ASSERT(script->hasJitScript());
  JSContext* cx = TlsContext.get();
  AutoStructuredSpewer spew(cx, SpewChannel::BaselineICStats, script);
  if (!spew) {
    return;
  }

  JitScript* jitScript = script->jitScript();
  spew->property("reason", dumpReason);
  spew->beginListProperty("entries");
  for (size_t i = 0; i < jitScript->numICEntries(); i++) {
    ICEntry& entry = jitScript->icEntry(i);
    ICFallbackStub* fallback = jitScript->fallbackStub(i);
    if (!HasEnteredCounters(entry)) {
      continue;
    }

    uint32_t pcOffset = fallback->pcOffset();
    jsbytecode* pc = script->offsetToPC(pcOffset);

    unsigned column;
    unsigned int line = PCToLineNumber(script, pc, &column);

    spew->beginObject();
    spew->property("op", CodeName(JSOp(*pc)));
    spew->property("pc", pcOffset);
    spew->property("line", line);
    spew->property("column", column);

    spew->beginListProperty("counts");
    ICStub* stub = entry.firstStub();
    while (stub && !stub->isFallback()) {
      uint32_t count = stub->enteredCount();
      spew->value(count);
      stub = stub->toCacheIRStub()->next();
    }
    spew->endList();
    spew->property("fallback_count", fallback->enteredCount());
    spew->endObject();
  }
  spew->endList();
}
#endif
604 static void MarkActiveJitScripts(JSContext* cx,
605 const JitActivationIterator& activation) {
606 for (OnlyJSJitFrameIter iter(activation); !iter.done(); ++iter) {
607 const JSJitFrameIter& frame = iter.frame();
608 switch (frame.type()) {
609 case FrameType::BaselineJS:
610 frame.script()->jitScript()->setActive();
611 break;
612 case FrameType::Exit:
613 if (frame.exitFrame()->is<LazyLinkExitFrameLayout>()) {
614 LazyLinkExitFrameLayout* ll =
615 frame.exitFrame()->as<LazyLinkExitFrameLayout>();
616 JSScript* script =
617 ScriptFromCalleeToken(ll->jsFrame()->calleeToken());
618 script->jitScript()->setActive();
620 break;
621 case FrameType::Bailout:
622 case FrameType::IonJS: {
623 // Keep the JitScript and BaselineScript around, since bailouts from
624 // the ion jitcode need to re-enter into the Baseline code.
625 frame.script()->jitScript()->setActive();
626 for (InlineFrameIterator inlineIter(cx, &frame); inlineIter.more();
627 ++inlineIter) {
628 inlineIter.script()->jitScript()->setActive();
630 break;
632 default:;
637 void jit::MarkActiveJitScripts(Zone* zone) {
638 if (zone->isAtomsZone()) {
639 return;
641 JSContext* cx = TlsContext.get();
642 for (JitActivationIterator iter(cx); !iter.done(); ++iter) {
643 if (iter->compartment()->zone() == zone) {
644 MarkActiveJitScripts(cx, iter);
649 InliningRoot* JitScript::getOrCreateInliningRoot(JSContext* cx,
650 JSScript* script) {
651 if (!inliningRoot_) {
652 inliningRoot_ = js::MakeUnique<InliningRoot>(cx, script);
653 if (!inliningRoot_) {
654 ReportOutOfMemory(cx);
655 return nullptr;
657 icScript_.inliningRoot_ = inliningRoot_.get();
659 return inliningRoot_.get();
662 gc::AllocSite* JitScript::createAllocSite(JSScript* script) {
663 MOZ_ASSERT(script->jitScript() == this);
665 Nursery& nursery = script->runtimeFromMainThread()->gc.nursery();
666 if (!nursery.canCreateAllocSite()) {
667 // Don't block attaching an optimized stub, but don't process allocations
668 // for this site.
669 return script->zone()->unknownAllocSite(JS::TraceKind::Object);
672 if (!allocSites_.reserve(allocSites_.length() + 1)) {
673 return nullptr;
676 ICStubSpace* stubSpace = jitScriptStubSpace();
677 auto* site =
678 static_cast<gc::AllocSite*>(stubSpace->alloc(sizeof(gc::AllocSite)));
679 if (!site) {
680 return nullptr;
683 new (site) gc::AllocSite(script->zone(), script, JS::TraceKind::Object);
685 allocSites_.infallibleAppend(site);
687 nursery.noteAllocSiteCreated();
689 return site;
692 bool JitScript::resetAllocSites(bool resetNurserySites,
693 bool resetPretenuredSites) {
694 MOZ_ASSERT(resetNurserySites || resetPretenuredSites);
696 bool anyReset = false;
698 for (gc::AllocSite* site : allocSites_) {
699 if ((resetNurserySites && site->initialHeap() == gc::Heap::Default) ||
700 (resetPretenuredSites && site->initialHeap() == gc::Heap::Tenured)) {
701 if (site->maybeResetState()) {
702 anyReset = true;
707 return anyReset;
710 JitScriptICStubSpace* ICScript::jitScriptStubSpace() {
711 if (isInlined()) {
712 return inliningRoot_->jitScriptStubSpace();
714 return outerJitScript()->jitScriptStubSpace();
717 JitScript* ICScript::outerJitScript() {
718 MOZ_ASSERT(!isInlined());
719 uint8_t* ptr = reinterpret_cast<uint8_t*>(this);
720 return reinterpret_cast<JitScript*>(ptr - JitScript::offsetOfICScript());
#ifdef DEBUG
// This hash is used to verify that we do not recompile after a
// TranspiledCacheIR invalidation with the exact same ICs.
//
// It should change iff an ICEntry in this ICScript (or an ICScript
// inlined into this ICScript) is modified such that we will make a
// different decision in WarpScriptOracle::maybeInlineIC. This means:
//
// 1. The hash will change if we attach a new stub.
// 2. The hash will change if the entered count of any CacheIR stub
//    other than the first changes from 0.
// 3. The hash will change if the entered count of the fallback stub
//    changes from 0.
// 4. The hash will change if the failure count of the fallback stub
//    changes from 0.
HashNumber ICScript::hash() {
  HashNumber h = 0;
  for (size_t i = 0; i < numICEntries(); i++) {
    ICStub* stub = icEntry(i).firstStub();

    // Hash the address of the first stub.
    h = mozilla::AddToHash(h, stub);

    // Hash whether subsequent stubs have entry count 0.
    if (!stub->isFallback()) {
      stub = stub->toCacheIRStub()->next();
      while (!stub->isFallback()) {
        h = mozilla::AddToHash(h, stub->enteredCount() == 0);
        stub = stub->toCacheIRStub()->next();
      }
    }

    // Hash whether the fallback has entry count 0 and failure count 0.
    MOZ_ASSERT(stub->isFallback());
    h = mozilla::AddToHash(h, stub->enteredCount() == 0);
    h = mozilla::AddToHash(h, stub->toFallbackStub()->state().hasFailures());
  }
  return h;
}
#endif