/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef jit_CacheIRWriter_h
#define jit_CacheIRWriter_h

#include "mozilla/Assertions.h"
#include "mozilla/Attributes.h"
#include "mozilla/Casting.h"
#include "mozilla/Maybe.h"

#include <stddef.h>
#include <stdint.h>

#include "jstypes.h"
#include "NamespaceImports.h"

#include "gc/AllocKind.h"
#include "jit/ABIFunctions.h"
#include "jit/CacheIR.h"
#include "jit/CacheIROpsGenerated.h"
#include "jit/CompactBuffer.h"
#include "jit/ICState.h"
#include "jit/Simulator.h"
#include "jit/TypeData.h"
#include "js/AllocPolicy.h"
#include "js/CallArgs.h"
#include "js/Class.h"
#include "js/experimental/JitInfo.h"
#include "js/Id.h"
#include "js/RootingAPI.h"
#include "js/ScalarType.h"
#include "js/Value.h"
#include "js/Vector.h"
#include "util/Memory.h"
#include "vm/GuardFuse.h"
#include "vm/JSFunction.h"
#include "vm/JSScript.h"
#include "vm/List.h"
#include "vm/Opcodes.h"
#include "vm/RealmFuses.h"
#include "vm/Shape.h"
#include "wasm/WasmConstants.h"
#include "wasm/WasmValType.h"

class JS_PUBLIC_API JSTracer;
struct JS_PUBLIC_API JSContext;

class JSObject;
class JSString;

namespace JS {
class Symbol;
}  // namespace JS

namespace js {

class GetterSetter;
enum class UnaryMathFunction : uint8_t;

namespace gc {
class AllocSite;
}  // namespace gc

namespace jit {

class ICScript;

// Class to record CacheIR + some additional metadata for code generation.
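//
// A rough usage sketch (a hypothetical GetProp-style IC; guardShape,
// loadFixedSlotResult and returnFromIC are part of the generated API
// declared via CACHE_IR_WRITER_GENERATED below, and slotByteOffset is a
// made-up variable for illustration):
//
//   CacheIRWriter writer(cx);
//   ValOperandId valId(writer.setInputOperandId(0));
//   ObjOperandId objId = writer.guardToObject(valId);
//   writer.guardShape(objId, shape);
//   writer.loadFixedSlotResult(objId, slotByteOffset);
//   writer.returnFromIC();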
class MOZ_RAII CacheIRWriter : public JS::CustomAutoRooter {
#ifdef DEBUG
  JSContext* cx_;
#endif
  CompactBufferWriter buffer_;

  uint32_t nextOperandId_;
  uint32_t nextInstructionId_;
  uint32_t numInputOperands_;

  TypeData typeData_;

  // The data (shapes, slot offsets, etc.) that will be stored in the ICStub.
  Vector<StubField, 8, SystemAllocPolicy> stubFields_;
  size_t stubDataSize_;

  // For each operand id, record which instruction accessed it last. This
  // information greatly improves register allocation.
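  // For example, if operand 2 is last used by instruction 4, its register can
  // be reused from instruction 5 onward (see operandIsDead below).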
  Vector<uint32_t, 8, SystemAllocPolicy> operandLastUsed_;

  // OperandId and stub offsets are stored in a single byte, so make sure
  // this doesn't overflow. We use a very conservative limit for now.
  static const size_t MaxOperandIds = 20;
  static const size_t MaxStubDataSizeInBytes = 20 * sizeof(uintptr_t);
  bool tooLarge_;

  // Assume this stub can't be trial inlined until we see a scripted call/inline
  // instruction.
  TrialInliningState trialInliningState_ = TrialInliningState::Failure;

  // Basic caching to avoid quadratic lookup behaviour in readStubField.
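  // Lookups tend to proceed in increasing offset order, so remembering the
  // last (offset, index) pair makes a sequence of lookups effectively linear
  // instead of quadratic.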
  mutable uint32_t lastOffset_;
  mutable uint32_t lastIndex_;

#ifdef DEBUG
  // Information for assertLengthMatches.
  mozilla::Maybe<CacheOp> currentOp_;
  size_t currentOpArgsStart_ = 0;
#endif

#ifdef DEBUG
  void assertSameCompartment(JSObject* obj);
  void assertSameZone(Shape* shape);
#else
  void assertSameCompartment(JSObject* obj) {}
  void assertSameZone(Shape* shape) {}
#endif

  void writeOp(CacheOp op) {
    buffer_.writeFixedUint16_t(uint16_t(op));
    nextInstructionId_++;
#ifdef DEBUG
    MOZ_ASSERT(currentOp_.isNothing(), "Missing call to assertLengthMatches?");
    currentOp_.emplace(op);
    currentOpArgsStart_ = buffer_.length();
#endif
  }

  void assertLengthMatches() {
#ifdef DEBUG
    // After writing arguments, assert the length matches CacheIROpArgLengths.
    size_t expectedLen = CacheIROpInfos[size_t(*currentOp_)].argLength;
    MOZ_ASSERT_IF(!failed(),
                  buffer_.length() - currentOpArgsStart_ == expectedLen);
    currentOp_.reset();
#endif
  }

  void writeOperandId(OperandId opId) {
    if (opId.id() < MaxOperandIds) {
      static_assert(MaxOperandIds <= UINT8_MAX,
                    "operand id must fit in a single byte");
      buffer_.writeByte(opId.id());
    } else {
      tooLarge_ = true;
      return;
    }
    if (opId.id() >= operandLastUsed_.length()) {
      buffer_.propagateOOM(operandLastUsed_.resize(opId.id() + 1));
      if (buffer_.oom()) {
        return;
      }
    }
    MOZ_ASSERT(nextInstructionId_ > 0);
    operandLastUsed_[opId.id()] = nextInstructionId_ - 1;
  }

  void writeCallFlagsImm(CallFlags flags) { buffer_.writeByte(flags.toByte()); }

  void addStubField(uint64_t value, StubField::Type fieldType) {
    size_t fieldOffset = stubDataSize_;
#ifndef JS_64BIT
    // On 32-bit platforms there are two stub field sizes (4 bytes and 8 bytes).
    // Ensure 8-byte fields are properly aligned.
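    // For example, if stubDataSize_ is 4 after a single 4-byte field, an
    // 8-byte field is placed at offset 8 and a 4-byte RawInt32 pad fills the
    // hole at offset 4 (see below).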
    if (StubField::sizeIsInt64(fieldType)) {
      fieldOffset = AlignBytes(fieldOffset, sizeof(uint64_t));
    }
#endif
    MOZ_ASSERT((fieldOffset % StubField::sizeInBytes(fieldType)) == 0);

    size_t newStubDataSize = fieldOffset + StubField::sizeInBytes(fieldType);
    if (newStubDataSize < MaxStubDataSizeInBytes) {
#ifndef JS_64BIT
      // Add a RawInt32 stub field for padding if necessary, because when we
      // iterate over the stub fields we assume there are no 'holes'.
      if (fieldOffset != stubDataSize_) {
        MOZ_ASSERT((stubDataSize_ + sizeof(uintptr_t)) == fieldOffset);
        buffer_.propagateOOM(
            stubFields_.append(StubField(0, StubField::Type::RawInt32)));
      }
#endif
      buffer_.propagateOOM(stubFields_.append(StubField(value, fieldType)));
      MOZ_ASSERT((fieldOffset % sizeof(uintptr_t)) == 0);
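      // Offsets are encoded as a word index so that they fit in a single
      // byte (see MaxStubDataSizeInBytes above).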
      buffer_.writeByte(fieldOffset / sizeof(uintptr_t));
      stubDataSize_ = newStubDataSize;
    } else {
      tooLarge_ = true;
    }
  }

  void writeShapeField(Shape* shape) {
    MOZ_ASSERT(shape);
    assertSameZone(shape);
    addStubField(uintptr_t(shape), StubField::Type::Shape);
  }
  void writeWeakShapeField(Shape* shape) {
    MOZ_ASSERT(shape);
    assertSameZone(shape);
    addStubField(uintptr_t(shape), StubField::Type::WeakShape);
  }
  void writeWeakGetterSetterField(GetterSetter* gs) {
    MOZ_ASSERT(gs);
    addStubField(uintptr_t(gs), StubField::Type::WeakGetterSetter);
  }
  void writeObjectField(JSObject* obj) {
    MOZ_ASSERT(obj);
    assertSameCompartment(obj);
    addStubField(uintptr_t(obj), StubField::Type::JSObject);
  }
  void writeWeakObjectField(JSObject* obj) {
    MOZ_ASSERT(obj);
    assertSameCompartment(obj);
    addStubField(uintptr_t(obj), StubField::Type::WeakObject);
  }
  void writeStringField(JSString* str) {
    MOZ_ASSERT(str);
    addStubField(uintptr_t(str), StubField::Type::String);
  }
  void writeSymbolField(JS::Symbol* sym) {
    MOZ_ASSERT(sym);
    addStubField(uintptr_t(sym), StubField::Type::Symbol);
  }
  void writeWeakBaseScriptField(BaseScript* script) {
    MOZ_ASSERT(script);
    addStubField(uintptr_t(script), StubField::Type::WeakBaseScript);
  }
  void writeJitCodeField(JitCode* code) {
    MOZ_ASSERT(code);
    addStubField(uintptr_t(code), StubField::Type::JitCode);
  }
  void writeRawInt32Field(uint32_t val) {
    addStubField(val, StubField::Type::RawInt32);
  }
  void writeRawPointerField(const void* ptr) {
    addStubField(uintptr_t(ptr), StubField::Type::RawPointer);
  }
  void writeIdField(jsid id) {
    addStubField(id.asRawBits(), StubField::Type::Id);
  }
  void writeValueField(const Value& val) {
    addStubField(val.asRawBits(), StubField::Type::Value);
  }
  void writeRawInt64Field(uint64_t val) {
    addStubField(val, StubField::Type::RawInt64);
  }
  void writeDoubleField(double d) {
    uint64_t bits = mozilla::BitwiseCast<uint64_t>(d);
    addStubField(bits, StubField::Type::Double);
  }
  void writeAllocSiteField(gc::AllocSite* ptr) {
    addStubField(uintptr_t(ptr), StubField::Type::AllocSite);
  }

  void writeJSOpImm(JSOp op) {
    static_assert(sizeof(JSOp) == sizeof(uint8_t), "JSOp must fit in a byte");
    buffer_.writeByte(uint8_t(op));
  }
  void writeGuardClassKindImm(GuardClassKind kind) {
    static_assert(sizeof(GuardClassKind) == sizeof(uint8_t),
                  "GuardClassKind must fit in a byte");
    buffer_.writeByte(uint8_t(kind));
  }
  void writeValueTypeImm(ValueType type) {
    static_assert(sizeof(ValueType) == sizeof(uint8_t),
                  "ValueType must fit in uint8_t");
    buffer_.writeByte(uint8_t(type));
  }
  void writeJSWhyMagicImm(JSWhyMagic whyMagic) {
    static_assert(JS_WHY_MAGIC_COUNT <= UINT8_MAX,
                  "JSWhyMagic must fit in uint8_t");
    buffer_.writeByte(uint8_t(whyMagic));
  }
  void writeScalarTypeImm(Scalar::Type type) {
    MOZ_ASSERT(size_t(type) <= UINT8_MAX);
    buffer_.writeByte(uint8_t(type));
  }
  void writeUnaryMathFunctionImm(UnaryMathFunction fun) {
    static_assert(sizeof(UnaryMathFunction) == sizeof(uint8_t),
                  "UnaryMathFunction must fit in a byte");
    buffer_.writeByte(uint8_t(fun));
  }
  void writeCompletionKindImm(CompletionKind kind) {
    static_assert(sizeof(CompletionKind) == sizeof(uint8_t),
                  "CompletionKind must fit in a byte");
    buffer_.writeByte(uint8_t(kind));
  }
  void writeBoolImm(bool b) { buffer_.writeByte(uint32_t(b)); }
  void writeRealmFuseIndexImm(RealmFuses::FuseIndex realmFuseIndex) {
    static_assert(sizeof(RealmFuses::FuseIndex) == sizeof(uint8_t),
                  "RealmFuses::FuseIndex must fit in a byte");
    buffer_.writeByte(uint8_t(realmFuseIndex));
  }

  void writeByteImm(uint32_t b) {
    MOZ_ASSERT(b <= UINT8_MAX);
    buffer_.writeByte(b);
  }

  void writeInt32Imm(int32_t i32) { buffer_.writeFixedUint32_t(i32); }
  void writeUInt32Imm(uint32_t u32) { buffer_.writeFixedUint32_t(u32); }
  void writePointer(const void* ptr) { buffer_.writeRawPointer(ptr); }

  void writeJSNativeImm(JSNative native) {
    writePointer(JS_FUNC_TO_DATA_PTR(void*, native));
  }
  void writeStaticStringImm(const char* str) { writePointer(str); }

  void writeWasmValTypeImm(wasm::ValType::Kind kind) {
    static_assert(unsigned(wasm::TypeCode::Limit) <= UINT8_MAX);
    buffer_.writeByte(uint8_t(kind));
  }

  void writeAllocKindImm(gc::AllocKind kind) {
    static_assert(unsigned(gc::AllocKind::LIMIT) <= UINT8_MAX);
    buffer_.writeByte(uint8_t(kind));
  }

  uint32_t newOperandId() { return nextOperandId_++; }

  CacheIRWriter(const CacheIRWriter&) = delete;
  CacheIRWriter& operator=(const CacheIRWriter&) = delete;

 public:
  explicit CacheIRWriter(JSContext* cx)
      : CustomAutoRooter(cx),
#ifdef DEBUG
        cx_(cx),
#endif
        nextOperandId_(0),
        nextInstructionId_(0),
        numInputOperands_(0),
        stubDataSize_(0),
        tooLarge_(false),
        lastOffset_(0),
        lastIndex_(0) {
  }

  bool tooLarge() const { return tooLarge_; }
  bool oom() const { return buffer_.oom(); }
  bool failed() const { return tooLarge() || oom(); }

  TrialInliningState trialInliningState() const { return trialInliningState_; }

  uint32_t numInputOperands() const { return numInputOperands_; }
  uint32_t numOperandIds() const { return nextOperandId_; }
  uint32_t numInstructions() const { return nextInstructionId_; }

  size_t numStubFields() const { return stubFields_.length(); }
  StubField::Type stubFieldType(uint32_t i) const {
    return stubFields_[i].type();
  }

  uint32_t setInputOperandId(uint32_t op) {
    MOZ_ASSERT(op == nextOperandId_);
    nextOperandId_++;
    numInputOperands_++;
    return op;
  }

  TypeData typeData() const { return typeData_; }
  void setTypeData(TypeData data) { typeData_ = data; }

  void trace(JSTracer* trc) override {
    // For now, assert we only GC before we append stub fields.
    MOZ_RELEASE_ASSERT(stubFields_.empty());
  }

  size_t stubDataSize() const { return stubDataSize_; }
  void copyStubData(uint8_t* dest) const;
  bool stubDataEquals(const uint8_t* stubData) const;
  bool stubDataEqualsIgnoring(const uint8_t* stubData,
                              uint32_t ignoreOffset) const;

  bool operandIsDead(uint32_t operandId, uint32_t currentInstruction) const {
    if (operandId >= operandLastUsed_.length()) {
      return false;
    }
    return currentInstruction > operandLastUsed_[operandId];
  }

  const uint8_t* codeStart() const {
    MOZ_ASSERT(!failed());
    return buffer_.buffer();
  }

  const uint8_t* codeEnd() const {
    MOZ_ASSERT(!failed());
    return buffer_.buffer() + buffer_.length();
  }

  uint32_t codeLength() const {
    MOZ_ASSERT(!failed());
    return buffer_.length();
  }

  // This should not be used when compiling Baseline code, as Baseline code
  // shouldn't bake in stub values.
  StubField readStubField(uint32_t offset, StubField::Type type) const;
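
  // Each guardTo* wrapper below emits the generated guard op and then reuses
  // the input's operand id under a more specific operand type; no new id is
  // allocated.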

  ObjOperandId guardToObject(ValOperandId input) {
    guardToObject_(input);
    return ObjOperandId(input.id());
  }

  StringOperandId guardToString(ValOperandId input) {
    guardToString_(input);
    return StringOperandId(input.id());
  }

  SymbolOperandId guardToSymbol(ValOperandId input) {
    guardToSymbol_(input);
    return SymbolOperandId(input.id());
  }

  BigIntOperandId guardToBigInt(ValOperandId input) {
    guardToBigInt_(input);
    return BigIntOperandId(input.id());
  }

  BooleanOperandId guardToBoolean(ValOperandId input) {
    guardToBoolean_(input);
    return BooleanOperandId(input.id());
  }

  Int32OperandId guardToInt32(ValOperandId input) {
    guardToInt32_(input);
    return Int32OperandId(input.id());
  }

  NumberOperandId guardIsNumber(ValOperandId input) {
    guardIsNumber_(input);
    return NumberOperandId(input.id());
  }

  ValOperandId boxObject(ObjOperandId input) {
    return ValOperandId(input.id());
  }

  void guardShapeForClass(ObjOperandId obj, Shape* shape) {
    // Guard shape to ensure that object class is unchanged. This is true
    // for all shapes.
    guardShape(obj, shape);
  }

  void guardShapeForOwnProperties(ObjOperandId obj, Shape* shape) {
    // Guard shape to detect changes to (non-dense) own properties. This
    // also implies |guardShapeForClass|.
    MOZ_ASSERT(shape->getObjectClass()->isNativeObject());
    guardShape(obj, shape);
  }

 public:
  void guardSpecificFunction(ObjOperandId obj, JSFunction* expected) {
    // Guard object is a specific function. This implies immutable fields on
    // the JSFunction struct itself are unchanged.
    // Bake in the nargs and FunctionFlags so Warp can use them off-main thread,
    // instead of directly using the JSFunction fields.
    uint32_t nargsAndFlags = expected->flagsAndArgCountRaw();
    guardSpecificFunction_(obj, expected, nargsAndFlags);
  }

  void guardFunctionScript(ObjOperandId fun, BaseScript* expected) {
    // Guard function has a specific BaseScript. This implies immutable fields
    // on the JSFunction struct itself are unchanged and are equivalent for
    // lambda clones.
    // Bake in the nargs and FunctionFlags so Warp can use them off-main thread,
    // instead of directly using the JSFunction fields.
    uint32_t nargsAndFlags = expected->function()->flagsAndArgCountRaw();
    guardFunctionScript_(fun, expected, nargsAndFlags);
  }
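
  // When the argument count is known when attaching the stub, an argument's
  // stack slot is a compile-time constant (loadArgumentFixedSlot); otherwise
  // it has to be computed from the argc operand (loadArgumentDynamicSlot).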
  ValOperandId loadArgumentFixedSlot(
      ArgumentKind kind, uint32_t argc,
      CallFlags flags = CallFlags(CallFlags::Standard)) {
    bool addArgc;
    int32_t slotIndex = GetIndexOfArgument(kind, flags, &addArgc);
    if (addArgc) {
      slotIndex += argc;
    }
    MOZ_ASSERT(slotIndex >= 0);
    MOZ_RELEASE_ASSERT(slotIndex <= UINT8_MAX);
    return loadArgumentFixedSlot_(slotIndex);
  }

  ValOperandId loadArgumentDynamicSlot(
      ArgumentKind kind, Int32OperandId argcId,
      CallFlags flags = CallFlags(CallFlags::Standard)) {
    bool addArgc;
    int32_t slotIndex = GetIndexOfArgument(kind, flags, &addArgc);
    if (addArgc) {
      return loadArgumentDynamicSlot_(argcId, slotIndex);
    }
    return loadArgumentFixedSlot_(slotIndex);
  }

  ObjOperandId loadSpreadArgs() {
    ArgumentKind kind = ArgumentKind::Arg0;
    uint32_t argc = 1;
    CallFlags flags(CallFlags::Spread);
    return ObjOperandId(loadArgumentFixedSlot(kind, argc, flags).id());
  }

  void callScriptedFunction(ObjOperandId callee, Int32OperandId argc,
                            CallFlags flags, uint32_t argcFixed) {
    callScriptedFunction_(callee, argc, flags, argcFixed);
    trialInliningState_ = TrialInliningState::Candidate;
  }

  void callInlinedFunction(ObjOperandId callee, Int32OperandId argc,
                           ICScript* icScript, CallFlags flags,
                           uint32_t argcFixed) {
    callInlinedFunction_(callee, argc, icScript, flags, argcFixed);
    trialInliningState_ = TrialInliningState::Inlined;
  }

  void callNativeFunction(ObjOperandId calleeId, Int32OperandId argc, JSOp op,
                          JSFunction* calleeFunc, CallFlags flags,
                          uint32_t argcFixed) {
    // Some native functions can be implemented faster if we know that
    // the return value is ignored.
    bool ignoresReturnValue =
        op == JSOp::CallIgnoresRv && calleeFunc->hasJitInfo() &&
        calleeFunc->jitInfo()->type() == JSJitInfo::IgnoresReturnValueNative;
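    // (JSOp::CallIgnoresRv is used for calls whose result is unused, such as
    // a call in expression-statement position.)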

#ifdef JS_SIMULATOR
    // The simulator requires VM calls to be redirected to a special
    // swi instruction to handle them, so we store the redirected
    // pointer in the stub and use that instead of the original one.
    // If we are calling the ignoresReturnValue version of a native
    // function, we bake it into the redirected pointer.
    // (See BaselineCacheIRCompiler::emitCallNativeFunction.)
    JSNative target = ignoresReturnValue
                          ? calleeFunc->jitInfo()->ignoresReturnValueMethod
                          : calleeFunc->native();
    void* rawPtr = JS_FUNC_TO_DATA_PTR(void*, target);
    void* redirected = Simulator::RedirectNativeFunction(rawPtr, Args_General3);
    callNativeFunction_(calleeId, argc, flags, argcFixed, redirected);
#else
    // If we are not running in the simulator, we generate different jitcode
    // to find the ignoresReturnValue version of a native function.
    callNativeFunction_(calleeId, argc, flags, argcFixed, ignoresReturnValue);
#endif
  }

  void callDOMFunction(ObjOperandId calleeId, Int32OperandId argc,
                       ObjOperandId thisObjId, JSFunction* calleeFunc,
                       CallFlags flags, uint32_t argcFixed) {
#ifdef JS_SIMULATOR
    void* rawPtr = JS_FUNC_TO_DATA_PTR(void*, calleeFunc->native());
    void* redirected = Simulator::RedirectNativeFunction(rawPtr, Args_General3);
    callDOMFunction_(calleeId, argc, thisObjId, flags, argcFixed, redirected);
#else
    callDOMFunction_(calleeId, argc, thisObjId, flags, argcFixed);
#endif
  }

  void callAnyNativeFunction(ObjOperandId calleeId, Int32OperandId argc,
                             CallFlags flags, uint32_t argcFixed) {
    MOZ_ASSERT(!flags.isSameRealm());
#ifdef JS_SIMULATOR
    const void* redirected = RedirectedCallAnyNative();
    callNativeFunction_(calleeId, argc, flags, argcFixed, redirected);
#else
    callNativeFunction_(calleeId, argc, flags, argcFixed,
                        /* ignoresReturnValue = */ false);
#endif
  }

  void callClassHook(ObjOperandId calleeId, Int32OperandId argc, JSNative hook,
                     CallFlags flags, uint32_t argcFixed) {
    MOZ_ASSERT(!flags.isSameRealm());
    void* target = JS_FUNC_TO_DATA_PTR(void*, hook);
#ifdef JS_SIMULATOR
    // The simulator requires VM calls to be redirected to a special
    // swi instruction to handle them, so we store the redirected
    // pointer in the stub and use that instead of the original one.
    target = Simulator::RedirectNativeFunction(target, Args_General3);
#endif
    callClassHook_(calleeId, argc, flags, argcFixed, target);
  }

  void callScriptedGetterResult(ValOperandId receiver, JSFunction* getter,
                                bool sameRealm) {
    MOZ_ASSERT(getter->hasJitEntry());
    uint32_t nargsAndFlags = getter->flagsAndArgCountRaw();
    callScriptedGetterResult_(receiver, getter, sameRealm, nargsAndFlags);
    trialInliningState_ = TrialInliningState::Candidate;
  }

  void callInlinedGetterResult(ValOperandId receiver, JSFunction* getter,
                               ICScript* icScript, bool sameRealm) {
    MOZ_ASSERT(getter->hasJitEntry());
    uint32_t nargsAndFlags = getter->flagsAndArgCountRaw();
    callInlinedGetterResult_(receiver, getter, icScript, sameRealm,
                             nargsAndFlags);
    trialInliningState_ = TrialInliningState::Inlined;
  }

  void callNativeGetterResult(ValOperandId receiver, JSFunction* getter,
                              bool sameRealm) {
    MOZ_ASSERT(getter->isNativeWithoutJitEntry());
    uint32_t nargsAndFlags = getter->flagsAndArgCountRaw();
    callNativeGetterResult_(receiver, getter, sameRealm, nargsAndFlags);
  }

  void callScriptedSetter(ObjOperandId receiver, JSFunction* setter,
                          ValOperandId rhs, bool sameRealm) {
    MOZ_ASSERT(setter->hasJitEntry());
    uint32_t nargsAndFlags = setter->flagsAndArgCountRaw();
    callScriptedSetter_(receiver, setter, rhs, sameRealm, nargsAndFlags);
    trialInliningState_ = TrialInliningState::Candidate;
  }

  void callInlinedSetter(ObjOperandId receiver, JSFunction* setter,
                         ValOperandId rhs, ICScript* icScript, bool sameRealm) {
    MOZ_ASSERT(setter->hasJitEntry());
    uint32_t nargsAndFlags = setter->flagsAndArgCountRaw();
    callInlinedSetter_(receiver, setter, rhs, icScript, sameRealm,
                       nargsAndFlags);
    trialInliningState_ = TrialInliningState::Inlined;
  }

  void callNativeSetter(ObjOperandId receiver, JSFunction* setter,
                        ValOperandId rhs, bool sameRealm) {
    MOZ_ASSERT(setter->isNativeWithoutJitEntry());
    uint32_t nargsAndFlags = setter->flagsAndArgCountRaw();
    callNativeSetter_(receiver, setter, rhs, sameRealm, nargsAndFlags);
  }

#ifdef JS_PUNBOX64
  void callScriptedProxyGetResult(ValOperandId target, ObjOperandId receiver,
                                  ObjOperandId handler, JSFunction* trap,
                                  HandleId property) {
    MOZ_ASSERT(trap->hasJitEntry());
    uint32_t nargsAndFlags = trap->flagsAndArgCountRaw();
    callScriptedProxyGetResult_(target, receiver, handler, trap, property,
                                nargsAndFlags);
  }

  void callScriptedProxyGetByValueResult(ValOperandId target,
                                         ObjOperandId receiver,
                                         ObjOperandId handler,
                                         ValOperandId property,
                                         JSFunction* trap) {
    MOZ_ASSERT(trap->hasJitEntry());
    uint32_t nargsAndFlags = trap->flagsAndArgCountRaw();
    callScriptedProxyGetByValueResult_(target, receiver, handler, property,
                                       trap, nargsAndFlags);
  }
#endif

  void metaScriptedThisShape(Shape* thisShape) {
    metaScriptedThisShape_(thisShape);
  }
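
  // Used for ICs that have seen multiple shapes: the shapes are kept in a
  // ListObject and the guard succeeds if the object's shape is any of them.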
  void guardMultipleShapes(ObjOperandId obj, ListObject* shapes) {
    MOZ_ASSERT(shapes->length() > 0);
    guardMultipleShapes_(obj, shapes);
  }

  friend class CacheIRCloner;

  CACHE_IR_WRITER_GENERATED
};

}  // namespace jit
}  // namespace js

#endif /* jit_CacheIRWriter_h */