/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef jit_CacheIRWriter_h
#define jit_CacheIRWriter_h

#include "mozilla/Assertions.h"
#include "mozilla/Attributes.h"
#include "mozilla/Casting.h"
#include "mozilla/Maybe.h"

#include <stddef.h>
#include <stdint.h>

#include "jstypes.h"
#include "NamespaceImports.h"

#include "gc/AllocKind.h"
#include "jit/ABIFunctions.h"
#include "jit/CacheIR.h"
#include "jit/CacheIROpsGenerated.h"
#include "jit/CompactBuffer.h"
#include "jit/ICState.h"
#include "jit/Simulator.h"
#include "jit/TypeData.h"
#include "js/AllocPolicy.h"
#include "js/CallArgs.h"
#include "js/Class.h"
#include "js/experimental/JitInfo.h"
#include "js/Id.h"
#include "js/RootingAPI.h"
#include "js/ScalarType.h"
#include "js/Value.h"
#include "js/Vector.h"
#include "util/Memory.h"
#include "vm/GuardFuse.h"
#include "vm/JSFunction.h"
#include "vm/JSScript.h"
#include "vm/List.h"
#include "vm/Opcodes.h"
#include "vm/RealmFuses.h"
#include "vm/Shape.h"
#include "vm/TypeofEqOperand.h"  // TypeofEqOperand
#include "wasm/WasmConstants.h"
#include "wasm/WasmValType.h"

class JS_PUBLIC_API JSTracer;
struct JS_PUBLIC_API JSContext;

class JSObject;
class JSString;

namespace JS {
class Symbol;
}  // namespace JS

namespace js {

class GetterSetter;
enum class UnaryMathFunction : uint8_t;

namespace gc {
class AllocSite;
}  // namespace gc

namespace jit {

class ICScript;
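// Rough usage sketch (illustrative only; the exact op sequence depends on the
// IC kind, and |cx|, |obj|, and |offset| are placeholder values). A generator
// typically tags the input operand, guards on its type and shape, and then
// emits a result op via the CACHE_IR_WRITER_GENERATED methods declared below:
//
//   CacheIRWriter writer(cx);
//   ValOperandId valId(writer.setInputOperandId(0));
//   ObjOperandId objId = writer.guardToObject(valId);
//   writer.guardShape(objId, obj->shape());
//   writer.loadFixedSlotResult(objId, offset);
//   writer.returnFromIC();
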
// Class to record CacheIR + some additional metadata for code generation.
class MOZ_RAII CacheIRWriter : public JS::CustomAutoRooter {
#ifdef DEBUG
  JSContext* cx_;
#endif
  CompactBufferWriter buffer_;

  uint32_t nextOperandId_;
  uint32_t nextInstructionId_;
  uint32_t numInputOperands_;

  TypeData typeData_;

  // The data (shapes, slot offsets, etc.) that will be stored in the ICStub.
  Vector<StubField, 8, SystemAllocPolicy> stubFields_;
  size_t stubDataSize_;

  // For each operand id, record which instruction accessed it last. This
  // information greatly improves register allocation.
  Vector<uint32_t, 8, SystemAllocPolicy> operandLastUsed_;

  // OperandId and stub offsets are stored in a single byte, so make sure
  // this doesn't overflow. We use a very conservative limit for now.
  static const size_t MaxOperandIds = 20;
  static const size_t MaxStubDataSizeInBytes = 20 * sizeof(uintptr_t);
  bool tooLarge_;

  // Assume this stub can't be trial inlined until we see a scripted
  // call/inline instruction.
  TrialInliningState trialInliningState_ = TrialInliningState::Failure;

  // Basic caching to avoid quadratic lookup behaviour in readStubField.
  mutable uint32_t lastOffset_;
  mutable uint32_t lastIndex_;

#ifdef DEBUG
  // Information for assertLengthMatches.
  mozilla::Maybe<CacheOp> currentOp_;
  size_t currentOpArgsStart_ = 0;
#endif

#ifdef DEBUG
  void assertSameCompartment(JSObject* obj);
  void assertSameZone(Shape* shape);
#else
  void assertSameCompartment(JSObject* obj) {}
  void assertSameZone(Shape* shape) {}
#endif
  void writeOp(CacheOp op) {
    buffer_.writeFixedUint16_t(uint16_t(op));
    nextInstructionId_++;
#ifdef DEBUG
    MOZ_ASSERT(currentOp_.isNothing(), "Missing call to assertLengthMatches?");
    currentOp_.emplace(op);
    currentOpArgsStart_ = buffer_.length();
#endif
  }

  void assertLengthMatches() {
#ifdef DEBUG
    // After writing arguments, assert the length matches CacheIROpArgLengths.
    size_t expectedLen = CacheIROpInfos[size_t(*currentOp_)].argLength;
    MOZ_ASSERT_IF(!failed(),
                  buffer_.length() - currentOpArgsStart_ == expectedLen);
    currentOp_.reset();
#endif
  }

  void writeOperandId(OperandId opId) {
    if (opId.id() < MaxOperandIds) {
      static_assert(MaxOperandIds <= UINT8_MAX,
                    "operand id must fit in a single byte");
      buffer_.writeByte(opId.id());
    } else {
      tooLarge_ = true;
      return;
    }
    if (opId.id() >= operandLastUsed_.length()) {
      buffer_.propagateOOM(operandLastUsed_.resize(opId.id() + 1));
      if (buffer_.oom()) {
        return;
      }
    }
    MOZ_ASSERT(nextInstructionId_ > 0);
    operandLastUsed_[opId.id()] = nextInstructionId_ - 1;
  }

  void writeCallFlagsImm(CallFlags flags) { buffer_.writeByte(flags.toByte()); }

  void addStubField(uint64_t value, StubField::Type fieldType) {
    size_t fieldOffset = stubDataSize_;
#ifndef JS_64BIT
    // On 32-bit platforms there are two stub field sizes (4 bytes and 8 bytes).
    // Ensure 8-byte fields are properly aligned.
    if (StubField::sizeIsInt64(fieldType)) {
      fieldOffset = AlignBytes(fieldOffset, sizeof(uint64_t));
    }
#endif
    MOZ_ASSERT((fieldOffset % StubField::sizeInBytes(fieldType)) == 0);

    size_t newStubDataSize = fieldOffset + StubField::sizeInBytes(fieldType);
    if (newStubDataSize < MaxStubDataSizeInBytes) {
#ifndef JS_64BIT
      // Add a RawInt32 stub field for padding if necessary, because when we
      // iterate over the stub fields we assume there are no 'holes'.
      if (fieldOffset != stubDataSize_) {
        MOZ_ASSERT((stubDataSize_ + sizeof(uintptr_t)) == fieldOffset);
        buffer_.propagateOOM(
            stubFields_.append(StubField(0, StubField::Type::RawInt32)));
      }
#endif
      buffer_.propagateOOM(stubFields_.append(StubField(value, fieldType)));
      MOZ_ASSERT((fieldOffset % sizeof(uintptr_t)) == 0);
      buffer_.writeByte(fieldOffset / sizeof(uintptr_t));
      stubDataSize_ = newStubDataSize;
    } else {
      tooLarge_ = true;
    }
  }
  void writeShapeField(Shape* shape) {
    MOZ_ASSERT(shape);
    assertSameZone(shape);
    addStubField(uintptr_t(shape), StubField::Type::Shape);
  }
  void writeWeakShapeField(Shape* shape) {
    MOZ_ASSERT(shape);
    assertSameZone(shape);
    addStubField(uintptr_t(shape), StubField::Type::WeakShape);
  }
  void writeWeakGetterSetterField(GetterSetter* gs) {
    MOZ_ASSERT(gs);
    addStubField(uintptr_t(gs), StubField::Type::WeakGetterSetter);
  }
  void writeObjectField(JSObject* obj) {
    MOZ_ASSERT(obj);
    assertSameCompartment(obj);
    addStubField(uintptr_t(obj), StubField::Type::JSObject);
  }
  void writeWeakObjectField(JSObject* obj) {
    MOZ_ASSERT(obj);
    assertSameCompartment(obj);
    addStubField(uintptr_t(obj), StubField::Type::WeakObject);
  }
  void writeStringField(JSString* str) {
    MOZ_ASSERT(str);
    addStubField(uintptr_t(str), StubField::Type::String);
  }
  void writeSymbolField(JS::Symbol* sym) {
    MOZ_ASSERT(sym);
    addStubField(uintptr_t(sym), StubField::Type::Symbol);
  }
  void writeWeakBaseScriptField(BaseScript* script) {
    MOZ_ASSERT(script);
    addStubField(uintptr_t(script), StubField::Type::WeakBaseScript);
  }
  void writeJitCodeField(JitCode* code) {
    MOZ_ASSERT(code);
    addStubField(uintptr_t(code), StubField::Type::JitCode);
  }
  void writeRawInt32Field(uint32_t val) {
    addStubField(val, StubField::Type::RawInt32);
  }
  void writeRawPointerField(const void* ptr) {
    addStubField(uintptr_t(ptr), StubField::Type::RawPointer);
  }
  void writeIdField(jsid id) {
    addStubField(id.asRawBits(), StubField::Type::Id);
  }
  void writeValueField(const Value& val) {
    addStubField(val.asRawBits(), StubField::Type::Value);
  }
  void writeRawInt64Field(uint64_t val) {
    addStubField(val, StubField::Type::RawInt64);
  }
  void writeDoubleField(double d) {
    uint64_t bits = mozilla::BitwiseCast<uint64_t>(d);
    addStubField(bits, StubField::Type::Double);
  }
  void writeAllocSiteField(gc::AllocSite* ptr) {
    addStubField(uintptr_t(ptr), StubField::Type::AllocSite);
  }
  void writeJSOpImm(JSOp op) {
    static_assert(sizeof(JSOp) == sizeof(uint8_t), "JSOp must fit in a byte");
    buffer_.writeByte(uint8_t(op));
  }
  void writeTypeofEqOperandImm(TypeofEqOperand operand) {
    buffer_.writeByte(operand.rawValue());
  }
  void writeGuardClassKindImm(GuardClassKind kind) {
    static_assert(sizeof(GuardClassKind) == sizeof(uint8_t),
                  "GuardClassKind must fit in a byte");
    buffer_.writeByte(uint8_t(kind));
  }
  void writeArrayBufferViewKindImm(ArrayBufferViewKind kind) {
    static_assert(sizeof(ArrayBufferViewKind) == sizeof(uint8_t),
                  "ArrayBufferViewKind must fit in a byte");
    buffer_.writeByte(uint8_t(kind));
  }
  void writeValueTypeImm(ValueType type) {
    static_assert(sizeof(ValueType) == sizeof(uint8_t),
                  "ValueType must fit in uint8_t");
    buffer_.writeByte(uint8_t(type));
  }
  void writeJSWhyMagicImm(JSWhyMagic whyMagic) {
    static_assert(JS_WHY_MAGIC_COUNT <= UINT8_MAX,
                  "JSWhyMagic must fit in uint8_t");
    buffer_.writeByte(uint8_t(whyMagic));
  }
  void writeScalarTypeImm(Scalar::Type type) {
    MOZ_ASSERT(size_t(type) <= UINT8_MAX);
    buffer_.writeByte(uint8_t(type));
  }
  void writeUnaryMathFunctionImm(UnaryMathFunction fun) {
    static_assert(sizeof(UnaryMathFunction) == sizeof(uint8_t),
                  "UnaryMathFunction must fit in a byte");
    buffer_.writeByte(uint8_t(fun));
  }
  void writeCompletionKindImm(CompletionKind kind) {
    static_assert(sizeof(CompletionKind) == sizeof(uint8_t),
                  "CompletionKind must fit in a byte");
    buffer_.writeByte(uint8_t(kind));
  }
  void writeBoolImm(bool b) { buffer_.writeByte(uint32_t(b)); }
  void writeRealmFuseIndexImm(RealmFuses::FuseIndex realmFuseIndex) {
    static_assert(sizeof(RealmFuses::FuseIndex) == sizeof(uint8_t),
                  "RealmFuses::FuseIndex must fit in a byte");
    buffer_.writeByte(uint8_t(realmFuseIndex));
  }

  void writeByteImm(uint32_t b) {
    MOZ_ASSERT(b <= UINT8_MAX);
    buffer_.writeByte(b);
  }

  void writeInt32Imm(int32_t i32) { buffer_.writeFixedUint32_t(i32); }
  void writeUInt32Imm(uint32_t u32) { buffer_.writeFixedUint32_t(u32); }
  void writePointer(const void* ptr) { buffer_.writeRawPointer(ptr); }

  void writeJSNativeImm(JSNative native) {
    writePointer(JS_FUNC_TO_DATA_PTR(void*, native));
  }
  void writeStaticStringImm(const char* str) { writePointer(str); }

  void writeWasmValTypeImm(wasm::ValType::Kind kind) {
    static_assert(unsigned(wasm::TypeCode::Limit) <= UINT8_MAX);
    buffer_.writeByte(uint8_t(kind));
  }

  void writeAllocKindImm(gc::AllocKind kind) {
    static_assert(unsigned(gc::AllocKind::LIMIT) <= UINT8_MAX);
    buffer_.writeByte(uint8_t(kind));
  }
  uint32_t newOperandId() { return nextOperandId_++; }

  CacheIRWriter(const CacheIRWriter&) = delete;
  CacheIRWriter& operator=(const CacheIRWriter&) = delete;

 public:
  explicit CacheIRWriter(JSContext* cx)
      : CustomAutoRooter(cx),
#ifdef DEBUG
        cx_(cx),
#endif
        nextOperandId_(0),
        nextInstructionId_(0),
        numInputOperands_(0),
        stubDataSize_(0),
        tooLarge_(false),
        lastOffset_(0),
        lastIndex_(0) {
  }

  bool tooLarge() const { return tooLarge_; }
  bool oom() const { return buffer_.oom(); }
  bool failed() const { return tooLarge() || oom(); }

  TrialInliningState trialInliningState() const { return trialInliningState_; }

  uint32_t numInputOperands() const { return numInputOperands_; }
  uint32_t numOperandIds() const { return nextOperandId_; }
  uint32_t numInstructions() const { return nextInstructionId_; }

  size_t numStubFields() const { return stubFields_.length(); }
  StubField::Type stubFieldType(uint32_t i) const {
    return stubFields_[i].type();
  }

  uint32_t setInputOperandId(uint32_t op) {
    MOZ_ASSERT(op == nextOperandId_);
    nextOperandId_++;
    numInputOperands_++;
    return op;
  }

  TypeData typeData() const { return typeData_; }
  void setTypeData(TypeData data) { typeData_ = data; }

  void trace(JSTracer* trc) override {
    // For now, assert we only GC before we append stub fields.
    MOZ_RELEASE_ASSERT(stubFields_.empty());
  }

  size_t stubDataSize() const { return stubDataSize_; }
  void copyStubData(uint8_t* dest) const;
  bool stubDataEquals(const uint8_t* stubData) const;
  bool stubDataEqualsIgnoring(const uint8_t* stubData,
                              uint32_t ignoreOffset) const;

  bool operandIsDead(uint32_t operandId, uint32_t currentInstruction) const {
    if (operandId >= operandLastUsed_.length()) {
      return false;
    }
    return currentInstruction > operandLastUsed_[operandId];
  }

  const uint8_t* codeStart() const {
    MOZ_ASSERT(!failed());
    return buffer_.buffer();
  }

  const uint8_t* codeEnd() const {
    MOZ_ASSERT(!failed());
    return buffer_.buffer() + buffer_.length();
  }

  uint32_t codeLength() const {
    MOZ_ASSERT(!failed());
    return buffer_.length();
  }

  // This should not be used when compiling Baseline code, as Baseline code
  // shouldn't bake in stub values.
  StubField readStubField(uint32_t offset, StubField::Type type) const;
  ObjOperandId guardToObject(ValOperandId input) {
    guardToObject_(input);
    return ObjOperandId(input.id());
  }

  StringOperandId guardToString(ValOperandId input) {
    guardToString_(input);
    return StringOperandId(input.id());
  }

  SymbolOperandId guardToSymbol(ValOperandId input) {
    guardToSymbol_(input);
    return SymbolOperandId(input.id());
  }

  BigIntOperandId guardToBigInt(ValOperandId input) {
    guardToBigInt_(input);
    return BigIntOperandId(input.id());
  }

  BooleanOperandId guardToBoolean(ValOperandId input) {
    guardToBoolean_(input);
    return BooleanOperandId(input.id());
  }

  Int32OperandId guardToInt32(ValOperandId input) {
    guardToInt32_(input);
    return Int32OperandId(input.id());
  }

  NumberOperandId guardIsNumber(ValOperandId input) {
    guardIsNumber_(input);
    return NumberOperandId(input.id());
  }

  StringOperandId stringToAtom(StringOperandId input) {
    stringToAtom_(input);
    return input;
  }

  ValOperandId boxObject(ObjOperandId input) {
    return ValOperandId(input.id());
  }

  void guardShapeForClass(ObjOperandId obj, Shape* shape) {
    // Guard shape to ensure that object class is unchanged. This is true
    // for all shapes.
    guardShape(obj, shape);
  }

  void guardShapeForOwnProperties(ObjOperandId obj, Shape* shape) {
    // Guard shape to detect changes to (non-dense) own properties. This
    // also implies |guardShapeForClass|.
    MOZ_ASSERT(shape->getObjectClass()->isNativeObject());
    guardShape(obj, shape);
  }
 public:
  void guardSpecificFunction(ObjOperandId obj, JSFunction* expected) {
    // Guard object is a specific function. This implies immutable fields on
    // the JSFunction struct itself are unchanged.
    // Bake in the nargs and FunctionFlags so Warp can use them off-main
    // thread, instead of directly using the JSFunction fields.
    uint32_t nargsAndFlags = expected->flagsAndArgCountRaw();
    guardSpecificFunction_(obj, expected, nargsAndFlags);
  }

  void guardFunctionScript(ObjOperandId fun, BaseScript* expected) {
    // Guard function has a specific BaseScript. This implies immutable fields
    // on the JSFunction struct itself are unchanged and are equivalent for
    // lambda clones.
    // Bake in the nargs and FunctionFlags so Warp can use them off-main
    // thread, instead of directly using the JSFunction fields.
    uint32_t nargsAndFlags = expected->function()->flagsAndArgCountRaw();
    guardFunctionScript_(fun, expected, nargsAndFlags);
  }

  ValOperandId loadArgumentFixedSlot(
      ArgumentKind kind, uint32_t argc,
      CallFlags flags = CallFlags(CallFlags::Standard)) {
    bool addArgc;
    int32_t slotIndex = GetIndexOfArgument(kind, flags, &addArgc);
    if (addArgc) {
      slotIndex += argc;
    }
    MOZ_ASSERT(slotIndex >= 0);
    MOZ_RELEASE_ASSERT(slotIndex <= UINT8_MAX);
    return loadArgumentFixedSlot_(slotIndex);
  }

  ValOperandId loadArgumentDynamicSlot(
      ArgumentKind kind, Int32OperandId argcId,
      CallFlags flags = CallFlags(CallFlags::Standard)) {
    bool addArgc;
    int32_t slotIndex = GetIndexOfArgument(kind, flags, &addArgc);
    if (addArgc) {
      return loadArgumentDynamicSlot_(argcId, slotIndex);
    }
    return loadArgumentFixedSlot_(slotIndex);
  }

  ObjOperandId loadSpreadArgs() {
    ArgumentKind kind = ArgumentKind::Arg0;
    uint32_t argc = 1;
    CallFlags flags(CallFlags::Spread);
    return ObjOperandId(loadArgumentFixedSlot(kind, argc, flags).id());
  }

  void callScriptedFunction(ObjOperandId callee, Int32OperandId argc,
                            CallFlags flags, uint32_t argcFixed) {
    callScriptedFunction_(callee, argc, flags, argcFixed);
    trialInliningState_ = TrialInliningState::Candidate;
  }

  void callInlinedFunction(ObjOperandId callee, Int32OperandId argc,
                           ICScript* icScript, CallFlags flags,
                           uint32_t argcFixed) {
    callInlinedFunction_(callee, argc, icScript, flags, argcFixed);
    trialInliningState_ = TrialInliningState::Inlined;
  }
  void callNativeFunction(ObjOperandId calleeId, Int32OperandId argc, JSOp op,
                          JSFunction* calleeFunc, CallFlags flags,
                          uint32_t argcFixed) {
    // Some native functions can be implemented faster if we know that
    // the return value is ignored.
    bool ignoresReturnValue =
        op == JSOp::CallIgnoresRv && calleeFunc->hasJitInfo() &&
        calleeFunc->jitInfo()->type() == JSJitInfo::IgnoresReturnValueNative;

#ifdef JS_SIMULATOR
    // The simulator requires VM calls to be redirected to a special
    // swi instruction to handle them, so we store the redirected
    // pointer in the stub and use that instead of the original one.
    // If we are calling the ignoresReturnValue version of a native
    // function, we bake it into the redirected pointer.
    // (See BaselineCacheIRCompiler::emitCallNativeFunction.)
    JSNative target = ignoresReturnValue
                          ? calleeFunc->jitInfo()->ignoresReturnValueMethod
                          : calleeFunc->native();
    void* rawPtr = JS_FUNC_TO_DATA_PTR(void*, target);
    void* redirected = Simulator::RedirectNativeFunction(rawPtr, Args_General3);
    callNativeFunction_(calleeId, argc, flags, argcFixed, redirected);
#else
    // If we are not running in the simulator, we generate different jitcode
    // to find the ignoresReturnValue version of a native function.
    callNativeFunction_(calleeId, argc, flags, argcFixed, ignoresReturnValue);
#endif
  }

  void callDOMFunction(ObjOperandId calleeId, Int32OperandId argc,
                       ObjOperandId thisObjId, JSFunction* calleeFunc,
                       CallFlags flags, uint32_t argcFixed) {
#ifdef JS_SIMULATOR
    void* rawPtr = JS_FUNC_TO_DATA_PTR(void*, calleeFunc->native());
    void* redirected = Simulator::RedirectNativeFunction(rawPtr, Args_General3);
    callDOMFunction_(calleeId, argc, thisObjId, flags, argcFixed, redirected);
#else
    callDOMFunction_(calleeId, argc, thisObjId, flags, argcFixed);
#endif
  }

  void callAnyNativeFunction(ObjOperandId calleeId, Int32OperandId argc,
                             CallFlags flags, uint32_t argcFixed) {
    MOZ_ASSERT(!flags.isSameRealm());
#ifdef JS_SIMULATOR
    const void* redirected = RedirectedCallAnyNative();
    callNativeFunction_(calleeId, argc, flags, argcFixed, redirected);
#else
    callNativeFunction_(calleeId, argc, flags, argcFixed,
                        /* ignoresReturnValue = */ false);
#endif
  }

  void callClassHook(ObjOperandId calleeId, Int32OperandId argc, JSNative hook,
                     CallFlags flags, uint32_t argcFixed) {
    MOZ_ASSERT(!flags.isSameRealm());
    void* target = JS_FUNC_TO_DATA_PTR(void*, hook);
#ifdef JS_SIMULATOR
    // The simulator requires VM calls to be redirected to a special
    // swi instruction to handle them, so we store the redirected
    // pointer in the stub and use that instead of the original one.
    target = Simulator::RedirectNativeFunction(target, Args_General3);
#endif
    callClassHook_(calleeId, argc, flags, argcFixed, target);
  }
  void callScriptedGetterResult(ValOperandId receiver, JSFunction* getter,
                                bool sameRealm) {
    MOZ_ASSERT(getter->hasJitEntry());
    uint32_t nargsAndFlags = getter->flagsAndArgCountRaw();
    callScriptedGetterResult_(receiver, getter, sameRealm, nargsAndFlags);
    trialInliningState_ = TrialInliningState::Candidate;
  }

  void callInlinedGetterResult(ValOperandId receiver, JSFunction* getter,
                               ICScript* icScript, bool sameRealm) {
    MOZ_ASSERT(getter->hasJitEntry());
    uint32_t nargsAndFlags = getter->flagsAndArgCountRaw();
    callInlinedGetterResult_(receiver, getter, icScript, sameRealm,
                             nargsAndFlags);
    trialInliningState_ = TrialInliningState::Inlined;
  }

  void callNativeGetterResult(ValOperandId receiver, JSFunction* getter,
                              bool sameRealm) {
    MOZ_ASSERT(getter->isNativeWithoutJitEntry());
    uint32_t nargsAndFlags = getter->flagsAndArgCountRaw();
    callNativeGetterResult_(receiver, getter, sameRealm, nargsAndFlags);
  }

  void callScriptedSetter(ObjOperandId receiver, JSFunction* setter,
                          ValOperandId rhs, bool sameRealm) {
    MOZ_ASSERT(setter->hasJitEntry());
    uint32_t nargsAndFlags = setter->flagsAndArgCountRaw();
    callScriptedSetter_(receiver, setter, rhs, sameRealm, nargsAndFlags);
    trialInliningState_ = TrialInliningState::Candidate;
  }

  void callInlinedSetter(ObjOperandId receiver, JSFunction* setter,
                         ValOperandId rhs, ICScript* icScript, bool sameRealm) {
    MOZ_ASSERT(setter->hasJitEntry());
    uint32_t nargsAndFlags = setter->flagsAndArgCountRaw();
    callInlinedSetter_(receiver, setter, rhs, icScript, sameRealm,
                       nargsAndFlags);
    trialInliningState_ = TrialInliningState::Inlined;
  }

  void callNativeSetter(ObjOperandId receiver, JSFunction* setter,
                        ValOperandId rhs, bool sameRealm) {
    MOZ_ASSERT(setter->isNativeWithoutJitEntry());
    uint32_t nargsAndFlags = setter->flagsAndArgCountRaw();
    callNativeSetter_(receiver, setter, rhs, sameRealm, nargsAndFlags);
  }
#ifdef JS_PUNBOX64
  void callScriptedProxyGetResult(ValOperandId target, ObjOperandId receiver,
                                  ObjOperandId handler, ObjOperandId trapId,
                                  JSFunction* trap, HandleId property) {
    MOZ_ASSERT(trap->hasJitEntry());
    uint32_t nargsAndFlags = trap->flagsAndArgCountRaw();
    callScriptedProxyGetResult_(target, receiver, handler, trapId, property,
                                nargsAndFlags);
  }

  void callScriptedProxyGetByValueResult(
      ValOperandId target, ObjOperandId receiver, ObjOperandId handler,
      ValOperandId property, ObjOperandId trapId, JSFunction* trap) {
    MOZ_ASSERT(trap->hasJitEntry());
    uint32_t nargsAndFlags = trap->flagsAndArgCountRaw();
    callScriptedProxyGetByValueResult_(target, receiver, handler, property,
                                       trapId, nargsAndFlags);
  }
#endif

  void metaScriptedThisShape(Shape* thisShape) {
    metaScriptedThisShape_(thisShape);
  }

  void guardMultipleShapes(ObjOperandId obj, ListObject* shapes) {
    MOZ_ASSERT(shapes->length() > 0);
    guardMultipleShapes_(obj, shapes);
  }

  friend class CacheIRCloner;

  CACHE_IR_WRITER_GENERATED
};

}  // namespace jit
}  // namespace js

#endif /* jit_CacheIRWriter_h */