1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
2 * vim: set ts=8 sts=4 et sw=4 tw=99:
3 * This Source Code Form is subject to the terms of the Mozilla Public
4 * License, v. 2.0. If a copy of the MPL was not distributed with this
5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
8 * JS bytecode generation.
11 #include "frontend/BytecodeEmitter.h"
13 #include "mozilla/ArrayUtils.h"
14 #include "mozilla/DebugOnly.h"
15 #include "mozilla/FloatingPoint.h"
16 #include "mozilla/PodOperations.h"
17 #include "mozilla/UniquePtr.h"
31 #include "asmjs/AsmJSLink.h"
32 #include "frontend/Parser.h"
33 #include "frontend/TokenStream.h"
34 #include "vm/Debugger.h"
35 #include "vm/GeneratorObject.h"
38 #include "jsatominlines.h"
39 #include "jsobjinlines.h"
40 #include "jsscriptinlines.h"
42 #include "frontend/ParseMaps-inl.h"
43 #include "frontend/ParseNode-inl.h"
44 #include "vm/NativeObject-inl.h"
45 #include "vm/ScopeObject-inl.h"
48 using namespace js::gc
;
49 using namespace js::frontend
;
51 using mozilla::DebugOnly
;
52 using mozilla::NumberIsInt32
;
53 using mozilla::PodCopy
;
54 using mozilla::UniquePtr
;
57 SetSrcNoteOffset(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, unsigned index
, unsigned which
, ptrdiff_t offset
);
60 UpdateSourceCoordNotes(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, uint32_t offset
);
62 struct frontend::StmtInfoBCE
: public StmtInfoBase
64 StmtInfoBCE
* down
; /* info for enclosing statement */
65 StmtInfoBCE
* downScope
; /* next enclosing lexical scope */
67 ptrdiff_t update
; /* loop update offset (top if none) */
68 ptrdiff_t breaks
; /* offset of last break in loop */
69 ptrdiff_t continues
; /* offset of last continue in loop */
70 uint32_t blockScopeIndex
; /* index of scope in BlockScopeArray */
72 explicit StmtInfoBCE(ExclusiveContext
* cx
) : StmtInfoBase(cx
) {}
75 * To reuse space, alias two of the ptrdiff_t fields for use during
76 * try/catch/finally code generation and backpatching.
78 * Only a loop, switch, or label statement info record can have breaks and
79 * continues, and only a for loop has an update backpatch chain, so it's
80 * safe to overlay these for the "trying" StmtTypes.
84 MOZ_ASSERT(type
== STMT_FINALLY
);
88 ptrdiff_t& guardJump() {
89 MOZ_ASSERT(type
== STMT_TRY
|| type
== STMT_FINALLY
);
97 struct LoopStmtInfo
: public StmtInfoBCE
99 int32_t stackDepth
; // Stack depth when this loop was pushed.
100 uint32_t loopDepth
; // Loop depth.
102 // Can we OSR into Ion from here? True unless there is non-loop state on the stack.
105 explicit LoopStmtInfo(ExclusiveContext
* cx
) : StmtInfoBCE(cx
) {}
107 static LoopStmtInfo
* fromStmtInfo(StmtInfoBCE
* stmt
) {
108 MOZ_ASSERT(stmt
->isLoop());
109 return static_cast<LoopStmtInfo
*>(stmt
);
113 } // anonymous namespace
115 BytecodeEmitter::BytecodeEmitter(BytecodeEmitter
* parent
,
116 Parser
<FullParseHandler
>* parser
, SharedContext
* sc
,
117 HandleScript script
, Handle
<LazyScript
*> lazyScript
,
118 bool insideEval
, HandleScript evalCaller
,
119 bool hasGlobalScope
, uint32_t lineNum
, EmitterMode emitterMode
)
122 script(sc
->context
, script
),
123 lazyScript(sc
->context
, lazyScript
),
124 prolog(sc
->context
, lineNum
),
125 main(sc
->context
, lineNum
),
128 evalCaller(evalCaller
),
130 topScopeStmt(nullptr),
131 staticScope(sc
->context
),
132 atomIndices(sc
->context
),
134 localsToFrameSlots_(sc
->context
),
135 stackDepth(0), maxStackDepth(0),
138 constList(sc
->context
),
139 tryNoteList(sc
->context
),
140 blockScopeList(sc
->context
),
141 yieldOffsetList(sc
->context
),
143 hasSingletons(false),
144 emittingForInit(false),
145 emittingRunOnceLambda(false),
146 insideEval(insideEval
),
147 hasGlobalScope(hasGlobalScope
),
148 emitterMode(emitterMode
)
150 MOZ_ASSERT_IF(evalCaller
, insideEval
);
151 MOZ_ASSERT_IF(emitterMode
== LazyFunction
, lazyScript
);
155 BytecodeEmitter::init()
157 return atomIndices
.ensureMap(sc
->context
);
161 BytecodeEmitter::updateLocalsToFrameSlots()
163 // Assign stack slots to unaliased locals (aliased locals are stored in the
164 // call object and don't need their own stack slots). We do this by filling
165 // a Vector that can be used to map a local to its stack slot.
167 if (localsToFrameSlots_
.length() == script
->bindings
.numLocals()) {
168 // CompileScript calls updateNumBlockScoped to update the block scope
169 // depth. Do nothing if the depth didn't change.
173 localsToFrameSlots_
.clear();
175 if (!localsToFrameSlots_
.reserve(script
->bindings
.numLocals()))
179 for (BindingIter
bi(script
); !bi
.done(); bi
++) {
180 if (bi
->kind() == Binding::ARGUMENT
)
184 localsToFrameSlots_
.infallibleAppend(UINT32_MAX
);
186 localsToFrameSlots_
.infallibleAppend(slot
++);
189 for (size_t i
= 0; i
< script
->bindings
.numBlockScoped(); i
++)
190 localsToFrameSlots_
.infallibleAppend(slot
++);
196 EmitCheck(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ptrdiff_t delta
)
198 ptrdiff_t offset
= bce
->code().length();
200 // Start it off moderately large to avoid repeated resizings early on.
201 // ~98% of cases fit within 1024 bytes.
202 if (bce
->code().capacity() == 0 && !bce
->code().reserve(1024))
205 jsbytecode dummy
= 0;
206 if (!bce
->code().appendN(dummy
, delta
)) {
207 js_ReportOutOfMemory(cx
);
214 UpdateDepth(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ptrdiff_t target
)
216 jsbytecode
* pc
= bce
->code(target
);
217 JSOp op
= (JSOp
) *pc
;
218 const JSCodeSpec
* cs
= &js_CodeSpec
[op
];
220 if (cs
->format
& JOF_TMPSLOT_MASK
) {
222 * An opcode may temporarily consume stack space during execution.
223 * Account for this in maxStackDepth separately from uses/defs here.
225 uint32_t depth
= (uint32_t) bce
->stackDepth
+
226 ((cs
->format
& JOF_TMPSLOT_MASK
) >> JOF_TMPSLOT_SHIFT
);
227 if (depth
> bce
->maxStackDepth
)
228 bce
->maxStackDepth
= depth
;
231 int nuses
= StackUses(nullptr, pc
);
232 int ndefs
= StackDefs(nullptr, pc
);
234 bce
->stackDepth
-= nuses
;
235 MOZ_ASSERT(bce
->stackDepth
>= 0);
236 bce
->stackDepth
+= ndefs
;
237 if ((uint32_t)bce
->stackDepth
> bce
->maxStackDepth
)
238 bce
->maxStackDepth
= bce
->stackDepth
;
243 CheckStrictOrSloppy(BytecodeEmitter
* bce
, JSOp op
)
245 if (IsCheckStrictOp(op
) && !bce
->sc
->strict
)
247 if (IsCheckSloppyOp(op
) && bce
->sc
->strict
)
254 frontend::Emit1(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp op
)
256 MOZ_ASSERT(CheckStrictOrSloppy(bce
, op
));
257 ptrdiff_t offset
= EmitCheck(cx
, bce
, 1);
261 jsbytecode
* code
= bce
->code(offset
);
262 code
[0] = jsbytecode(op
);
263 UpdateDepth(cx
, bce
, offset
);
268 frontend::Emit2(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp op
, jsbytecode op1
)
270 MOZ_ASSERT(CheckStrictOrSloppy(bce
, op
));
271 ptrdiff_t offset
= EmitCheck(cx
, bce
, 2);
275 jsbytecode
* code
= bce
->code(offset
);
276 code
[0] = jsbytecode(op
);
278 UpdateDepth(cx
, bce
, offset
);
283 frontend::Emit3(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp op
, jsbytecode op1
,
286 MOZ_ASSERT(CheckStrictOrSloppy(bce
, op
));
288 /* These should filter through EmitVarOp. */
289 MOZ_ASSERT(!IsArgOp(op
));
290 MOZ_ASSERT(!IsLocalOp(op
));
292 ptrdiff_t offset
= EmitCheck(cx
, bce
, 3);
296 jsbytecode
* code
= bce
->code(offset
);
297 code
[0] = jsbytecode(op
);
300 UpdateDepth(cx
, bce
, offset
);
305 frontend::EmitN(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp op
, size_t extra
)
307 MOZ_ASSERT(CheckStrictOrSloppy(bce
, op
));
308 ptrdiff_t length
= 1 + (ptrdiff_t)extra
;
309 ptrdiff_t offset
= EmitCheck(cx
, bce
, length
);
313 jsbytecode
* code
= bce
->code(offset
);
314 code
[0] = jsbytecode(op
);
315 /* The remaining |extra| bytes are set by the caller */
318 * Don't UpdateDepth if op's use-count comes from the immediate
319 * operand yet to be stored in the extra bytes after op.
321 if (js_CodeSpec
[op
].nuses
>= 0)
322 UpdateDepth(cx
, bce
, offset
);
328 EmitJump(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp op
, ptrdiff_t off
)
330 ptrdiff_t offset
= EmitCheck(cx
, bce
, 5);
334 jsbytecode
* code
= bce
->code(offset
);
335 code
[0] = jsbytecode(op
);
336 SET_JUMP_OFFSET(code
, off
);
337 UpdateDepth(cx
, bce
, offset
);
342 EmitCall(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp op
, uint16_t argc
, ParseNode
* pn
=nullptr)
344 if (pn
&& !UpdateSourceCoordNotes(cx
, bce
, pn
->pn_pos
.begin
))
346 return Emit3(cx
, bce
, op
, ARGC_HI(argc
), ARGC_LO(argc
));
349 // Dup the var in operand stack slot "slot". The first item on the operand
350 // stack is one slot past the last fixed slot. The last (most recent) item is
351 // slot bce->stackDepth - 1.
353 // The instruction that is written (JSOP_DUPAT) switches the depth around so
354 // that it is addressed from the sp instead of from the fp. This is useful when
355 // you don't know the size of the fixed stack segment (nfixed), as is the case
356 // when compiling scripts (because each statement is parsed and compiled
357 // separately, but they all together form one script with one fixed stack
360 EmitDupAt(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, unsigned slot
)
362 MOZ_ASSERT(slot
< unsigned(bce
->stackDepth
));
363 // The slot's position on the operand stack, measured from the top.
364 unsigned slotFromTop
= bce
->stackDepth
- 1 - slot
;
365 if (slotFromTop
>= JS_BIT(24)) {
366 bce
->reportError(nullptr, JSMSG_TOO_MANY_LOCALS
);
369 ptrdiff_t off
= EmitN(cx
, bce
, JSOP_DUPAT
, 3);
372 jsbytecode
* pc
= bce
->code(off
);
373 SET_UINT24(pc
, slotFromTop
);
377 /* XXX too many "... statement" L10N gaffes below -- fix via js.msg! */
378 const char js_with_statement_str
[] = "with statement";
379 const char js_finally_block_str
[] = "finally block";
380 const char js_script_str
[] = "script";
382 static const char * const statementName
[] = {
383 "label statement", /* LABEL */
384 "if statement", /* IF */
385 "else statement", /* ELSE */
386 "destructuring body", /* BODY */
387 "switch statement", /* SWITCH */
389 js_with_statement_str
, /* WITH */
390 "catch block", /* CATCH */
391 "try block", /* TRY */
392 js_finally_block_str
, /* FINALLY */
393 js_finally_block_str
, /* SUBROUTINE */
394 "do loop", /* DO_LOOP */
395 "for loop", /* FOR_LOOP */
396 "for/in loop", /* FOR_IN_LOOP */
397 "for/of loop", /* FOR_OF_LOOP */
398 "while loop", /* WHILE_LOOP */
399 "spread", /* SPREAD */
402 static_assert(MOZ_ARRAY_LENGTH(statementName
) == STMT_LIMIT
,
403 "statementName array and StmtType enum must be consistent");
406 StatementName(StmtInfoBCE
* topStmt
)
409 return js_script_str
;
410 return statementName
[topStmt
->type
];
414 ReportStatementTooLarge(TokenStream
& ts
, StmtInfoBCE
* topStmt
)
416 ts
.reportError(JSMSG_NEED_DIET
, StatementName(topStmt
));
420 * Emit a backpatch op with offset pointing to the previous jump of this type,
421 * so that we can walk back up the chain fixing up the op and jump offset.
/*
 * Emit a JSOP_BACKPATCH jump whose operand is the delta back to the previous
 * jump of this chain, so the chain can later be walked to patch in real
 * offsets (see BackPatch below).
 *
 * NOTE(review): this chunk is a lossy extraction (statements split across
 * lines). Per the embedded upstream line numbers, several lines are absent
 * here -- notably line 430, which upstream threads the chain via
 * |*lastp = offset;|. Confirm against the canonical source before building.
 */
424 EmitBackPatchOp(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ptrdiff_t* lastp
)
426 ptrdiff_t offset
, delta
;
428 offset
= bce
->offset();
429 delta
= offset
- *lastp
;
431 MOZ_ASSERT(delta
> 0);
432 return EmitJump(cx
, bce
, JSOP_BACKPATCH
, delta
);
435 static inline unsigned
436 LengthOfSetLine(unsigned line
)
438 return 1 /* SN_SETLINE */ + (line
> SN_4BYTE_OFFSET_MASK
? 4 : 1);
441 /* Updates line number notes, not column notes. */
443 UpdateLineNumberNotes(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, uint32_t offset
)
445 TokenStream
* ts
= &bce
->parser
->tokenStream
;
447 if (!ts
->srcCoords
.isOnThisLine(offset
, bce
->currentLine(), &onThisLine
))
448 return ts
->reportError(JSMSG_OUT_OF_MEMORY
);
450 unsigned line
= ts
->srcCoords
.lineNum(offset
);
451 unsigned delta
= line
- bce
->currentLine();
454 * Encode any change in the current source line number by using
455 * either several SRC_NEWLINE notes or just one SRC_SETLINE note,
456 * whichever consumes less space.
458 * NB: We handle backward line number deltas (possible with for
459 * loops where the update part is emitted after the body, but its
460 * line number is <= any line number in the body) here by letting
461 * unsigned delta_ wrap to a very large number, which triggers a
464 bce
->current
->currentLine
= line
;
465 bce
->current
->lastColumn
= 0;
466 if (delta
>= LengthOfSetLine(line
)) {
467 if (NewSrcNote2(cx
, bce
, SRC_SETLINE
, (ptrdiff_t)line
) < 0)
471 if (NewSrcNote(cx
, bce
, SRC_NEWLINE
) < 0)
473 } while (--delta
!= 0);
479 /* Updates the line number and column number information in the source notes. */
481 UpdateSourceCoordNotes(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, uint32_t offset
)
483 if (!UpdateLineNumberNotes(cx
, bce
, offset
))
486 uint32_t columnIndex
= bce
->parser
->tokenStream
.srcCoords
.columnIndex(offset
);
487 ptrdiff_t colspan
= ptrdiff_t(columnIndex
) - ptrdiff_t(bce
->current
->lastColumn
);
489 // If the column span is so large that we can't store it, then just
490 // discard this information. This can happen with minimized or otherwise
491 // machine-generated code. Even gigantic column numbers are still
492 // valuable if you have a source map to relate them to something real;
493 // but it's better to fail soft here.
494 if (!SN_REPRESENTABLE_COLSPAN(colspan
))
496 if (NewSrcNote2(cx
, bce
, SRC_COLSPAN
, SN_COLSPAN_TO_OFFSET(colspan
)) < 0)
498 bce
->current
->lastColumn
= columnIndex
;
504 EmitLoopHead(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* nextpn
)
508 * Try to give the JSOP_LOOPHEAD the same line number as the next
509 * instruction. nextpn is often a block, in which case the next
510 * instruction typically comes from the first statement inside.
512 MOZ_ASSERT_IF(nextpn
->isKind(PNK_STATEMENTLIST
), nextpn
->isArity(PN_LIST
));
513 if (nextpn
->isKind(PNK_STATEMENTLIST
) && nextpn
->pn_head
)
514 nextpn
= nextpn
->pn_head
;
515 if (!UpdateSourceCoordNotes(cx
, bce
, nextpn
->pn_pos
.begin
))
519 return Emit1(cx
, bce
, JSOP_LOOPHEAD
);
523 EmitLoopEntry(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* nextpn
)
526 /* Update the line number, as for LOOPHEAD. */
527 MOZ_ASSERT_IF(nextpn
->isKind(PNK_STATEMENTLIST
), nextpn
->isArity(PN_LIST
));
528 if (nextpn
->isKind(PNK_STATEMENTLIST
) && nextpn
->pn_head
)
529 nextpn
= nextpn
->pn_head
;
530 if (!UpdateSourceCoordNotes(cx
, bce
, nextpn
->pn_pos
.begin
))
534 LoopStmtInfo
* loop
= LoopStmtInfo::fromStmtInfo(bce
->topStmt
);
535 MOZ_ASSERT(loop
->loopDepth
> 0);
537 uint8_t loopDepthAndFlags
= PackLoopEntryDepthHintAndFlags(loop
->loopDepth
, loop
->canIonOsr
);
538 return Emit2(cx
, bce
, JSOP_LOOPENTRY
, loopDepthAndFlags
) >= 0;
542 * If op is JOF_TYPESET (see the type barriers comment in jsinfer.h), reserve
543 * a type set to store its result.
546 CheckTypeSet(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp op
)
548 if (js_CodeSpec
[op
].format
& JOF_TYPESET
) {
549 if (bce
->typesetCount
< UINT16_MAX
)
555 * Macro to emit a bytecode followed by a uint16_t immediate operand stored in
558 * NB: We use cx and bce from our caller's lexical environment, and return
561 #define EMIT_UINT16_IMM_OP(op, i) \
563 if (Emit3(cx, bce, op, UINT16_HI(i), UINT16_LO(i)) < 0) \
565 CheckTypeSet(cx, bce, op); \
569 FlushPops(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, int* npops
)
571 MOZ_ASSERT(*npops
!= 0);
572 EMIT_UINT16_IMM_OP(JSOP_POPN
, *npops
);
578 PopIterator(ExclusiveContext
* cx
, BytecodeEmitter
* bce
)
580 if (Emit1(cx
, bce
, JSOP_ENDITER
) < 0)
587 class NonLocalExitScope
{
588 ExclusiveContext
* cx
;
589 BytecodeEmitter
* bce
;
590 const uint32_t savedScopeIndex
;
591 const int savedDepth
;
592 uint32_t openScopeIndex
;
594 NonLocalExitScope(const NonLocalExitScope
&) = delete;
597 explicit NonLocalExitScope(ExclusiveContext
* cx_
, BytecodeEmitter
* bce_
)
600 savedScopeIndex(bce
->blockScopeList
.length()),
601 savedDepth(bce
->stackDepth
),
602 openScopeIndex(UINT32_MAX
) {
603 if (bce
->staticScope
) {
604 StmtInfoBCE
* stmt
= bce
->topStmt
;
607 if (stmt
->isNestedScope
) {
608 openScopeIndex
= stmt
->blockScopeIndex
;
616 ~NonLocalExitScope() {
617 for (uint32_t n
= savedScopeIndex
; n
< bce
->blockScopeList
.length(); n
++)
618 bce
->blockScopeList
.recordEnd(n
, bce
->offset());
619 bce
->stackDepth
= savedDepth
;
622 bool popScopeForNonLocalExit(uint32_t blockScopeIndex
) {
623 uint32_t scopeObjectIndex
= bce
->blockScopeList
.findEnclosingScope(blockScopeIndex
);
624 uint32_t parent
= openScopeIndex
;
626 if (!bce
->blockScopeList
.append(scopeObjectIndex
, bce
->offset(), parent
))
628 openScopeIndex
= bce
->blockScopeList
.length() - 1;
632 bool prepareForNonLocalJump(StmtInfoBCE
* toStmt
);
636 * Emit additional bytecode(s) for non-local jumps.
639 NonLocalExitScope::prepareForNonLocalJump(StmtInfoBCE
* toStmt
)
643 #define FLUSH_POPS() if (npops && !FlushPops(cx, bce, &npops)) return false
645 for (StmtInfoBCE
* stmt
= bce
->topStmt
; stmt
!= toStmt
; stmt
= stmt
->down
) {
646 switch (stmt
->type
) {
649 if (EmitBackPatchOp(cx
, bce
, &stmt
->gosubs()) < 0)
654 if (Emit1(cx
, bce
, JSOP_LEAVEWITH
) < 0)
656 MOZ_ASSERT(stmt
->isNestedScope
);
657 if (!popScopeForNonLocalExit(stmt
->blockScopeIndex
))
661 case STMT_FOR_OF_LOOP
:
665 case STMT_FOR_IN_LOOP
:
666 /* The iterator and the current value are on the stack. */
669 if (!PopIterator(cx
, bce
))
674 MOZ_ASSERT_UNREACHABLE("can't break/continue/return from inside a spread");
677 case STMT_SUBROUTINE
:
679 * There's a [exception or hole, retsub pc-index] pair on the
680 * stack that we need to pop.
688 if (stmt
->isBlockScope
) {
689 MOZ_ASSERT(stmt
->isNestedScope
);
690 StaticBlockObject
& blockObj
= stmt
->staticBlock();
691 if (Emit1(cx
, bce
, JSOP_DEBUGLEAVEBLOCK
) < 0)
693 if (!popScopeForNonLocalExit(stmt
->blockScopeIndex
))
695 if (blockObj
.needsClone()) {
696 if (Emit1(cx
, bce
, JSOP_POPBLOCKSCOPE
) < 0)
708 } // anonymous namespace
711 EmitGoto(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, StmtInfoBCE
* toStmt
, ptrdiff_t* lastp
,
712 SrcNoteType noteType
= SRC_NULL
)
714 NonLocalExitScope
nle(cx
, bce
);
716 if (!nle
.prepareForNonLocalJump(toStmt
))
719 if (noteType
!= SRC_NULL
) {
720 if (NewSrcNote(cx
, bce
, noteType
) < 0)
724 return EmitBackPatchOp(cx
, bce
, lastp
);
728 BackPatch(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ptrdiff_t last
, jsbytecode
* target
, jsbytecode op
)
730 jsbytecode
* pc
, *stop
;
731 ptrdiff_t delta
, span
;
733 pc
= bce
->code(last
);
734 stop
= bce
->code(-1);
736 delta
= GET_JUMP_OFFSET(pc
);
738 SET_JUMP_OFFSET(pc
, span
);
745 #define SET_STATEMENT_TOP(stmt, top) \
746 ((stmt)->update = (top), (stmt)->breaks = (stmt)->continues = (-1))
749 PushStatementInner(BytecodeEmitter
* bce
, StmtInfoBCE
* stmt
, StmtType type
, ptrdiff_t top
)
751 SET_STATEMENT_TOP(stmt
, top
);
752 PushStatement(bce
, stmt
, type
);
756 PushStatementBCE(BytecodeEmitter
* bce
, StmtInfoBCE
* stmt
, StmtType type
, ptrdiff_t top
)
758 PushStatementInner(bce
, stmt
, type
, top
);
759 MOZ_ASSERT(!stmt
->isLoop());
763 PushLoopStatement(BytecodeEmitter
* bce
, LoopStmtInfo
* stmt
, StmtType type
, ptrdiff_t top
)
765 PushStatementInner(bce
, stmt
, type
, top
);
766 MOZ_ASSERT(stmt
->isLoop());
768 LoopStmtInfo
* downLoop
= nullptr;
769 for (StmtInfoBCE
* outer
= stmt
->down
; outer
; outer
= outer
->down
) {
770 if (outer
->isLoop()) {
771 downLoop
= LoopStmtInfo::fromStmtInfo(outer
);
776 stmt
->stackDepth
= bce
->stackDepth
;
777 stmt
->loopDepth
= downLoop
? downLoop
->loopDepth
+ 1 : 1;
780 if (type
== STMT_SPREAD
)
782 else if (type
== STMT_FOR_IN_LOOP
|| type
== STMT_FOR_OF_LOOP
)
787 MOZ_ASSERT(loopSlots
<= stmt
->stackDepth
);
790 stmt
->canIonOsr
= (downLoop
->canIonOsr
&&
791 stmt
->stackDepth
== downLoop
->stackDepth
+ loopSlots
);
793 stmt
->canIonOsr
= stmt
->stackDepth
== loopSlots
;
797 * Return the enclosing lexical scope, which is the innermost enclosing static
798 * block object or compiler created function.
801 EnclosingStaticScope(BytecodeEmitter
* bce
)
803 if (bce
->staticScope
)
804 return bce
->staticScope
;
806 if (!bce
->sc
->isFunctionBox()) {
807 MOZ_ASSERT(!bce
->parent
);
811 return bce
->sc
->asFunctionBox()->function();
816 AllLocalsAliased(StaticBlockObject
& obj
)
818 for (unsigned i
= 0; i
< obj
.numVariables(); i
++)
819 if (!obj
.isAliased(i
))
826 ComputeAliasedSlots(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, Handle
<StaticBlockObject
*> blockObj
)
828 uint32_t numAliased
= bce
->script
->bindings
.numAliasedBodyLevelLocals();
830 for (unsigned i
= 0; i
< blockObj
->numVariables(); i
++) {
831 Definition
* dn
= blockObj
->definitionParseNode(i
);
833 MOZ_ASSERT(dn
->isDefn());
835 // blockIndexToLocalIndex returns the frame slot following the unaliased
836 // locals. We add numAliased so that the cookie's slot value comes after
837 // all (aliased and unaliased) body level locals.
838 if (!dn
->pn_cookie
.set(bce
->parser
->tokenStream
, dn
->pn_cookie
.level(),
839 numAliased
+ blockObj
->blockIndexToLocalIndex(dn
->frameSlot())))
845 for (ParseNode
* pnu
= dn
->dn_uses
; pnu
; pnu
= pnu
->pn_link
) {
846 MOZ_ASSERT(pnu
->pn_lexdef
== dn
);
847 MOZ_ASSERT(!(pnu
->pn_dflags
& PND_BOUND
));
848 MOZ_ASSERT(pnu
->pn_cookie
.isFree());
852 blockObj
->setAliased(i
, bce
->isAliasedName(dn
));
855 MOZ_ASSERT_IF(bce
->sc
->allLocalsAliased(), AllLocalsAliased(*blockObj
));
861 EmitInternedObjectOp(ExclusiveContext
* cx
, uint32_t index
, JSOp op
, BytecodeEmitter
* bce
);
863 // In a function, block-scoped locals go after the vars, and form part of the
864 // fixed part of a stack frame. Outside a function, there are no fixed vars,
865 // but block-scoped locals still form part of the fixed part of a stack frame
866 // and are thus addressable via GETLOCAL and friends.
868 ComputeLocalOffset(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, Handle
<StaticBlockObject
*> blockObj
)
870 unsigned nbodyfixed
= bce
->sc
->isFunctionBox()
871 ? bce
->script
->bindings
.numUnaliasedBodyLevelLocals()
873 unsigned localOffset
= nbodyfixed
;
875 if (bce
->staticScope
) {
876 Rooted
<NestedScopeObject
*> outer(cx
, bce
->staticScope
);
877 for (; outer
; outer
= outer
->enclosingNestedScope()) {
878 if (outer
->is
<StaticBlockObject
>()) {
879 StaticBlockObject
& outerBlock
= outer
->as
<StaticBlockObject
>();
880 localOffset
= outerBlock
.localOffset() + outerBlock
.numVariables();
886 MOZ_ASSERT(localOffset
+ blockObj
->numVariables()
887 <= nbodyfixed
+ bce
->script
->bindings
.numBlockScoped());
889 blockObj
->setLocalOffset(localOffset
);
894 // A nested scope is a region of a compilation unit (function, script, or eval
895 // code) with an additional node on the scope chain. This node may either be a
896 // "with" object or a "block" object. "With" objects represent "with" scopes.
897 // Block objects represent lexical scopes, and contain named block-scoped
898 // bindings, for example "let" bindings or the exception in a catch block.
899 // Those variables may be local and thus accessible directly from the stack, or
900 // "aliased" (accessed by name from nested functions, or dynamically via nested
901 // "eval" or "with") and only accessible through the scope chain.
903 // All nested scopes are present on the "static scope chain". A nested scope
904 // that is a "with" scope will be present on the scope chain at run-time as
905 // well. A block scope may or may not have a corresponding link on the run-time
906 // scope chain; if no variable declared in the block scope is "aliased", then no
907 // scope chain node is allocated.
909 // To help debuggers, the bytecode emitter arranges to record the PC ranges
910 // comprehended by a nested scope, and ultimately attach them to the JSScript.
911 // An element in the "block scope array" specifies the PC range, and links to a
912 // NestedScopeObject in the object list of the script. That scope object is
913 // linked to the previous link in the static scope chain, if any. The static
914 // scope chain at any pre-retire PC can be retrieved using
915 // JSScript::getStaticScope(jsbytecode* pc).
917 // Block scopes store their locals in the fixed part of a stack frame, after the
918 // "fixed var" bindings. A fixed var binding is a "var" or legacy "const"
919 // binding that occurs in a function (as opposed to a script or in eval code).
920 // Only functions have fixed var bindings.
922 // To assist the debugger, we emit a DEBUGLEAVEBLOCK opcode before leaving a
923 // block scope, even if the block has no aliased locals. This allows
924 // DebugScopes to invalidate any association between a debugger scope object,
925 // which can proxy access to unaliased stack locals, and the actual live frame.
926 // In normal, non-debug mode, this opcode does not cause any baseline code to be
929 // Enter a nested scope with EnterNestedScope. It will emit
930 // PUSHBLOCKSCOPE/ENTERWITH if needed, and arrange to record the PC bounds of
931 // the scope. Leave a nested scope with LeaveNestedScope, which, for blocks,
932 // will emit DEBUGLEAVEBLOCK and may emit POPBLOCKSCOPE. (For "with" scopes it
933 // emits LEAVEWITH, of course.) Pass EnterNestedScope a fresh StmtInfoBCE
934 // object, and pass that same object to the corresponding LeaveNestedScope. If
935 // the statement is a block scope, pass STMT_BLOCK as stmtType; otherwise for
936 // with scopes pass STMT_WITH.
939 EnterNestedScope(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, StmtInfoBCE
* stmt
, ObjectBox
* objbox
,
942 Rooted
<NestedScopeObject
*> scopeObj(cx
, &objbox
->object
->as
<NestedScopeObject
>());
943 uint32_t scopeObjectIndex
= bce
->objectList
.add(objbox
);
947 Rooted
<StaticBlockObject
*> blockObj(cx
, &scopeObj
->as
<StaticBlockObject
>());
949 ComputeLocalOffset(cx
, bce
, blockObj
);
951 if (!ComputeAliasedSlots(cx
, bce
, blockObj
))
954 if (blockObj
->needsClone()) {
955 if (!EmitInternedObjectOp(cx
, scopeObjectIndex
, JSOP_PUSHBLOCKSCOPE
, bce
))
961 MOZ_ASSERT(scopeObj
->is
<StaticWithObject
>());
962 if (!EmitInternedObjectOp(cx
, scopeObjectIndex
, JSOP_ENTERWITH
, bce
))
966 MOZ_CRASH("Unexpected scope statement");
969 uint32_t parent
= BlockScopeNote::NoBlockScopeIndex
;
970 if (StmtInfoBCE
* stmt
= bce
->topScopeStmt
) {
971 for (; stmt
->staticScope
!= bce
->staticScope
; stmt
= stmt
->down
) {}
972 parent
= stmt
->blockScopeIndex
;
975 stmt
->blockScopeIndex
= bce
->blockScopeList
.length();
976 if (!bce
->blockScopeList
.append(scopeObjectIndex
, bce
->offset(), parent
))
979 PushStatementBCE(bce
, stmt
, stmtType
, bce
->offset());
980 scopeObj
->initEnclosingNestedScope(EnclosingStaticScope(bce
));
981 FinishPushNestedScope(bce
, stmt
, *scopeObj
);
982 MOZ_ASSERT(stmt
->isNestedScope
);
983 stmt
->isBlockScope
= (stmtType
== STMT_BLOCK
);
988 // Patches |breaks| and |continues| unless the top statement info record
989 // represents a try-catch-finally suite. May fail if a jump offset overflows.
991 PopStatementBCE(ExclusiveContext
* cx
, BytecodeEmitter
* bce
)
993 StmtInfoBCE
* stmt
= bce
->topStmt
;
994 if (!stmt
->isTrying() &&
995 (!BackPatch(cx
, bce
, stmt
->breaks
, bce
->code().end(), JSOP_GOTO
) ||
996 !BackPatch(cx
, bce
, stmt
->continues
, bce
->code(stmt
->update
), JSOP_GOTO
)))
1001 FinishPopStatement(bce
);
1006 LeaveNestedScope(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, StmtInfoBCE
* stmt
)
1008 MOZ_ASSERT(stmt
== bce
->topStmt
);
1009 MOZ_ASSERT(stmt
->isNestedScope
);
1010 MOZ_ASSERT(stmt
->isBlockScope
== !(stmt
->type
== STMT_WITH
));
1011 uint32_t blockScopeIndex
= stmt
->blockScopeIndex
;
1014 MOZ_ASSERT(bce
->blockScopeList
.list
[blockScopeIndex
].length
== 0);
1015 uint32_t blockObjIndex
= bce
->blockScopeList
.list
[blockScopeIndex
].index
;
1016 ObjectBox
* blockObjBox
= bce
->objectList
.find(blockObjIndex
);
1017 NestedScopeObject
* staticScope
= &blockObjBox
->object
->as
<NestedScopeObject
>();
1018 MOZ_ASSERT(stmt
->staticScope
== staticScope
);
1019 MOZ_ASSERT(staticScope
== bce
->staticScope
);
1020 MOZ_ASSERT_IF(!stmt
->isBlockScope
, staticScope
->is
<StaticWithObject
>());
1023 if (!PopStatementBCE(cx
, bce
))
1026 if (Emit1(cx
, bce
, stmt
->isBlockScope
? JSOP_DEBUGLEAVEBLOCK
: JSOP_LEAVEWITH
) < 0)
1029 bce
->blockScopeList
.recordEnd(blockScopeIndex
, bce
->offset());
1031 if (stmt
->isBlockScope
&& stmt
->staticScope
->as
<StaticBlockObject
>().needsClone()) {
1032 if (Emit1(cx
, bce
, JSOP_POPBLOCKSCOPE
) < 0)
1040 EmitIndex32(ExclusiveContext
* cx
, JSOp op
, uint32_t index
, BytecodeEmitter
* bce
)
1042 MOZ_ASSERT(CheckStrictOrSloppy(bce
, op
));
1043 const size_t len
= 1 + UINT32_INDEX_LEN
;
1044 MOZ_ASSERT(len
== size_t(js_CodeSpec
[op
].length
));
1045 ptrdiff_t offset
= EmitCheck(cx
, bce
, len
);
1049 jsbytecode
* code
= bce
->code(offset
);
1050 code
[0] = jsbytecode(op
);
1051 SET_UINT32_INDEX(code
, index
);
1052 UpdateDepth(cx
, bce
, offset
);
1053 CheckTypeSet(cx
, bce
, op
);
1058 EmitIndexOp(ExclusiveContext
* cx
, JSOp op
, uint32_t index
, BytecodeEmitter
* bce
)
1060 MOZ_ASSERT(CheckStrictOrSloppy(bce
, op
));
1061 const size_t len
= js_CodeSpec
[op
].length
;
1062 MOZ_ASSERT(len
>= 1 + UINT32_INDEX_LEN
);
1063 ptrdiff_t offset
= EmitCheck(cx
, bce
, len
);
1067 jsbytecode
* code
= bce
->code(offset
);
1068 code
[0] = jsbytecode(op
);
1069 SET_UINT32_INDEX(code
, index
);
1070 UpdateDepth(cx
, bce
, offset
);
1071 CheckTypeSet(cx
, bce
, op
);
1076 EmitAtomOp(ExclusiveContext
* cx
, JSAtom
* atom
, JSOp op
, BytecodeEmitter
* bce
)
1078 MOZ_ASSERT(JOF_OPTYPE(op
) == JOF_ATOM
);
1080 // .generator and .genrval lookups should be emitted as JSOP_GETALIASEDVAR
1081 // instead of JSOP_GETNAME etc, to bypass |with| objects on the scope chain.
1082 MOZ_ASSERT_IF(op
== JSOP_GETNAME
|| op
== JSOP_GETGNAME
, !bce
->sc
->isDotVariable(atom
));
1084 if (op
== JSOP_GETPROP
&& atom
== cx
->names().length
) {
1085 /* Specialize length accesses for the interpreter. */
1090 if (!bce
->makeAtomIndex(atom
, &index
))
1093 return EmitIndexOp(cx
, op
, index
, bce
);
1097 EmitAtomOp(ExclusiveContext
* cx
, ParseNode
* pn
, JSOp op
, BytecodeEmitter
* bce
)
1099 MOZ_ASSERT(pn
->pn_atom
!= nullptr);
1100 return EmitAtomOp(cx
, pn
->pn_atom
, op
, bce
);
1104 EmitInternedObjectOp(ExclusiveContext
* cx
, uint32_t index
, JSOp op
, BytecodeEmitter
* bce
)
1106 MOZ_ASSERT(JOF_OPTYPE(op
) == JOF_OBJECT
);
1107 MOZ_ASSERT(index
< bce
->objectList
.length
);
1108 return EmitIndex32(cx
, op
, index
, bce
);
1112 EmitObjectOp(ExclusiveContext
* cx
, ObjectBox
* objbox
, JSOp op
, BytecodeEmitter
* bce
)
1114 return EmitInternedObjectOp(cx
, bce
->objectList
.add(objbox
), op
, bce
);
1118 EmitObjectPairOp(ExclusiveContext
* cx
, ObjectBox
* objbox1
, ObjectBox
* objbox2
, JSOp op
,
1119 BytecodeEmitter
* bce
)
1121 uint32_t index
= bce
->objectList
.add(objbox1
);
1122 bce
->objectList
.add(objbox2
);
1123 return EmitInternedObjectOp(cx
, index
, op
, bce
);
1127 EmitRegExp(ExclusiveContext
* cx
, uint32_t index
, BytecodeEmitter
* bce
)
1129 return EmitIndex32(cx
, JSOP_REGEXP
, index
, bce
);
1133 * To catch accidental misuse, EMIT_UINT16_IMM_OP/Emit3 assert that they are
1134 * not used to unconditionally emit JSOP_GETLOCAL. Variable access should
1135 * instead be emitted using EmitVarOp. In special cases, when the caller
1136 * definitely knows that a given local slot is unaliased, this function may be
1137 * used as a non-asserting version of EMIT_UINT16_IMM_OP.
1140 EmitLocalOp(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp op
, uint32_t slot
)
1142 MOZ_ASSERT(JOF_OPTYPE(op
) != JOF_SCOPECOORD
);
1143 MOZ_ASSERT(IsLocalOp(op
));
1145 ptrdiff_t off
= EmitN(cx
, bce
, op
, LOCALNO_LEN
);
1149 SET_LOCALNO(bce
->code(off
), slot
);
1154 EmitUnaliasedVarOp(ExclusiveContext
* cx
, JSOp op
, uint32_t slot
, MaybeCheckLexical checkLexical
,
1155 BytecodeEmitter
* bce
)
1157 MOZ_ASSERT(JOF_OPTYPE(op
) != JOF_SCOPECOORD
);
1159 if (IsLocalOp(op
)) {
1160 // Only unaliased locals have stack slots assigned to them. Convert the
1161 // var index (which includes unaliased and aliased locals) to the stack
1163 MOZ_ASSERT(bce
->localsToFrameSlots_
[slot
] <= slot
);
1164 slot
= bce
->localsToFrameSlots_
[slot
];
1167 MOZ_ASSERT(op
!= JSOP_INITLEXICAL
);
1168 if (!EmitLocalOp(cx
, bce
, JSOP_CHECKLEXICAL
, slot
))
1172 return EmitLocalOp(cx
, bce
, op
, slot
);
1175 MOZ_ASSERT(IsArgOp(op
));
1176 ptrdiff_t off
= EmitN(cx
, bce
, op
, ARGNO_LEN
);
1180 SET_ARGNO(bce
->code(off
), slot
);
1185 EmitScopeCoordOp(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp op
, ScopeCoordinate sc
)
1187 MOZ_ASSERT(JOF_OPTYPE(op
) == JOF_SCOPECOORD
);
1189 unsigned n
= SCOPECOORD_HOPS_LEN
+ SCOPECOORD_SLOT_LEN
;
1190 MOZ_ASSERT(int(n
) + 1 /* op */ == js_CodeSpec
[op
].length
);
1192 ptrdiff_t off
= EmitN(cx
, bce
, op
, n
);
1196 jsbytecode
* pc
= bce
->code(off
);
1197 SET_SCOPECOORD_HOPS(pc
, sc
.hops());
1198 pc
+= SCOPECOORD_HOPS_LEN
;
1199 SET_SCOPECOORD_SLOT(pc
, sc
.slot());
1200 pc
+= SCOPECOORD_SLOT_LEN
;
1201 CheckTypeSet(cx
, bce
, op
);
1206 EmitAliasedVarOp(ExclusiveContext
* cx
, JSOp op
, ScopeCoordinate sc
, MaybeCheckLexical checkLexical
,
1207 BytecodeEmitter
* bce
)
1210 MOZ_ASSERT(op
!= JSOP_INITALIASEDLEXICAL
);
1211 if (!EmitScopeCoordOp(cx
, bce
, JSOP_CHECKALIASEDLEXICAL
, sc
))
1215 return EmitScopeCoordOp(cx
, bce
, op
, sc
);
1218 // Compute the number of nested scope objects that will actually be on the scope
1219 // chain at runtime, given the BCE's current staticScope.
1221 DynamicNestedScopeDepth(BytecodeEmitter
* bce
)
1224 for (NestedScopeObject
* b
= bce
->staticScope
; b
; b
= b
->enclosingNestedScope()) {
1225 if (!b
->is
<StaticBlockObject
>() || b
->as
<StaticBlockObject
>().needsClone())
1233 LookupAliasedName(BytecodeEmitter
* bce
, HandleScript script
, PropertyName
* name
, uint32_t* pslot
,
1234 ParseNode
* pn
= nullptr)
1236 LazyScript::FreeVariable
* freeVariables
= nullptr;
1237 uint32_t lexicalBegin
= 0;
1238 uint32_t numFreeVariables
= 0;
1239 if (bce
->emitterMode
== BytecodeEmitter::LazyFunction
) {
1240 freeVariables
= bce
->lazyScript
->freeVariables();
1241 lexicalBegin
= script
->bindings
.lexicalBegin();
1242 numFreeVariables
= bce
->lazyScript
->numFreeVariables();
1246 * Beware: BindingIter may contain more than one Binding for a given name
1247 * (in the case of |function f(x,x) {}|) but only one will be aliased.
1249 uint32_t bindingIndex
= 0;
1250 uint32_t slot
= CallObject::RESERVED_SLOTS
;
1251 for (BindingIter
bi(script
); !bi
.done(); bi
++) {
1252 if (bi
->aliased()) {
1253 if (bi
->name() == name
) {
1254 // Check if the free variable from a lazy script was marked as
1255 // a possible hoisted use and is a lexical binding. If so,
1256 // mark it as such so we emit a dead zone check.
1257 if (freeVariables
) {
1258 for (uint32_t i
= 0; i
< numFreeVariables
; i
++) {
1259 if (freeVariables
[i
].atom() == name
) {
1260 if (freeVariables
[i
].isHoistedUse() && bindingIndex
>= lexicalBegin
) {
1262 MOZ_ASSERT(pn
->isUsed());
1263 pn
->pn_dflags
|= PND_LEXICAL
;
1282 LookupAliasedNameSlot(BytecodeEmitter
* bce
, HandleScript script
, PropertyName
* name
,
1283 ScopeCoordinate
* sc
)
1286 if (!LookupAliasedName(bce
, script
, name
, &slot
))
1294 * Use this function instead of assigning directly to 'hops' to guard for
1295 * uint8_t overflows.
1298 AssignHops(BytecodeEmitter
* bce
, ParseNode
* pn
, unsigned src
, ScopeCoordinate
* dst
)
1300 if (src
> UINT8_MAX
) {
1301 bce
->reportError(pn
, JSMSG_TOO_DEEP
, js_function_str
);
1309 static inline MaybeCheckLexical
1310 NodeNeedsCheckLexical(ParseNode
* pn
)
1312 return pn
->isHoistedLexicalUse() ? CheckLexical
: DontCheckLexical
;
1316 EmitAliasedVarOp(ExclusiveContext
* cx
, JSOp op
, ParseNode
* pn
, BytecodeEmitter
* bce
)
1319 * While pn->pn_cookie tells us how many function scopes are between the use and the def this
1320 * is not the same as how many hops up the dynamic scope chain are needed. In particular:
1321 * - a lexical function scope only contributes a hop if it is "heavyweight" (has a dynamic
1323 * - a heavyweight named function scope contributes an extra scope to the scope chain (a
1324 * DeclEnvObject that holds just the name).
1325 * - all the intervening let/catch blocks must be counted.
1327 unsigned skippedScopes
= 0;
1328 BytecodeEmitter
* bceOfDef
= bce
;
1331 * As explained in BindNameToSlot, the 'level' of a use indicates how
1332 * many function scopes (i.e., BytecodeEmitters) to skip to find the
1333 * enclosing function scope of the definition being accessed.
1335 for (unsigned i
= pn
->pn_cookie
.level(); i
; i
--) {
1336 skippedScopes
+= DynamicNestedScopeDepth(bceOfDef
);
1337 FunctionBox
* funbox
= bceOfDef
->sc
->asFunctionBox();
1338 if (funbox
->isHeavyweight()) {
1340 if (funbox
->function()->isNamedLambda())
1343 bceOfDef
= bceOfDef
->parent
;
1346 MOZ_ASSERT(pn
->isDefn());
1347 MOZ_ASSERT(pn
->pn_cookie
.level() == bce
->script
->staticLevel());
1351 * The final part of the skippedScopes computation depends on the type of
1352 * variable. An arg or local variable is at the outer scope of a function
1353 * and so includes the full DynamicNestedScopeDepth. A let/catch-binding
1354 * requires a search of the block chain to see how many (dynamic) block
1358 if (IsArgOp(pn
->getOp())) {
1359 if (!AssignHops(bce
, pn
, skippedScopes
+ DynamicNestedScopeDepth(bceOfDef
), &sc
))
1361 JS_ALWAYS_TRUE(LookupAliasedNameSlot(bceOfDef
, bceOfDef
->script
, pn
->name(), &sc
));
1363 MOZ_ASSERT(IsLocalOp(pn
->getOp()) || pn
->isKind(PNK_FUNCTION
));
1364 uint32_t local
= pn
->pn_cookie
.slot();
1365 if (local
< bceOfDef
->script
->bindings
.numBodyLevelLocals()) {
1366 if (!AssignHops(bce
, pn
, skippedScopes
+ DynamicNestedScopeDepth(bceOfDef
), &sc
))
1368 JS_ALWAYS_TRUE(LookupAliasedNameSlot(bceOfDef
, bceOfDef
->script
, pn
->name(), &sc
));
1370 MOZ_ASSERT_IF(bce
->sc
->isFunctionBox(), local
<= bceOfDef
->script
->bindings
.numLocals());
1371 MOZ_ASSERT(bceOfDef
->staticScope
->is
<StaticBlockObject
>());
1372 Rooted
<StaticBlockObject
*> b(cx
, &bceOfDef
->staticScope
->as
<StaticBlockObject
>());
1373 local
= bceOfDef
->localsToFrameSlots_
[local
];
1374 while (local
< b
->localOffset()) {
1375 if (b
->needsClone())
1377 b
= &b
->enclosingNestedScope()->as
<StaticBlockObject
>();
1379 if (!AssignHops(bce
, pn
, skippedScopes
, &sc
))
1381 sc
.setSlot(b
->localIndexToSlot(local
));
1385 return EmitAliasedVarOp(cx
, op
, sc
, NodeNeedsCheckLexical(pn
), bce
);
1389 EmitVarOp(ExclusiveContext
* cx
, ParseNode
* pn
, JSOp op
, BytecodeEmitter
* bce
)
1391 MOZ_ASSERT(pn
->isKind(PNK_FUNCTION
) || pn
->isKind(PNK_NAME
));
1392 MOZ_ASSERT(!pn
->pn_cookie
.isFree());
1394 if (IsAliasedVarOp(op
)) {
1396 sc
.setHops(pn
->pn_cookie
.level());
1397 sc
.setSlot(pn
->pn_cookie
.slot());
1398 return EmitAliasedVarOp(cx
, op
, sc
, NodeNeedsCheckLexical(pn
), bce
);
1401 MOZ_ASSERT_IF(pn
->isKind(PNK_NAME
), IsArgOp(op
) || IsLocalOp(op
));
1403 if (!bce
->isAliasedName(pn
)) {
1404 MOZ_ASSERT(pn
->isUsed() || pn
->isDefn());
1405 MOZ_ASSERT_IF(pn
->isUsed(), pn
->pn_cookie
.level() == 0);
1406 MOZ_ASSERT_IF(pn
->isDefn(), pn
->pn_cookie
.level() == bce
->script
->staticLevel());
1407 return EmitUnaliasedVarOp(cx
, op
, pn
->pn_cookie
.slot(), NodeNeedsCheckLexical(pn
), bce
);
1411 case JSOP_GETARG
: case JSOP_GETLOCAL
: op
= JSOP_GETALIASEDVAR
; break;
1412 case JSOP_SETARG
: case JSOP_SETLOCAL
: op
= JSOP_SETALIASEDVAR
; break;
1413 case JSOP_INITLEXICAL
: op
= JSOP_INITALIASEDLEXICAL
; break;
1414 default: MOZ_CRASH("unexpected var op");
1417 return EmitAliasedVarOp(cx
, op
, pn
, bce
);
1421 GetIncDecInfo(ParseNodeKind kind
, bool* post
)
1423 MOZ_ASSERT(kind
== PNK_POSTINCREMENT
|| kind
== PNK_PREINCREMENT
||
1424 kind
== PNK_POSTDECREMENT
|| kind
== PNK_PREDECREMENT
);
1425 *post
= kind
== PNK_POSTINCREMENT
|| kind
== PNK_POSTDECREMENT
;
1426 return (kind
== PNK_POSTINCREMENT
|| kind
== PNK_PREINCREMENT
) ? JSOP_ADD
: JSOP_SUB
;
1430 EmitVarIncDec(ExclusiveContext
* cx
, ParseNode
* pn
, BytecodeEmitter
* bce
)
1432 JSOp op
= pn
->pn_kid
->getOp();
1433 MOZ_ASSERT(IsArgOp(op
) || IsLocalOp(op
) || IsAliasedVarOp(op
));
1434 MOZ_ASSERT(pn
->pn_kid
->isKind(PNK_NAME
));
1435 MOZ_ASSERT(!pn
->pn_kid
->pn_cookie
.isFree());
1438 JSOp binop
= GetIncDecInfo(pn
->getKind(), &post
);
1441 if (IsLocalOp(op
)) {
1442 getOp
= JSOP_GETLOCAL
;
1443 setOp
= JSOP_SETLOCAL
;
1444 } else if (IsArgOp(op
)) {
1445 getOp
= JSOP_GETARG
;
1446 setOp
= JSOP_SETARG
;
1448 getOp
= JSOP_GETALIASEDVAR
;
1449 setOp
= JSOP_SETALIASEDVAR
;
1452 if (!EmitVarOp(cx
, pn
->pn_kid
, getOp
, bce
)) // V
1454 if (Emit1(cx
, bce
, JSOP_POS
) < 0) // N
1456 if (post
&& Emit1(cx
, bce
, JSOP_DUP
) < 0) // N? N
1458 if (Emit1(cx
, bce
, JSOP_ONE
) < 0) // N? N 1
1460 if (Emit1(cx
, bce
, binop
) < 0) // N? N+1
1462 if (!EmitVarOp(cx
, pn
->pn_kid
, setOp
, bce
)) // N? N+1
1464 if (post
&& Emit1(cx
, bce
, JSOP_POP
) < 0) // RESULT
1471 BytecodeEmitter::isAliasedName(ParseNode
* pn
)
1473 Definition
* dn
= pn
->resolve();
1474 MOZ_ASSERT(dn
->isDefn());
1475 MOZ_ASSERT(!dn
->isPlaceholder());
1476 MOZ_ASSERT(dn
->isBound());
1478 /* If dn is in an enclosing function, it is definitely aliased. */
1479 if (dn
->pn_cookie
.level() != script
->staticLevel())
1482 switch (dn
->kind()) {
1483 case Definition::LET
:
1484 case Definition::CONST
:
1486 * There are two ways to alias a let variable: nested functions and
1487 * dynamic scope operations. (This is overly conservative since the
1488 * bindingsAccessedDynamically flag, checked by allLocalsAliased, is
1491 * In addition all locals in generators are marked as aliased, to ensure
1492 * that they are allocated on scope chains instead of on the stack. See
1493 * the definition of SharedContext::allLocalsAliased.
1495 return dn
->isClosed() || sc
->allLocalsAliased();
1496 case Definition::ARG
:
1498 * Consult the bindings, since they already record aliasing. We might
1499 * be tempted to use the same definition as VAR/CONST/LET, but there is
1500 * a problem caused by duplicate arguments: only the last argument with
1501 * a given name is aliased. This is necessary to avoid generating a
1502 * shape for the call object with with more than one name for a given
1503 * slot (which violates internal engine invariants). All this means that
1504 * the '|| sc->allLocalsAliased()' disjunct is incorrect since it will
1505 * mark both parameters in function(x,x) as aliased.
1507 return script
->formalIsAliased(pn
->pn_cookie
.slot());
1508 case Definition::VAR
:
1509 case Definition::GLOBALCONST
:
1510 MOZ_ASSERT_IF(sc
->allLocalsAliased(), script
->cookieIsAliased(pn
->pn_cookie
));
1511 return script
->cookieIsAliased(pn
->pn_cookie
);
1512 case Definition::PLACEHOLDER
:
1513 case Definition::NAMED_LAMBDA
:
1514 case Definition::MISSING
:
1515 MOZ_CRASH("unexpected dn->kind");
1521 StrictifySetNameOp(JSOp op
, BytecodeEmitter
* bce
)
1525 if (bce
->sc
->strict
)
1526 op
= JSOP_STRICTSETNAME
;
1529 if (bce
->sc
->strict
)
1530 op
= JSOP_STRICTSETGNAME
;
1538 StrictifySetNameNode(ParseNode
* pn
, BytecodeEmitter
* bce
)
1540 pn
->setOp(StrictifySetNameOp(pn
->getOp(), bce
));
1544 * Try to convert a *NAME op with a free name to a more specialized GNAME,
1545 * INTRINSIC or ALIASEDVAR op, which optimize accesses on that name.
1546 * Return true if a conversion was made.
1549 TryConvertFreeName(BytecodeEmitter
* bce
, ParseNode
* pn
)
1552 * In self-hosting mode, JSOP_*NAME is unconditionally converted to
1553 * JSOP_*INTRINSIC. This causes lookups to be redirected to the special
1554 * intrinsics holder in the global object, into which any missing values are
1555 * cloned lazily upon first access.
1557 if (bce
->emitterMode
== BytecodeEmitter::SelfHosting
) {
1559 switch (pn
->getOp()) {
1560 case JSOP_GETNAME
: op
= JSOP_GETINTRINSIC
; break;
1561 case JSOP_SETNAME
: op
= JSOP_SETINTRINSIC
; break;
1562 /* Other *NAME ops aren't (yet) supported in self-hosted code. */
1563 default: MOZ_CRASH("intrinsic");
1570 * When parsing inner functions lazily, parse nodes for outer functions no
1571 * longer exist and only the function's scope chain is available for
1572 * resolving upvar accesses within the inner function.
1574 if (bce
->emitterMode
== BytecodeEmitter::LazyFunction
) {
1575 // The only statements within a lazy function which can push lexical
1576 // scopes are try/catch blocks. Use generic ops in this case.
1577 for (StmtInfoBCE
* stmt
= bce
->topStmt
; stmt
; stmt
= stmt
->down
) {
1578 if (stmt
->type
== STMT_CATCH
)
1583 FunctionBox
* funbox
= bce
->sc
->asFunctionBox();
1584 if (funbox
->hasExtensibleScope())
1586 if (funbox
->function()->isNamedLambda() && funbox
->function()->atom() == pn
->pn_atom
)
1588 if (funbox
->isHeavyweight()) {
1590 if (funbox
->function()->isNamedLambda())
1593 if (bce
->script
->directlyInsideEval())
1595 RootedObject
outerScope(bce
->sc
->context
, bce
->script
->enclosingStaticScope());
1596 for (StaticScopeIter
<CanGC
> ssi(bce
->sc
->context
, outerScope
); !ssi
.done(); ssi
++) {
1597 if (ssi
.type() != StaticScopeIter
<CanGC
>::FUNCTION
) {
1598 if (ssi
.type() == StaticScopeIter
<CanGC
>::BLOCK
) {
1599 // Use generic ops if a catch block is encountered.
1602 if (ssi
.hasDynamicScopeObject())
1606 RootedScript
script(bce
->sc
->context
, ssi
.funScript());
1607 if (script
->functionNonDelazifying()->atom() == pn
->pn_atom
)
1609 if (ssi
.hasDynamicScopeObject()) {
1611 if (LookupAliasedName(bce
, script
, pn
->pn_atom
->asPropertyName(), &slot
, pn
)) {
1613 switch (pn
->getOp()) {
1614 case JSOP_GETNAME
: op
= JSOP_GETALIASEDVAR
; break;
1615 case JSOP_SETNAME
: op
= JSOP_SETALIASEDVAR
; break;
1616 default: return false;
1620 JS_ALWAYS_TRUE(pn
->pn_cookie
.set(bce
->parser
->tokenStream
, hops
, slot
));
1626 if (script
->funHasExtensibleScope() || script
->directlyInsideEval())
1631 // Unbound names aren't recognizable global-property references if the
1632 // script isn't running against its global object.
1633 if (!bce
->script
->compileAndGo() || !bce
->hasGlobalScope
)
1636 // Deoptimized names also aren't necessarily globals.
1637 if (pn
->isDeoptimized())
1640 if (bce
->sc
->isFunctionBox()) {
1641 // Unbound names in function code may not be globals if new locals can
1642 // be added to this function (or an enclosing one) to alias a global
1644 FunctionBox
* funbox
= bce
->sc
->asFunctionBox();
1645 if (funbox
->mightAliasLocals())
1649 // If this is eval code, being evaluated inside strict mode eval code,
1650 // an "unbound" name might be a binding local to that outer eval:
1652 // var x = "GLOBAL";
1653 // eval('"use strict"; ' +
1655 // 'eval("print(x)");'); // "undefined", not "GLOBAL"
1657 // Given the enclosing eval code's strictness and its bindings (neither is
1658 // readily available now), we could exactly check global-ness, but it's not
1659 // worth the trouble for doubly-nested eval code. So we conservatively
1660 // approximate. If the outer eval code is strict, then this eval code will
1661 // be: thus, don't optimize if we're compiling strict code inside an eval.
1662 if (bce
->insideEval
&& bce
->sc
->strict
)
1666 switch (pn
->getOp()) {
1667 case JSOP_GETNAME
: op
= JSOP_GETGNAME
; break;
1668 case JSOP_SETNAME
: op
= StrictifySetNameOp(JSOP_SETGNAME
, bce
); break;
1672 default: MOZ_CRASH("gname");
1679 * BindNameToSlotHelper attempts to optimize name gets and sets to stack slot
1680 * loads and stores, given the compile-time information in bce and a PNK_NAME
1681 * node pn. It returns false on error, true on success.
1683 * The caller can test pn->pn_cookie.isFree() to tell whether optimization
1684 * occurred, in which case BindNameToSlotHelper also updated pn->pn_op. If
1685 * pn->pn_cookie.isFree() is still true on return, pn->pn_op still may have
1686 * been optimized, e.g., from JSOP_GETNAME to JSOP_CALLEE. Whether or not
1687 * pn->pn_op was modified, if this function finds an argument or local variable
1688 * name, PND_CONST will be set in pn_dflags for read-only properties after a
1689 * successful return.
1691 * NB: if you add more opcodes specialized from JSOP_GETNAME, etc., don't forget
1692 * to update the special cases in EmitFor (for-in) and EmitAssignment (= and
1696 BindNameToSlotHelper(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
1698 MOZ_ASSERT(pn
->isKind(PNK_NAME
));
1700 MOZ_ASSERT_IF(pn
->isKind(PNK_FUNCTION
), pn
->isBound());
1702 /* Don't attempt if 'pn' is already bound or deoptimized or a function. */
1703 if (pn
->isBound() || pn
->isDeoptimized())
1706 /* JSOP_CALLEE is pre-bound by definition. */
1707 JSOp op
= pn
->getOp();
1708 MOZ_ASSERT(op
!= JSOP_CALLEE
);
1709 MOZ_ASSERT(JOF_OPTYPE(op
) == JOF_ATOM
);
1712 * The parser already linked name uses to definitions when (where not
1713 * prevented by non-lexical constructs like 'with' and 'eval').
1717 MOZ_ASSERT(pn
->pn_cookie
.isFree());
1719 MOZ_ASSERT(dn
->isDefn());
1720 pn
->pn_dflags
|= (dn
->pn_dflags
& PND_CONST
);
1721 } else if (pn
->isDefn()) {
1722 dn
= (Definition
*) pn
;
1727 // Throw an error on attempts to mutate const-declared bindings.
1733 if (pn
->isConst()) {
1734 JSAutoByteString name
;
1735 if (!AtomToPrintableString(cx
, pn
->pn_atom
, &name
))
1737 bce
->reportError(pn
, JSMSG_BAD_CONST_ASSIGN
, name
.ptr());
1742 if (dn
->pn_cookie
.isFree()) {
1743 if (HandleScript caller
= bce
->evalCaller
) {
1744 MOZ_ASSERT(bce
->script
->compileAndGo());
1747 * Don't generate upvars on the left side of a for loop. See
1750 if (bce
->emittingForInit
)
1754 * If this is an eval in the global scope, then unbound variables
1755 * must be globals, so try to use GNAME ops.
1757 if (!caller
->functionOrCallerFunction() && TryConvertFreeName(bce
, pn
)) {
1758 pn
->pn_dflags
|= PND_BOUND
;
1763 * Out of tricks, so we must rely on PICs to optimize named
1764 * accesses from direct eval called from function code.
1769 /* Optimize accesses to undeclared globals. */
1770 if (!TryConvertFreeName(bce
, pn
))
1773 pn
->pn_dflags
|= PND_BOUND
;
1778 * At this point, we are only dealing with uses that have already been
1779 * bound to definitions via pn_lexdef. The rest of this routine converts
1780 * the parse node of the use from its initial JSOP_*NAME* op to a LOCAL/ARG
1781 * op. This requires setting the node's pn_cookie with a pair (level, slot)
1782 * where 'level' is the number of function scopes between the use and the
1783 * def and 'slot' is the index to emit as the immediate of the ARG/LOCAL
1784 * op. For example, in this code:
1786 * function(a,b,x) { return x }
1787 * function(y) { function() { return y } }
1789 * x will get (level = 0, slot = 2) and y will get (level = 1, slot = 0).
1791 MOZ_ASSERT(!pn
->isDefn());
1792 MOZ_ASSERT(pn
->isUsed());
1793 MOZ_ASSERT(pn
->pn_lexdef
);
1794 MOZ_ASSERT(pn
->pn_cookie
.isFree());
1797 * We are compiling a function body and may be able to optimize name
1798 * to stack slot. Look for an argument or variable in the function and
1799 * rewrite pn_op and update pn accordingly.
1801 switch (dn
->kind()) {
1802 case Definition::ARG
:
1805 op
= JSOP_GETARG
; break;
1807 case JSOP_STRICTSETNAME
:
1808 op
= JSOP_SETARG
; break;
1809 default: MOZ_CRASH("arg");
1811 MOZ_ASSERT(!pn
->isConst());
1814 case Definition::VAR
:
1815 case Definition::GLOBALCONST
:
1816 case Definition::CONST
:
1817 case Definition::LET
:
1820 op
= JSOP_GETLOCAL
; break;
1822 case JSOP_STRICTSETNAME
:
1823 op
= JSOP_SETLOCAL
; break;
1825 op
= JSOP_SETLOCAL
; break;
1826 default: MOZ_CRASH("local");
1830 case Definition::NAMED_LAMBDA
: {
1831 MOZ_ASSERT(dn
->isOp(JSOP_CALLEE
));
1832 MOZ_ASSERT(op
!= JSOP_CALLEE
);
1835 * Currently, the ALIASEDVAR ops do not support accessing the
1836 * callee of a DeclEnvObject, so use NAME.
1838 if (dn
->pn_cookie
.level() != bce
->script
->staticLevel())
1841 DebugOnly
<JSFunction
*> fun
= bce
->sc
->asFunctionBox()->function();
1842 MOZ_ASSERT(fun
->isLambda());
1843 MOZ_ASSERT(pn
->pn_atom
== fun
->atom());
1846 * Leave pn->isOp(JSOP_GETNAME) if bce->fun is heavyweight to
1847 * address two cases: a new binding introduced by eval, and
1848 * assignment to the name in strict mode.
1850 * var fun = (function f(s) { eval(s); return f; });
1851 * assertEq(fun("var f = 42"), 42);
1853 * ECMAScript specifies that a function expression's name is bound
1854 * in a lexical environment distinct from that used to bind its
1855 * named parameters, the arguments object, and its variables. The
1856 * new binding for "var f = 42" shadows the binding for the
1857 * function itself, so the name of the function will not refer to
1860 * (function f() { "use strict"; f = 12; })();
1862 * Outside strict mode, assignment to a function expression's name
1863 * has no effect. But in strict mode, this attempt to mutate an
1864 * immutable binding must throw a TypeError. We implement this by
1865 * not optimizing such assignments and by marking such functions as
1866 * heavyweight, ensuring that the function name is represented in
1867 * the scope chain so that assignment will throw a TypeError.
1869 if (!bce
->sc
->asFunctionBox()->isHeavyweight()) {
1871 pn
->pn_dflags
|= PND_CONST
;
1875 pn
->pn_dflags
|= PND_BOUND
;
1879 case Definition::PLACEHOLDER
:
1882 case Definition::MISSING
:
1883 MOZ_CRASH("missing");
1887 * The difference between the current static level and the static level of
1888 * the definition is the number of function scopes between the current
1889 * scope and dn's scope.
1891 unsigned skip
= bce
->script
->staticLevel() - dn
->pn_cookie
.level();
1892 MOZ_ASSERT_IF(skip
, dn
->isClosed());
1895 * Explicitly disallow accessing var/let bindings in global scope from
1896 * nested functions. The reason for this limitation is that, since the
1897 * global script is not included in the static scope chain (1. because it
1898 * has no object to stand in the static scope chain, 2. to minimize memory
1899 * bloat where a single live function keeps its whole global script
1900 * alive.), ScopeCoordinateToTypeSet is not able to find the var/let's
1901 * associated types::TypeSet.
1904 BytecodeEmitter
* bceSkipped
= bce
;
1905 for (unsigned i
= 0; i
< skip
; i
++)
1906 bceSkipped
= bceSkipped
->parent
;
1907 if (!bceSkipped
->sc
->isFunctionBox())
1911 MOZ_ASSERT(!pn
->isOp(op
));
1913 if (!pn
->pn_cookie
.set(bce
->parser
->tokenStream
, skip
, dn
->pn_cookie
.slot()))
1916 pn
->pn_dflags
|= PND_BOUND
;
1921 * Attempts to bind the name, then checks that no dynamic scope lookup ops are
1922 * emitted in self-hosting mode. NAME ops do lookups off current scope chain,
1923 * and we do not want to allow self-hosted code to use the dynamic scope.
1926 BindNameToSlot(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
1928 if (!BindNameToSlotHelper(cx
, bce
, pn
))
1931 StrictifySetNameNode(pn
, bce
);
1933 if (bce
->emitterMode
== BytecodeEmitter::SelfHosting
&& !pn
->isBound()) {
1934 bce
->reportError(pn
, JSMSG_SELFHOSTED_UNBOUND_NAME
);
1942 * If pn contains a useful expression, return true with *answer set to true.
1943 * If pn contains a useless expression, return true with *answer set to false.
1944 * Return false on error.
1946 * The caller should initialize *answer to false and invoke this function on
1947 * an expression statement or similar subtree to decide whether the tree could
1948 * produce code that has any side effects. For an expression statement, we
1949 * define useless code as code with no side effects, because the main effect,
1950 * the value left on the stack after the code executes, will be discarded by a
1954 CheckSideEffects(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, bool* answer
)
1959 switch (pn
->getArity()) {
1962 * A named function, contrary to ES3, is no longer useful, because we
1963 * bind its name lexically (using JSOP_CALLEE) instead of creating an
1964 * Object instance and binding a readonly, permanent property in it
1965 * (the object and binding can be detected and hijacked or captured).
1966 * This is a bug fix to ES3; it is fixed in ES3.1 drafts.
1968 MOZ_ASSERT(*answer
== false);
1972 if (pn
->isOp(JSOP_NOP
) || pn
->isOp(JSOP_OR
) || pn
->isOp(JSOP_AND
) ||
1973 pn
->isOp(JSOP_STRICTEQ
) || pn
->isOp(JSOP_STRICTNE
)) {
1975 * Non-operators along with ||, &&, ===, and !== never invoke
1976 * toString or valueOf.
1979 for (ParseNode
* pn2
= pn
->pn_head
; pn2
; pn2
= pn2
->pn_next
)
1980 ok
&= CheckSideEffects(cx
, bce
, pn2
, answer
);
1984 if (pn
->isKind(PNK_GENEXP
)) {
1985 /* Generator-expressions are harmless if the result is ignored. */
1986 MOZ_ASSERT(*answer
== false);
1991 * All invocation operations (construct: PNK_NEW, call: PNK_CALL)
1992 * are presumed to be useful, because they may have side effects
1993 * even if their main effect (their return value) is discarded.
1995 * PNK_ELEM binary trees of 3+ nodes are flattened into lists to
1996 * avoid too much recursion. All such lists must be presumed to be
1997 * useful because each index operation could invoke a getter.
1999 * Likewise, array and object initialisers may call prototype
2000 * setters (the __defineSetter__ built-in, and writable __proto__
2001 * on Array.prototype create this hazard). Initialiser list nodes
2002 * have JSOP_NEWINIT in their pn_op.
2008 return CheckSideEffects(cx
, bce
, pn
->pn_kid1
, answer
) &&
2009 CheckSideEffects(cx
, bce
, pn
->pn_kid2
, answer
) &&
2010 CheckSideEffects(cx
, bce
, pn
->pn_kid3
, answer
);
2014 if (pn
->isAssignment()) {
2016 * Assignment is presumed to be useful, even if the next operation
2017 * is another assignment overwriting this one's ostensible effect,
2018 * because the left operand may be a property with a setter that
2021 * The only exception is assignment of a useless value to a const
2022 * declared in the function currently being compiled.
2024 ParseNode
* pn2
= pn
->pn_left
;
2025 if (!pn2
->isKind(PNK_NAME
)) {
2028 if (!BindNameToSlot(cx
, bce
, pn2
))
2030 if (!CheckSideEffects(cx
, bce
, pn
->pn_right
, answer
))
2032 if (!*answer
&& (!pn
->isOp(JSOP_NOP
) || !pn2
->isConst()))
2038 if (pn
->isOp(JSOP_OR
) || pn
->isOp(JSOP_AND
) || pn
->isOp(JSOP_STRICTEQ
) ||
2039 pn
->isOp(JSOP_STRICTNE
)) {
2041 * ||, &&, ===, and !== do not convert their operands via
2042 * toString or valueOf method calls.
2044 return CheckSideEffects(cx
, bce
, pn
->pn_left
, answer
) &&
2045 CheckSideEffects(cx
, bce
, pn
->pn_right
, answer
);
2049 * We can't easily prove that neither operand ever denotes an
2050 * object with a toString or valueOf method.
2056 switch (pn
->getKind()) {
2059 ParseNode
* pn2
= pn
->pn_kid
;
2060 switch (pn2
->getKind()) {
2062 if (!BindNameToSlot(cx
, bce
, pn2
))
2064 if (pn2
->isConst()) {
2065 MOZ_ASSERT(*answer
== false);
2072 /* All these delete addressing modes have effects too. */
2076 return CheckSideEffects(cx
, bce
, pn2
, answer
);
2078 MOZ_CRASH("We have a returning default case");
2085 if (pn
->isOp(JSOP_NOT
)) {
2086 /* ! does not convert its operand via toString or valueOf. */
2087 return CheckSideEffects(cx
, bce
, pn
->pn_kid
, answer
);
2093 * All of PNK_INC, PNK_DEC and PNK_THROW have direct effects. Of
2094 * the remaining unary-arity node types, we can't easily prove that
2095 * the operand never denotes an object with a toString or valueOf
2101 MOZ_CRASH("We have a returning default case");
2105 * Take care to avoid trying to bind a label name (labels, both for
2106 * statements and property values in object initialisers, have pn_op
2107 * defaulted to JSOP_NOP).
2109 if (pn
->isKind(PNK_NAME
) && !pn
->isOp(JSOP_NOP
)) {
2110 if (!BindNameToSlot(cx
, bce
, pn
))
2112 if (!pn
->isOp(JSOP_CALLEE
) && pn
->pn_cookie
.isFree()) {
2114 * Not a use of an unshadowed named function expression's given
2115 * name, so this expression could invoke a getter that has side
2122 if (pn
->isHoistedLexicalUse()) {
2123 // Hoisted uses of lexical bindings throw on access.
2127 if (pn
->isKind(PNK_DOT
)) {
2128 /* Dotted property references in general can call getters. */
2131 return CheckSideEffects(cx
, bce
, pn
->maybeExpr(), answer
);
2134 if (pn
->isKind(PNK_DEBUGGER
))
2142 BytecodeEmitter::isInLoop()
2144 for (StmtInfoBCE
* stmt
= topStmt
; stmt
; stmt
= stmt
->down
) {
2152 BytecodeEmitter::checkSingletonContext()
2154 if (!script
->compileAndGo() || sc
->isFunctionBox() || isInLoop())
2156 hasSingletons
= true;
2161 BytecodeEmitter::needsImplicitThis()
2163 if (!script
->compileAndGo())
2166 if (sc
->isFunctionBox()) {
2167 if (sc
->asFunctionBox()->inWith
)
2170 JSObject
* scope
= sc
->asGlobalSharedContext()->scopeChain();
2172 if (scope
->is
<DynamicWithObject
>())
2174 scope
= scope
->enclosingScope();
2178 for (StmtInfoBCE
* stmt
= topStmt
; stmt
; stmt
= stmt
->down
) {
2179 if (stmt
->type
== STMT_WITH
)
2186 BytecodeEmitter::tellDebuggerAboutCompiledScript(ExclusiveContext
* cx
)
2188 // Note: when parsing off thread the resulting scripts need to be handed to
2189 // the debugger after rejoining to the main thread.
2190 if (!cx
->isJSContext())
2193 // Lazy scripts are never top level (despite always being invoked with a
2194 // nullptr parent), and so the hook should never be fired.
2195 if (emitterMode
!= LazyFunction
&& !parent
) {
2196 GlobalObject
* compileAndGoGlobal
= nullptr;
2197 if (script
->compileAndGo())
2198 compileAndGoGlobal
= &script
->global();
2199 Debugger::onNewScript(cx
->asJSContext(), script
, compileAndGoGlobal
);
2204 BytecodeEmitter::tokenStream()
2206 return &parser
->tokenStream
;
2210 BytecodeEmitter::reportError(ParseNode
* pn
, unsigned errorNumber
, ...)
2212 TokenPos pos
= pn
? pn
->pn_pos
: tokenStream()->currentToken().pos
;
2215 va_start(args
, errorNumber
);
2216 bool result
= tokenStream()->reportCompileErrorNumberVA(pos
.begin
, JSREPORT_ERROR
,
2223 BytecodeEmitter::reportStrictWarning(ParseNode
* pn
, unsigned errorNumber
, ...)
2225 TokenPos pos
= pn
? pn
->pn_pos
: tokenStream()->currentToken().pos
;
2228 va_start(args
, errorNumber
);
2229 bool result
= tokenStream()->reportStrictWarningErrorNumberVA(pos
.begin
, errorNumber
, args
);
2235 BytecodeEmitter::reportStrictModeError(ParseNode
* pn
, unsigned errorNumber
, ...)
2237 TokenPos pos
= pn
? pn
->pn_pos
: tokenStream()->currentToken().pos
;
2240 va_start(args
, errorNumber
);
2241 bool result
= tokenStream()->reportStrictModeErrorNumberVA(pos
.begin
, sc
->strict
,
2248 EmitNewInit(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSProtoKey key
)
2250 const size_t len
= 1 + UINT32_INDEX_LEN
;
2251 ptrdiff_t offset
= EmitCheck(cx
, bce
, len
);
2255 jsbytecode
* code
= bce
->code(offset
);
2256 code
[0] = JSOP_NEWINIT
;
2257 code
[1] = jsbytecode(key
);
2261 UpdateDepth(cx
, bce
, offset
);
2262 CheckTypeSet(cx
, bce
, JSOP_NEWINIT
);
2267 IteratorResultShape(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, unsigned* shape
)
2269 MOZ_ASSERT(bce
->script
->compileAndGo());
2271 RootedPlainObject
obj(cx
);
2272 gc::AllocKind kind
= GuessObjectGCKind(2);
2273 obj
= NewBuiltinClassInstance
<PlainObject
>(cx
, kind
);
2277 Rooted
<jsid
> value_id(cx
, AtomToId(cx
->names().value
));
2278 Rooted
<jsid
> done_id(cx
, AtomToId(cx
->names().done
));
2279 if (!DefineNativeProperty(cx
, obj
, value_id
, UndefinedHandleValue
, nullptr, nullptr,
2284 if (!DefineNativeProperty(cx
, obj
, done_id
, UndefinedHandleValue
, nullptr, nullptr,
2290 ObjectBox
* objbox
= bce
->parser
->newObjectBox(obj
);
2294 *shape
= bce
->objectList
.add(objbox
);
2300 EmitPrepareIteratorResult(ExclusiveContext
* cx
, BytecodeEmitter
* bce
)
2302 if (bce
->script
->compileAndGo()) {
2304 if (!IteratorResultShape(cx
, bce
, &shape
))
2306 return EmitIndex32(cx
, JSOP_NEWOBJECT
, shape
, bce
);
2309 return EmitNewInit(cx
, bce
, JSProto_Object
);
2313 EmitFinishIteratorResult(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, bool done
)
2316 if (!bce
->makeAtomIndex(cx
->names().value
, &value_id
))
2319 if (!bce
->makeAtomIndex(cx
->names().done
, &done_id
))
2322 if (!EmitIndex32(cx
, JSOP_INITPROP
, value_id
, bce
))
2324 if (Emit1(cx
, bce
, done
? JSOP_TRUE
: JSOP_FALSE
) < 0)
2326 if (!EmitIndex32(cx
, JSOP_INITPROP
, done_id
, bce
))
2332 EmitNameOp(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, bool callContext
)
2334 if (!BindNameToSlot(cx
, bce
, pn
))
2337 JSOp op
= pn
->getOp();
2339 if (op
== JSOP_CALLEE
) {
2340 if (Emit1(cx
, bce
, op
) < 0)
2343 if (!pn
->pn_cookie
.isFree()) {
2344 MOZ_ASSERT(JOF_OPTYPE(op
) != JOF_ATOM
);
2345 if (!EmitVarOp(cx
, pn
, op
, bce
))
2348 if (!EmitAtomOp(cx
, pn
, op
, bce
))
2353 /* Need to provide |this| value for call */
2355 if (op
== JSOP_GETNAME
&& bce
->needsImplicitThis()) {
2356 if (!EmitAtomOp(cx
, pn
, JSOP_IMPLICITTHIS
, bce
))
2359 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0)
2368 EmitPropLHS(ExclusiveContext
* cx
, ParseNode
* pn
, JSOp op
, BytecodeEmitter
* bce
)
2370 MOZ_ASSERT(pn
->isKind(PNK_DOT
));
2371 ParseNode
* pn2
= pn
->maybeExpr();
2374 * If the object operand is also a dotted property reference, reverse the
2375 * list linked via pn_expr temporarily so we can iterate over it from the
2376 * bottom up (reversing again as we go), to avoid excessive recursion.
2378 if (pn2
->isKind(PNK_DOT
)) {
2379 ParseNode
* pndot
= pn2
;
2380 ParseNode
* pnup
= nullptr, *pndown
;
2381 ptrdiff_t top
= bce
->offset();
2383 /* Reverse pndot->pn_expr to point up, not down. */
2384 pndot
->pn_offset
= top
;
2385 MOZ_ASSERT(!pndot
->isUsed());
2386 pndown
= pndot
->pn_expr
;
2387 pndot
->pn_expr
= pnup
;
2388 if (!pndown
->isKind(PNK_DOT
))
2394 /* pndown is a primary expression, not a dotted property reference. */
2395 if (!EmitTree(cx
, bce
, pndown
))
2399 /* Walk back up the list, emitting annotated name ops. */
2400 if (!EmitAtomOp(cx
, pndot
, JSOP_GETPROP
, bce
))
2403 /* Reverse the pn_expr link again. */
2404 pnup
= pndot
->pn_expr
;
2405 pndot
->pn_expr
= pndown
;
2407 } while ((pndot
= pnup
) != nullptr);
2411 // The non-optimized case.
2412 return EmitTree(cx
, bce
, pn2
);
2416 EmitPropOp(ExclusiveContext
* cx
, ParseNode
* pn
, JSOp op
, BytecodeEmitter
* bce
)
2418 MOZ_ASSERT(pn
->isArity(PN_NAME
));
2420 if (!EmitPropLHS(cx
, pn
, op
, bce
))
2423 if (op
== JSOP_CALLPROP
&& Emit1(cx
, bce
, JSOP_DUP
) < 0)
2426 if (!EmitAtomOp(cx
, pn
, op
, bce
))
2429 if (op
== JSOP_CALLPROP
&& Emit1(cx
, bce
, JSOP_SWAP
) < 0)
2436 EmitPropIncDec(ExclusiveContext
* cx
, ParseNode
* pn
, BytecodeEmitter
* bce
)
2438 MOZ_ASSERT(pn
->pn_kid
->getKind() == PNK_DOT
);
2441 JSOp binop
= GetIncDecInfo(pn
->getKind(), &post
);
2443 JSOp get
= JSOP_GETPROP
;
2444 if (!EmitPropLHS(cx
, pn
->pn_kid
, get
, bce
)) // OBJ
2446 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // OBJ OBJ
2448 if (!EmitAtomOp(cx
, pn
->pn_kid
, JSOP_GETPROP
, bce
)) // OBJ V
2450 if (Emit1(cx
, bce
, JSOP_POS
) < 0) // OBJ N
2452 if (post
&& Emit1(cx
, bce
, JSOP_DUP
) < 0) // OBJ N? N
2454 if (Emit1(cx
, bce
, JSOP_ONE
) < 0) // OBJ N? N 1
2456 if (Emit1(cx
, bce
, binop
) < 0) // OBJ N? N+1
2460 if (Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)2) < 0) // N? N+1 OBJ
2462 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0) // N? OBJ N+1
2466 JSOp setOp
= bce
->sc
->strict
? JSOP_STRICTSETPROP
: JSOP_SETPROP
;
2467 if (!EmitAtomOp(cx
, pn
->pn_kid
, setOp
, bce
)) // N? N+1
2469 if (post
&& Emit1(cx
, bce
, JSOP_POP
) < 0) // RESULT
2476 EmitNameIncDec(ExclusiveContext
* cx
, ParseNode
* pn
, BytecodeEmitter
* bce
)
2478 const JSCodeSpec
* cs
= &js_CodeSpec
[pn
->pn_kid
->getOp()];
2480 bool global
= (cs
->format
& JOF_GNAME
);
2482 JSOp binop
= GetIncDecInfo(pn
->getKind(), &post
);
2484 if (!EmitAtomOp(cx
, pn
->pn_kid
, global
? JSOP_BINDGNAME
: JSOP_BINDNAME
, bce
)) // OBJ
2486 if (!EmitAtomOp(cx
, pn
->pn_kid
, global
? JSOP_GETGNAME
: JSOP_GETNAME
, bce
)) // OBJ V
2488 if (Emit1(cx
, bce
, JSOP_POS
) < 0) // OBJ N
2490 if (post
&& Emit1(cx
, bce
, JSOP_DUP
) < 0) // OBJ N? N
2492 if (Emit1(cx
, bce
, JSOP_ONE
) < 0) // OBJ N? N 1
2494 if (Emit1(cx
, bce
, binop
) < 0) // OBJ N? N+1
2498 if (Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)2) < 0) // N? N+1 OBJ
2500 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0) // N? OBJ N+1
2504 JSOp setOp
= StrictifySetNameOp(global
? JSOP_SETGNAME
: JSOP_SETNAME
, bce
);
2505 if (!EmitAtomOp(cx
, pn
->pn_kid
, setOp
, bce
)) // N? N+1
2507 if (post
&& Emit1(cx
, bce
, JSOP_POP
) < 0) // RESULT
2514 * Emit bytecode to put operands for a JSOP_GETELEM/CALLELEM/SETELEM/DELELEM
2515 * opcode onto the stack in the right order. In the case of SETELEM, the
2516 * value to be assigned must already be pushed.
2519 EmitElemOperands(ExclusiveContext
* cx
, ParseNode
* pn
, JSOp op
, BytecodeEmitter
* bce
)
2521 MOZ_ASSERT(pn
->isArity(PN_BINARY
));
2522 if (!EmitTree(cx
, bce
, pn
->pn_left
))
2524 if (op
== JSOP_CALLELEM
&& Emit1(cx
, bce
, JSOP_DUP
) < 0)
2526 if (!EmitTree(cx
, bce
, pn
->pn_right
))
2528 bool isSetElem
= op
== JSOP_SETELEM
|| op
== JSOP_STRICTSETELEM
;
2529 if (isSetElem
&& Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)2) < 0)
2535 EmitElemOpBase(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp op
)
2537 if (Emit1(cx
, bce
, op
) < 0)
2539 CheckTypeSet(cx
, bce
, op
);
2544 EmitElemOp(ExclusiveContext
* cx
, ParseNode
* pn
, JSOp op
, BytecodeEmitter
* bce
)
2546 return EmitElemOperands(cx
, pn
, op
, bce
) && EmitElemOpBase(cx
, bce
, op
);
2550 EmitElemIncDec(ExclusiveContext
* cx
, ParseNode
* pn
, BytecodeEmitter
* bce
)
2552 MOZ_ASSERT(pn
->pn_kid
->getKind() == PNK_ELEM
);
2554 if (!EmitElemOperands(cx
, pn
->pn_kid
, JSOP_GETELEM
, bce
))
2558 JSOp binop
= GetIncDecInfo(pn
->getKind(), &post
);
2561 * We need to convert the key to an object id first, so that we do not do
2562 * it inside both the GETELEM and the SETELEM.
2565 if (Emit1(cx
, bce
, JSOP_TOID
) < 0) // OBJ KEY
2567 if (Emit1(cx
, bce
, JSOP_DUP2
) < 0) // OBJ KEY OBJ KEY
2569 if (!EmitElemOpBase(cx
, bce
, JSOP_GETELEM
)) // OBJ KEY V
2571 if (Emit1(cx
, bce
, JSOP_POS
) < 0) // OBJ KEY N
2573 if (post
&& Emit1(cx
, bce
, JSOP_DUP
) < 0) // OBJ KEY N? N
2575 if (Emit1(cx
, bce
, JSOP_ONE
) < 0) // OBJ KEY N? N 1
2577 if (Emit1(cx
, bce
, binop
) < 0) // OBJ KEY N? N+1
2581 if (Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)3) < 0) // KEY N N+1 OBJ
2583 if (Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)3) < 0) // N N+1 OBJ KEY
2585 if (Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)2) < 0) // N OBJ KEY N+1
2589 JSOp setOp
= bce
->sc
->strict
? JSOP_STRICTSETELEM
: JSOP_SETELEM
;
2590 if (!EmitElemOpBase(cx
, bce
, setOp
)) // N? N+1
2592 if (post
&& Emit1(cx
, bce
, JSOP_POP
) < 0) // RESULT
2599 EmitNumberOp(ExclusiveContext
* cx
, double dval
, BytecodeEmitter
* bce
)
2606 if (NumberIsInt32(dval
, &ival
)) {
2608 return Emit1(cx
, bce
, JSOP_ZERO
) >= 0;
2610 return Emit1(cx
, bce
, JSOP_ONE
) >= 0;
2611 if ((int)(int8_t)ival
== ival
)
2612 return Emit2(cx
, bce
, JSOP_INT8
, (jsbytecode
)(int8_t)ival
) >= 0;
2615 if (u
< JS_BIT(16)) {
2616 EMIT_UINT16_IMM_OP(JSOP_UINT16
, u
);
2617 } else if (u
< JS_BIT(24)) {
2618 off
= EmitN(cx
, bce
, JSOP_UINT24
, 3);
2621 pc
= bce
->code(off
);
2624 off
= EmitN(cx
, bce
, JSOP_INT32
, 4);
2627 pc
= bce
->code(off
);
2628 SET_INT32(pc
, ival
);
2633 if (!bce
->constList
.append(DoubleValue(dval
)))
2636 return EmitIndex32(cx
, JSOP_DOUBLE
, bce
->constList
.length() - 1, bce
);
2640 SetJumpOffsetAt(BytecodeEmitter
* bce
, ptrdiff_t off
)
2642 SET_JUMP_OFFSET(bce
->code(off
), bce
->offset() - off
);
2646 PushInitialConstants(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp op
, unsigned n
)
2648 MOZ_ASSERT(op
== JSOP_UNDEFINED
|| op
== JSOP_UNINITIALIZED
);
2649 for (unsigned i
= 0; i
< n
; ++i
) {
2650 if (Emit1(cx
, bce
, op
) < 0)
2657 InitializeBlockScopedLocalsFromStack(ExclusiveContext
* cx
, BytecodeEmitter
* bce
,
2658 Handle
<StaticBlockObject
*> blockObj
)
2660 for (unsigned i
= blockObj
->numVariables(); i
> 0; --i
) {
2661 if (blockObj
->isAliased(i
- 1)) {
2664 sc
.setSlot(BlockObject::RESERVED_SLOTS
+ i
- 1);
2665 if (!EmitAliasedVarOp(cx
, JSOP_INITALIASEDLEXICAL
, sc
, DontCheckLexical
, bce
))
2668 // blockIndexToLocalIndex returns the slot index after the unaliased
2669 // locals stored in the frame. EmitUnaliasedVarOp expects the slot index
2670 // to include both unaliased and aliased locals, so we have to add the
2671 // number of aliased locals.
2672 uint32_t numAliased
= bce
->script
->bindings
.numAliasedBodyLevelLocals();
2673 unsigned local
= blockObj
->blockIndexToLocalIndex(i
- 1) + numAliased
;
2674 if (!EmitUnaliasedVarOp(cx
, JSOP_INITLEXICAL
, local
, DontCheckLexical
, bce
))
2677 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
2684 EnterBlockScope(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, StmtInfoBCE
* stmtInfo
,
2685 ObjectBox
* objbox
, JSOp initialValueOp
, unsigned alreadyPushed
= 0)
2687 // Initial values for block-scoped locals. Whether it is undefined or the
2688 // JS_UNINITIALIZED_LEXICAL magic value depends on the context. The
2689 // current way we emit for-in and for-of heads means its let bindings will
2690 // always be initialized, so we can initialize them to undefined.
2691 Rooted
<StaticBlockObject
*> blockObj(cx
, &objbox
->object
->as
<StaticBlockObject
>());
2692 if (!PushInitialConstants(cx
, bce
, initialValueOp
, blockObj
->numVariables() - alreadyPushed
))
2695 if (!EnterNestedScope(cx
, bce
, stmtInfo
, objbox
, STMT_BLOCK
))
2698 if (!InitializeBlockScopedLocalsFromStack(cx
, bce
, blockObj
))
2705 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047.
2706 * LLVM is deciding to inline this function which uses a lot of stack space
2707 * into EmitTree which is recursive and uses relatively little stack space.
2709 MOZ_NEVER_INLINE
static bool
2710 EmitSwitch(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
2714 ptrdiff_t top
, off
, defaultOffset
;
2715 ParseNode
* pn2
, *pn3
, *pn4
;
2721 /* Try for most optimal, fall back if not dense ints. */
2722 switchOp
= JSOP_TABLESWITCH
;
2727 MOZ_ASSERT(pn2
->isKind(PNK_LEXICALSCOPE
) || pn2
->isKind(PNK_STATEMENTLIST
));
2729 /* Push the discriminant. */
2730 if (!EmitTree(cx
, bce
, pn
->pn_left
))
2733 StmtInfoBCE
stmtInfo(cx
);
2734 if (pn2
->isKind(PNK_LEXICALSCOPE
)) {
2735 if (!EnterBlockScope(cx
, bce
, &stmtInfo
, pn2
->pn_objbox
, JSOP_UNINITIALIZED
, 0))
2738 stmtInfo
.type
= STMT_SWITCH
;
2739 stmtInfo
.update
= top
= bce
->offset();
2740 /* Advance pn2 to refer to the switch case list. */
2743 MOZ_ASSERT(pn2
->isKind(PNK_STATEMENTLIST
));
2744 top
= bce
->offset();
2745 PushStatementBCE(bce
, &stmtInfo
, STMT_SWITCH
, top
);
2748 /* Switch bytecodes run from here till end of final case. */
2749 uint32_t caseCount
= pn2
->pn_count
;
2750 uint32_t tableLength
= 0;
2751 UniquePtr
<ParseNode
*[], JS::FreePolicy
> table(nullptr);
2753 if (caseCount
> JS_BIT(16)) {
2754 bce
->parser
->tokenStream
.reportError(JSMSG_TOO_MANY_CASES
);
2758 if (caseCount
== 0 ||
2760 (hasDefault
= (pn2
->pn_head
->isKind(PNK_DEFAULT
))))) {
2766 #define INTMAP_LENGTH 256
2767 jsbitmap intmap_space
[INTMAP_LENGTH
];
2768 jsbitmap
* intmap
= nullptr;
2769 int32_t intmap_bitlen
= 0;
2771 low
= JSVAL_INT_MAX
;
2772 high
= JSVAL_INT_MIN
;
2774 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
2775 if (pn3
->isKind(PNK_DEFAULT
)) {
2777 caseCount
--; /* one of the "cases" was the default */
2781 MOZ_ASSERT(pn3
->isKind(PNK_CASE
));
2782 if (switchOp
== JSOP_CONDSWITCH
)
2785 MOZ_ASSERT(switchOp
== JSOP_TABLESWITCH
);
2789 if (pn4
->getKind() != PNK_NUMBER
) {
2790 switchOp
= JSOP_CONDSWITCH
;
2795 if (!NumberIsInt32(pn4
->pn_dval
, &i
)) {
2796 switchOp
= JSOP_CONDSWITCH
;
2800 if ((unsigned)(i
+ (int)JS_BIT(15)) >= (unsigned)JS_BIT(16)) {
2801 switchOp
= JSOP_CONDSWITCH
;
2810 * Check for duplicates, which require a JSOP_CONDSWITCH.
2811 * We bias i by 65536 if it's negative, and hope that's a rare
2812 * case (because it requires a malloc'd bitmap).
2816 if (i
>= intmap_bitlen
) {
2818 size_t(i
) < (INTMAP_LENGTH
* JS_BITMAP_NBITS
)) {
2819 intmap
= intmap_space
;
2820 intmap_bitlen
= INTMAP_LENGTH
* JS_BITMAP_NBITS
;
2822 /* Just grab 8K for the worst-case bitmap. */
2823 intmap_bitlen
= JS_BIT(16);
2824 intmap
= cx
->pod_malloc
<jsbitmap
>(JS_BIT(16) / JS_BITMAP_NBITS
);
2826 js_ReportOutOfMemory(cx
);
2830 memset(intmap
, 0, size_t(intmap_bitlen
) / CHAR_BIT
);
2832 if (JS_TEST_BIT(intmap
, i
)) {
2833 switchOp
= JSOP_CONDSWITCH
;
2836 JS_SET_BIT(intmap
, i
);
2839 if (intmap
&& intmap
!= intmap_space
)
2845 * Compute table length and select condswitch instead if overlarge or
2846 * more than half-sparse.
2848 if (switchOp
== JSOP_TABLESWITCH
) {
2849 tableLength
= (uint32_t)(high
- low
+ 1);
2850 if (tableLength
>= JS_BIT(16) || tableLength
> 2 * caseCount
)
2851 switchOp
= JSOP_CONDSWITCH
;
2856 * The note has one or two offsets: first tells total switch code length;
2857 * second (if condswitch) tells offset to first JSOP_CASE.
2859 if (switchOp
== JSOP_CONDSWITCH
) {
2860 /* 0 bytes of immediate for unoptimized switch. */
2862 noteIndex
= NewSrcNote3(cx
, bce
, SRC_CONDSWITCH
, 0, 0);
2864 MOZ_ASSERT(switchOp
== JSOP_TABLESWITCH
);
2866 /* 3 offsets (len, low, high) before the table, 1 per entry. */
2867 switchSize
= (size_t)(JUMP_OFFSET_LEN
* (3 + tableLength
));
2868 noteIndex
= NewSrcNote2(cx
, bce
, SRC_TABLESWITCH
, 0);
2873 /* Emit switchOp followed by switchSize bytes of jump or lookup table. */
2874 if (EmitN(cx
, bce
, switchOp
, switchSize
) < 0)
2878 if (switchOp
== JSOP_CONDSWITCH
) {
2879 int caseNoteIndex
= -1;
2880 bool beforeCases
= true;
2882 /* Emit code for evaluating cases and jumping to case statements. */
2883 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
2885 if (pn4
&& !EmitTree(cx
, bce
, pn4
))
2887 if (caseNoteIndex
>= 0) {
2888 /* off is the previous JSOP_CASE's bytecode offset. */
2889 if (!SetSrcNoteOffset(cx
, bce
, (unsigned)caseNoteIndex
, 0, bce
->offset() - off
))
2893 MOZ_ASSERT(pn3
->isKind(PNK_DEFAULT
));
2896 caseNoteIndex
= NewSrcNote2(cx
, bce
, SRC_NEXTCASE
, 0);
2897 if (caseNoteIndex
< 0)
2899 off
= EmitJump(cx
, bce
, JSOP_CASE
, 0);
2902 pn3
->pn_offset
= off
;
2904 unsigned noteCount
, noteCountDelta
;
2906 /* Switch note's second offset is to first JSOP_CASE. */
2907 noteCount
= bce
->notes().length();
2908 if (!SetSrcNoteOffset(cx
, bce
, (unsigned)noteIndex
, 1, off
- top
))
2910 noteCountDelta
= bce
->notes().length() - noteCount
;
2911 if (noteCountDelta
!= 0)
2912 caseNoteIndex
+= noteCountDelta
;
2913 beforeCases
= false;
2918 * If we didn't have an explicit default (which could fall in between
2919 * cases, preventing us from fusing this SetSrcNoteOffset with the call
2920 * in the loop above), link the last case to the implicit default for
2921 * the benefit of IonBuilder.
2924 caseNoteIndex
>= 0 &&
2925 !SetSrcNoteOffset(cx
, bce
, (unsigned)caseNoteIndex
, 0, bce
->offset() - off
))
2930 /* Emit default even if no explicit default statement. */
2931 defaultOffset
= EmitJump(cx
, bce
, JSOP_DEFAULT
, 0);
2932 if (defaultOffset
< 0)
2935 MOZ_ASSERT(switchOp
== JSOP_TABLESWITCH
);
2936 pc
= bce
->code(top
+ JUMP_OFFSET_LEN
);
2938 /* Fill in switch bounds, which we know fit in 16-bit offsets. */
2939 SET_JUMP_OFFSET(pc
, low
);
2940 pc
+= JUMP_OFFSET_LEN
;
2941 SET_JUMP_OFFSET(pc
, high
);
2942 pc
+= JUMP_OFFSET_LEN
;
2945 * Use malloc to avoid arena bloat for programs with many switches.
2946 * UniquePtr takes care of freeing it on exit.
2948 if (tableLength
!= 0) {
2949 table
= cx
->make_zeroed_pod_array
<ParseNode
*>(tableLength
);
2952 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
2953 if (pn3
->isKind(PNK_DEFAULT
))
2956 MOZ_ASSERT(pn3
->isKind(PNK_CASE
));
2959 MOZ_ASSERT(pn4
->getKind() == PNK_NUMBER
);
2961 int32_t i
= int32_t(pn4
->pn_dval
);
2962 MOZ_ASSERT(double(i
) == pn4
->pn_dval
);
2965 MOZ_ASSERT(uint32_t(i
) < tableLength
);
2971 /* Emit code for each case's statements, copying pn_offset up to pn3. */
2972 for (pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
2973 if (switchOp
== JSOP_CONDSWITCH
&& !pn3
->isKind(PNK_DEFAULT
))
2974 SetJumpOffsetAt(bce
, pn3
->pn_offset
);
2975 pn4
= pn3
->pn_right
;
2976 if (!EmitTree(cx
, bce
, pn4
))
2978 pn3
->pn_offset
= pn4
->pn_offset
;
2979 if (pn3
->isKind(PNK_DEFAULT
))
2980 off
= pn3
->pn_offset
- top
;
2984 /* If no default case, offset for default is to end of switch. */
2985 off
= bce
->offset() - top
;
2988 /* We better have set "off" by now. */
2989 MOZ_ASSERT(off
!= -1);
2991 /* Set the default offset (to end of switch if no default). */
2992 if (switchOp
== JSOP_CONDSWITCH
) {
2994 MOZ_ASSERT(defaultOffset
!= -1);
2995 SET_JUMP_OFFSET(bce
->code(defaultOffset
), off
- (defaultOffset
- top
));
2997 pc
= bce
->code(top
);
2998 SET_JUMP_OFFSET(pc
, off
);
2999 pc
+= JUMP_OFFSET_LEN
;
3002 /* Set the SRC_SWITCH note's offset operand to tell end of switch. */
3003 off
= bce
->offset() - top
;
3004 if (!SetSrcNoteOffset(cx
, bce
, (unsigned)noteIndex
, 0, off
))
3007 if (switchOp
== JSOP_TABLESWITCH
) {
3008 /* Skip over the already-initialized switch bounds. */
3009 pc
+= 2 * JUMP_OFFSET_LEN
;
3011 /* Fill in the jump table, if there is one. */
3012 for (uint32_t i
= 0; i
< tableLength
; i
++) {
3014 off
= pn3
? pn3
->pn_offset
- top
: 0;
3015 SET_JUMP_OFFSET(pc
, off
);
3016 pc
+= JUMP_OFFSET_LEN
;
3020 if (pn
->pn_right
->isKind(PNK_LEXICALSCOPE
)) {
3021 if (!LeaveNestedScope(cx
, bce
, &stmtInfo
))
3024 if (!PopStatementBCE(cx
, bce
))
3032 BytecodeEmitter::isRunOnceLambda()
3034 // The run once lambda flags set by the parser are approximate, and we look
3035 // at properties of the function itself before deciding to emit a function
3036 // as a run once lambda.
3038 if (!(parent
&& parent
->emittingRunOnceLambda
) &&
3039 (emitterMode
!= LazyFunction
|| !lazyScript
->treatAsRunOnce()))
3044 FunctionBox
* funbox
= sc
->asFunctionBox();
3045 return !funbox
->argumentsHasLocalBinding() &&
3046 !funbox
->isGenerator() &&
3047 !funbox
->function()->name();
3051 EmitYieldOp(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp op
)
3053 if (op
== JSOP_FINALYIELDRVAL
)
3054 return Emit1(cx
, bce
, JSOP_FINALYIELDRVAL
) >= 0;
3056 MOZ_ASSERT(op
== JSOP_INITIALYIELD
|| op
== JSOP_YIELD
);
3058 ptrdiff_t off
= EmitN(cx
, bce
, op
, 3);
3062 uint32_t yieldIndex
= bce
->yieldOffsetList
.length();
3063 if (yieldIndex
>= JS_BIT(24)) {
3064 bce
->reportError(nullptr, JSMSG_TOO_MANY_YIELDS
);
3068 SET_UINT24(bce
->code(off
), yieldIndex
);
3070 if (!bce
->yieldOffsetList
.append(bce
->offset()))
3073 return Emit1(cx
, bce
, JSOP_DEBUGAFTERYIELD
) >= 0;
3077 frontend::EmitFunctionScript(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* body
)
3079 if (!bce
->updateLocalsToFrameSlots())
3083 * IonBuilder has assumptions about what may occur immediately after
3084 * script->main (e.g., in the case of destructuring params). Thus, put the
3085 * following ops into the range [script->code, script->main). Note:
3086 * execution starts from script->code, so this has no semantic effect.
3089 FunctionBox
* funbox
= bce
->sc
->asFunctionBox();
3090 if (funbox
->argumentsHasLocalBinding()) {
3091 MOZ_ASSERT(bce
->offset() == 0); /* See JSScript::argumentsBytecode. */
3092 bce
->switchToProlog();
3093 if (Emit1(cx
, bce
, JSOP_ARGUMENTS
) < 0)
3095 InternalBindingsHandle
bindings(bce
->script
, &bce
->script
->bindings
);
3096 BindingIter bi
= Bindings::argumentsBinding(cx
, bindings
);
3097 if (bce
->script
->bindingIsAliased(bi
)) {
3100 JS_ALWAYS_TRUE(LookupAliasedNameSlot(bce
, bce
->script
, cx
->names().arguments
, &sc
));
3101 if (!EmitAliasedVarOp(cx
, JSOP_SETALIASEDVAR
, sc
, DontCheckLexical
, bce
))
3104 if (!EmitUnaliasedVarOp(cx
, JSOP_SETLOCAL
, bi
.localIndex(), DontCheckLexical
, bce
))
3107 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
3109 bce
->switchToMain();
3113 * Emit a prologue for run-once scripts which will deoptimize JIT code if
3114 * the script ends up running multiple times via foo.caller related
3117 bool runOnce
= bce
->isRunOnceLambda();
3119 bce
->switchToProlog();
3120 if (Emit1(cx
, bce
, JSOP_RUNONCE
) < 0)
3122 bce
->switchToMain();
3125 if (!EmitTree(cx
, bce
, body
))
3128 // If we fall off the end of a generator, do a final yield.
3129 if (bce
->sc
->isFunctionBox() && bce
->sc
->asFunctionBox()->isGenerator()) {
3130 if (bce
->sc
->asFunctionBox()->isStarGenerator() && !EmitPrepareIteratorResult(cx
, bce
))
3133 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0)
3136 if (bce
->sc
->asFunctionBox()->isStarGenerator() && !EmitFinishIteratorResult(cx
, bce
, true))
3139 if (Emit1(cx
, bce
, JSOP_SETRVAL
) < 0)
3143 // We know that .generator is on the top scope chain node, as we are
3144 // at the function end.
3146 MOZ_ALWAYS_TRUE(LookupAliasedNameSlot(bce
, bce
->script
, cx
->names().dotGenerator
, &sc
));
3147 if (!EmitAliasedVarOp(cx
, JSOP_GETALIASEDVAR
, sc
, DontCheckLexical
, bce
))
3150 // No need to check for finally blocks, etc as in EmitReturn.
3151 if (!EmitYieldOp(cx
, bce
, JSOP_FINALYIELDRVAL
))
3156 * Always end the script with a JSOP_RETRVAL. Some other parts of the codebase
3157 * depend on this opcode, e.g. js_InternalInterpret.
3159 if (Emit1(cx
, bce
, JSOP_RETRVAL
) < 0)
3162 // If all locals are aliased, the frame's block slots won't be used, so we
3163 // can set numBlockScoped = 0. This is nice for generators as it ensures
3164 // nfixed == 0, so we don't have to initialize any local slots when resuming
3166 if (bce
->sc
->allLocalsAliased())
3167 bce
->script
->bindings
.setAllLocalsAliased();
3169 if (!JSScript::fullyInitFromEmitter(cx
, bce
->script
, bce
))
3173 * If this function is only expected to run once, mark the script so that
3174 * initializers created within it may be given more precise types.
3177 bce
->script
->setTreatAsRunOnce();
3178 MOZ_ASSERT(!bce
->script
->hasRunOnce());
3181 /* Initialize fun->script() so that the debugger has a valid fun->script(). */
3182 RootedFunction
fun(cx
, bce
->script
->functionNonDelazifying());
3183 MOZ_ASSERT(fun
->isInterpreted());
3185 if (fun
->isInterpretedLazy())
3186 fun
->setUnlazifiedScript(bce
->script
);
3188 fun
->setScript(bce
->script
);
3190 bce
->tellDebuggerAboutCompiledScript(cx
);
3196 MaybeEmitVarDecl(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp prologOp
, ParseNode
* pn
,
3201 if (!pn
->pn_cookie
.isFree()) {
3202 atomIndex
= pn
->pn_cookie
.slot();
3204 if (!bce
->makeAtomIndex(pn
->pn_atom
, &atomIndex
))
3208 if (JOF_OPTYPE(pn
->getOp()) == JOF_ATOM
&&
3209 (!bce
->sc
->isFunctionBox() || bce
->sc
->asFunctionBox()->isHeavyweight()))
3211 bce
->switchToProlog();
3212 if (!UpdateSourceCoordNotes(cx
, bce
, pn
->pn_pos
.begin
))
3214 if (!EmitIndexOp(cx
, prologOp
, atomIndex
, bce
))
3216 bce
->switchToMain();
3220 *result
= atomIndex
;
3225 * This enum tells EmitVariables and the destructuring functions how emit the
3226 * given Parser::variables parse tree. In the base case, DefineVars, the caller
3227 * only wants variables to be defined in the prologue (if necessary). For
3228 * PushInitialValues, variable initializer expressions are evaluated and left
3229 * on the stack. For InitializeVars, the initializer expressions values are
3230 * assigned (to local variables) and popped.
3235 PushInitialValues
= 1,
3240 (*DestructuringDeclEmitter
)(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp prologOp
, ParseNode
* pn
);
3242 template <DestructuringDeclEmitter EmitName
>
3244 EmitDestructuringDeclsWithEmitter(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp prologOp
,
3247 if (pattern
->isKind(PNK_ARRAY
)) {
3248 for (ParseNode
* element
= pattern
->pn_head
; element
; element
= element
->pn_next
) {
3249 if (element
->isKind(PNK_ELISION
))
3251 ParseNode
* target
= element
;
3252 if (element
->isKind(PNK_SPREAD
)) {
3253 MOZ_ASSERT(element
->pn_kid
->isKind(PNK_NAME
));
3254 target
= element
->pn_kid
;
3256 if (target
->isKind(PNK_NAME
)) {
3257 if (!EmitName(cx
, bce
, prologOp
, target
))
3260 if (!EmitDestructuringDeclsWithEmitter
<EmitName
>(cx
, bce
, prologOp
, target
))
3267 MOZ_ASSERT(pattern
->isKind(PNK_OBJECT
));
3268 for (ParseNode
* member
= pattern
->pn_head
; member
; member
= member
->pn_next
) {
3269 MOZ_ASSERT(member
->isKind(PNK_MUTATEPROTO
) ||
3270 member
->isKind(PNK_COLON
) ||
3271 member
->isKind(PNK_SHORTHAND
));
3273 ParseNode
* target
= member
->isKind(PNK_MUTATEPROTO
) ? member
->pn_kid
: member
->pn_right
;
3275 if (target
->isKind(PNK_NAME
)) {
3276 if (!EmitName(cx
, bce
, prologOp
, target
))
3279 if (!EmitDestructuringDeclsWithEmitter
<EmitName
>(cx
, bce
, prologOp
, target
))
3287 EmitDestructuringDecl(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp prologOp
, ParseNode
* pn
)
3289 MOZ_ASSERT(pn
->isKind(PNK_NAME
));
3290 if (!BindNameToSlot(cx
, bce
, pn
))
3293 MOZ_ASSERT(!pn
->isOp(JSOP_CALLEE
));
3294 return MaybeEmitVarDecl(cx
, bce
, prologOp
, pn
, nullptr);
3298 EmitDestructuringDecls(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp prologOp
,
3301 return EmitDestructuringDeclsWithEmitter
<EmitDestructuringDecl
>(cx
, bce
, prologOp
, pattern
);
3305 EmitInitializeDestructuringDecl(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp prologOp
,
3308 MOZ_ASSERT(pn
->isKind(PNK_NAME
));
3309 MOZ_ASSERT(pn
->isBound());
3310 return EmitVarOp(cx
, pn
, pn
->getOp(), bce
);
3313 // Emit code to initialize all destructured names to the value on the top of
3316 EmitInitializeDestructuringDecls(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, JSOp prologOp
,
3319 return EmitDestructuringDeclsWithEmitter
<EmitInitializeDestructuringDecl
>(cx
, bce
,
3324 EmitDestructuringOpsHelper(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pattern
,
3325 VarEmitOption emitOption
);
3328 * EmitDestructuringLHS assumes the to-be-destructured value has been pushed on
3329 * the stack and emits code to destructure a single lhs expression (either a
3330 * name or a compound []/{} expression).
3332 * If emitOption is InitializeVars, the to-be-destructured value is assigned to
3333 * locals and ultimately the initial slot is popped (-1 total depth change).
3335 * If emitOption is PushInitialValues, the to-be-destructured value is replaced
3336 * with the initial values of the N (where 0 <= N) variables assigned in the
3337 * lhs expression. (Same post-condition as EmitDestructuringOpsHelper)
3340 EmitDestructuringLHS(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, VarEmitOption emitOption
)
3342 MOZ_ASSERT(emitOption
!= DefineVars
);
3344 // Now emit the lvalue opcode sequence. If the lvalue is a nested
3345 // destructuring initialiser-form, call ourselves to handle it, then pop
3346 // the matched value. Otherwise emit an lvalue bytecode sequence followed
3347 // by an assignment op.
3348 if (pn
->isKind(PNK_SPREAD
))
3350 if (pn
->isKind(PNK_ARRAY
) || pn
->isKind(PNK_OBJECT
)) {
3351 if (!EmitDestructuringOpsHelper(cx
, bce
, pn
, emitOption
))
3353 if (emitOption
== InitializeVars
) {
3354 // Per its post-condition, EmitDestructuringOpsHelper has left the
3355 // to-be-destructured value on top of the stack.
3356 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
3359 } else if (emitOption
== PushInitialValues
) {
3360 // The lhs is a simple name so the to-be-destructured value is
3361 // its initial value and there is nothing to do.
3362 MOZ_ASSERT(pn
->getOp() == JSOP_SETLOCAL
|| pn
->getOp() == JSOP_INITLEXICAL
);
3363 MOZ_ASSERT(pn
->pn_dflags
& PND_BOUND
);
3365 switch (pn
->getKind()) {
3367 if (!BindNameToSlot(cx
, bce
, pn
))
3370 switch (pn
->getOp()) {
3372 case JSOP_STRICTSETNAME
:
3374 case JSOP_STRICTSETGNAME
:
3375 case JSOP_SETCONST
: {
3376 // This is like ordinary assignment, but with one difference.
3378 // In `a = b`, we first determine a binding for `a` (using
3379 // JSOP_BINDNAME or JSOP_BINDGNAME), then we evaluate `b`, then
3380 // a JSOP_SETNAME instruction.
3382 // In `[a] = [b]`, per spec, `b` is evaluated first, then we
3383 // determine a binding for `a`. Then we need to do assignment--
3384 // but the operands are on the stack in the wrong order for
3385 // JSOP_SETPROP, so we have to add a JSOP_SWAP.
3387 if (!bce
->makeAtomIndex(pn
->pn_atom
, &atomIndex
))
3390 if (!pn
->isOp(JSOP_SETCONST
)) {
3391 bool global
= pn
->isOp(JSOP_SETGNAME
) || pn
->isOp(JSOP_STRICTSETGNAME
);
3392 JSOp bindOp
= global
? JSOP_BINDGNAME
: JSOP_BINDNAME
;
3393 if (!EmitIndex32(cx
, bindOp
, atomIndex
, bce
))
3395 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0)
3399 if (!EmitIndexOp(cx
, pn
->getOp(), atomIndex
, bce
))
3406 case JSOP_INITLEXICAL
:
3407 if (!EmitVarOp(cx
, pn
, pn
->getOp(), bce
))
3412 MOZ_CRASH("EmitDestructuringLHS: bad name op");
3418 // See the (PNK_NAME, JSOP_SETNAME) case above.
3420 // In `a.x = b`, `a` is evaluated first, then `b`, then a
3421 // JSOP_SETPROP instruction.
3423 // In `[a.x] = [b]`, per spec, `b` is evaluated before `a`. Then we
3424 // need a property set -- but the operands are on the stack in the
3425 // wrong order for JSOP_SETPROP, so we have to add a JSOP_SWAP.
3426 if (!EmitTree(cx
, bce
, pn
->pn_expr
))
3428 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0)
3430 JSOp setOp
= bce
->sc
->strict
? JSOP_STRICTSETPROP
: JSOP_SETPROP
;
3431 if (!EmitAtomOp(cx
, pn
, setOp
, bce
))
3438 // See the comment at `case PNK_DOT:` above. This case,
3439 // `[a[x]] = [b]`, is handled much the same way. The JSOP_SWAP
3440 // is emitted by EmitElemOperands.
3441 JSOp setOp
= bce
->sc
->strict
? JSOP_STRICTSETELEM
: JSOP_SETELEM
;
3442 if (!EmitElemOp(cx
, pn
, setOp
, bce
))
3448 MOZ_ASSERT(pn
->pn_xflags
& PNX_SETCALL
);
3449 if (!EmitTree(cx
, bce
, pn
))
3452 // Pop the call return value. Below, we pop the RHS too, balancing
3453 // the stack --- presumably for the benefit of bytecode
3454 // analysis. (The interpreter will never reach these instructions
3455 // since we just emitted JSOP_SETCALL, which always throws. It's
3456 // possible no analyses actually depend on this either.)
3457 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
3462 MOZ_CRASH("EmitDestructuringLHS: bad lhs kind");
3465 // Pop the assigned value.
3466 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
3473 static bool EmitSpread(ExclusiveContext
* cx
, BytecodeEmitter
* bce
);
3474 static bool EmitIterator(ExclusiveContext
* cx
, BytecodeEmitter
* bce
);
3477 * EmitIteratorNext will pop iterator from the top of the stack.
3478 * It will push the result of |.next()| onto the stack.
3481 EmitIteratorNext(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
=nullptr)
3483 MOZ_ASSERT(bce
->emitterMode
!= BytecodeEmitter::SelfHosting
,
3484 ".next() iteration is prohibited in self-hosted code because it "
3485 "can run user-modifiable iteration code");
3487 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // ... ITER ITER
3489 if (!EmitAtomOp(cx
, cx
->names().next
, JSOP_CALLPROP
, bce
)) // ... ITER NEXT
3491 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0) // ... NEXT ITER
3493 if (EmitCall(cx
, bce
, JSOP_CALL
, 0, pn
) < 0) // ... RESULT
3495 CheckTypeSet(cx
, bce
, JSOP_CALL
);
3500 EmitDestructuringOpsArrayHelper(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pattern
,
3501 VarEmitOption emitOption
)
3503 MOZ_ASSERT(pattern
->isKind(PNK_ARRAY
));
3504 MOZ_ASSERT(pattern
->isArity(PN_LIST
));
3505 MOZ_ASSERT(bce
->stackDepth
!= 0);
3508 * Use an iterator to destructure the RHS, instead of index lookup.
3509 * InitializeVars expects us to leave the *original* value on the stack.
3511 if (emitOption
== InitializeVars
) {
3512 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // ... OBJ OBJ
3515 if (!EmitIterator(cx
, bce
)) // ... OBJ? ITER
3517 bool needToPopIterator
= true;
3519 for (ParseNode
* member
= pattern
->pn_head
; member
; member
= member
->pn_next
) {
3521 * Now push the property name currently being matched, which is the
3522 * current property name "label" on the left of a colon in the object
3525 if (member
->isKind(PNK_SPREAD
)) {
3526 /* Create a new array with the rest of the iterator */
3527 ptrdiff_t off
= EmitN(cx
, bce
, JSOP_NEWARRAY
, 3); // ... OBJ? ITER ARRAY
3530 CheckTypeSet(cx
, bce
, JSOP_NEWARRAY
);
3531 jsbytecode
* pc
= bce
->code(off
);
3534 if (!EmitNumberOp(cx
, 0, bce
)) // ... OBJ? ITER ARRAY INDEX
3536 if (!EmitSpread(cx
, bce
)) // ... OBJ? ARRAY INDEX
3538 if (Emit1(cx
, bce
, JSOP_POP
) < 0) // ... OBJ? ARRAY
3540 needToPopIterator
= false;
3542 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // ... OBJ? ITER ITER
3544 if (!EmitIteratorNext(cx
, bce
, pattern
)) // ... OBJ? ITER RESULT
3546 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // ... OBJ? ITER RESULT RESULT
3548 if (!EmitAtomOp(cx
, cx
->names().done
, JSOP_GETPROP
, bce
)) // ... OBJ? ITER RESULT DONE?
3551 // Emit (result.done ? undefined : result.value)
3552 // This is mostly copied from EmitConditionalExpression, except that this code
3553 // does not push new values onto the stack.
3554 ptrdiff_t noteIndex
= NewSrcNote(cx
, bce
, SRC_COND
);
3557 ptrdiff_t beq
= EmitJump(cx
, bce
, JSOP_IFEQ
, 0);
3561 if (Emit1(cx
, bce
, JSOP_POP
) < 0) // ... OBJ? ITER
3563 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0) // ... OBJ? ITER UNDEFINED
3566 /* Jump around else, fixup the branch, emit else, fixup jump. */
3567 ptrdiff_t jmp
= EmitJump(cx
, bce
, JSOP_GOTO
, 0);
3570 SetJumpOffsetAt(bce
, beq
);
3572 if (!EmitAtomOp(cx
, cx
->names().value
, JSOP_GETPROP
, bce
)) // ... OBJ? ITER VALUE
3575 SetJumpOffsetAt(bce
, jmp
);
3576 if (!SetSrcNoteOffset(cx
, bce
, noteIndex
, 0, jmp
- beq
))
3580 // Destructure into the pattern the element contains.
3581 ParseNode
* subpattern
= member
;
3582 if (subpattern
->isKind(PNK_ELISION
)) {
3583 // The value destructuring into an elision just gets ignored.
3584 if (Emit1(cx
, bce
, JSOP_POP
) < 0) // ... OBJ? ITER
3589 int32_t depthBefore
= bce
->stackDepth
;
3590 if (!EmitDestructuringLHS(cx
, bce
, subpattern
, emitOption
))
3593 if (emitOption
== PushInitialValues
&& needToPopIterator
) {
3595 * After '[x,y]' in 'let ([[x,y], z] = o)', the stack is
3596 * | to-be-destructured-value | x | y |
3599 * so emit a pick to produce the intermediate state
3600 * | x | y | to-be-destructured-value |
3601 * before destructuring z. This gives the loop invariant that
3602 * the to-be-destructured-value is always on top of the stack.
3604 MOZ_ASSERT((bce
->stackDepth
- bce
->stackDepth
) >= -1);
3605 uint32_t pickDistance
= (uint32_t)((bce
->stackDepth
+ 1) - depthBefore
);
3606 if (pickDistance
> 0) {
3607 if (pickDistance
> UINT8_MAX
) {
3608 bce
->reportError(subpattern
, JSMSG_TOO_MANY_LOCALS
);
3611 if (Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)pickDistance
) < 0)
3617 if (needToPopIterator
&& Emit1(cx
, bce
, JSOP_POP
) < 0)
3624 EmitDestructuringOpsObjectHelper(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pattern
,
3625 VarEmitOption emitOption
)
3627 MOZ_ASSERT(pattern
->isKind(PNK_OBJECT
));
3628 MOZ_ASSERT(pattern
->isArity(PN_LIST
));
3630 MOZ_ASSERT(bce
->stackDepth
!= 0); // ... OBJ
3632 for (ParseNode
* member
= pattern
->pn_head
; member
; member
= member
->pn_next
) {
3633 // Duplicate the value being destructured to use as a reference base.
3634 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // ... OBJ OBJ
3637 // Now push the property name currently being matched, which is the
3638 // current property name "label" on the left of a colon in the object
3640 bool needsGetElem
= true;
3642 ParseNode
* subpattern
;
3643 if (member
->isKind(PNK_MUTATEPROTO
)) {
3644 if (!EmitAtomOp(cx
, cx
->names().proto
, JSOP_GETPROP
, bce
)) // ... OBJ PROP
3646 needsGetElem
= false;
3647 subpattern
= member
->pn_kid
;
3649 MOZ_ASSERT(member
->isKind(PNK_COLON
) || member
->isKind(PNK_SHORTHAND
));
3651 ParseNode
* key
= member
->pn_left
;
3652 if (key
->isKind(PNK_NUMBER
)) {
3653 if (!EmitNumberOp(cx
, key
->pn_dval
, bce
)) // ... OBJ OBJ KEY
3655 } else if (key
->isKind(PNK_NAME
) || key
->isKind(PNK_STRING
)) {
3656 PropertyName
* name
= key
->pn_atom
->asPropertyName();
3658 // The parser already checked for atoms representing indexes and
3659 // used PNK_NUMBER instead, but also watch for ids which TI treats
3660 // as indexes for simplification of downstream analysis.
3661 jsid id
= NameToId(name
);
3662 if (id
!= types::IdToTypeId(id
)) {
3663 if (!EmitTree(cx
, bce
, key
)) // ... OBJ OBJ KEY
3666 if (!EmitAtomOp(cx
, name
, JSOP_GETPROP
, bce
)) // ...OBJ PROP
3668 needsGetElem
= false;
3671 MOZ_ASSERT(key
->isKind(PNK_COMPUTED_NAME
));
3672 if (!EmitTree(cx
, bce
, key
->pn_kid
)) // ... OBJ OBJ KEY
3676 subpattern
= member
->pn_right
;
3679 // Get the property value if not done already.
3680 if (needsGetElem
&& !EmitElemOpBase(cx
, bce
, JSOP_GETELEM
)) // ... OBJ PROP
3683 // Destructure PROP per this member's subpattern.
3684 int32_t depthBefore
= bce
->stackDepth
;
3685 if (!EmitDestructuringLHS(cx
, bce
, subpattern
, emitOption
))
3688 // If emitOption is InitializeVars, destructuring initialized each
3689 // target in the subpattern's LHS as it went, then popped PROP. We've
3690 // correctly returned to the loop-entry stack, and we continue to the
3692 if (emitOption
== InitializeVars
) // ... OBJ
3695 MOZ_ASSERT(emitOption
== PushInitialValues
);
3697 // EmitDestructuringLHS removed PROP, and it pushed a value per target
3698 // name in LHS (for |emitOption == PushInitialValues| only makes sense
3699 // when multiple values need to be pushed onto the stack to initialize
3700 // a single lexical scope). It also preserved OBJ deep in the stack as
3701 // the original object to be destructed into remaining target names in
3702 // the LHS object pattern. (We use PushInitialValues *only* as part of
3703 // SpiderMonkey's proprietary let block statements, which assign their
3704 // targets all in a single go [akin to Scheme's let, and distinct from
3705 // let*/letrec].) Thus for:
3707 // let ({arr: [x, y], z} = obj) { ... }
3709 // we have this stack after the above acts upon the [x, y] subpattern:
3713 // (where of course x = obj.arr[0] and y = obj.arr[1], and []-indexing
3714 // is really iteration-indexing). We want to have:
3718 // so that we can continue, ready to destruct z from OBJ. Pick OBJ out
3719 // of the stack, moving it to the top, to accomplish this.
3720 MOZ_ASSERT((bce
->stackDepth
- bce
->stackDepth
) >= -1);
3721 uint32_t pickDistance
= (uint32_t)((bce
->stackDepth
+ 1) - depthBefore
);
3722 if (pickDistance
> 0) {
3723 if (pickDistance
> UINT8_MAX
) {
3724 bce
->reportError(subpattern
, JSMSG_TOO_MANY_LOCALS
);
3727 if (Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)pickDistance
) < 0)
3732 if (emitOption
== PushInitialValues
) {
3733 // Per the above loop invariant, the value being destructured into this
3734 // object pattern is atop the stack. Pop it to achieve the
3736 if (Emit1(cx
, bce
, JSOP_POP
) < 0) // ... <pattern's target name values, seriatim>
3744 * Recursive helper for EmitDestructuringOps.
3745 * EmitDestructuringOpsHelper assumes the to-be-destructured value has been
3746 * pushed on the stack and emits code to destructure each part of a [] or {}
3749 * If emitOption is InitializeVars, the initial to-be-destructured value is
3750 * left untouched on the stack and the overall depth is not changed.
3752 * If emitOption is PushInitialValues, the to-be-destructured value is replaced
3753 * with the initial values of the N (where 0 <= N) variables assigned in the
3754 * lhs expression. (Same post-condition as EmitDestructuringLHS)
3757 EmitDestructuringOpsHelper(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pattern
,
3758 VarEmitOption emitOption
)
3760 MOZ_ASSERT(emitOption
!= DefineVars
);
3762 if (pattern
->isKind(PNK_ARRAY
))
3763 return EmitDestructuringOpsArrayHelper(cx
, bce
, pattern
, emitOption
);
3764 return EmitDestructuringOpsObjectHelper(cx
, bce
, pattern
, emitOption
);
3768 EmitDestructuringOps(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pattern
,
3772 * Call our recursive helper to emit the destructuring assignments and
3773 * related stack manipulations.
3775 VarEmitOption emitOption
= isLet
? PushInitialValues
: InitializeVars
;
3776 return EmitDestructuringOpsHelper(cx
, bce
, pattern
, emitOption
);
3780 EmitTemplateString(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
3782 MOZ_ASSERT(pn
->isArity(PN_LIST
));
3784 for (ParseNode
* pn2
= pn
->pn_head
; pn2
!= NULL
; pn2
= pn2
->pn_next
) {
3785 if (pn2
->getKind() != PNK_STRING
&& pn2
->getKind() != PNK_TEMPLATE_STRING
) {
3786 // We update source notes before emitting the expression
3787 if (!UpdateSourceCoordNotes(cx
, bce
, pn2
->pn_pos
.begin
))
3790 if (!EmitTree(cx
, bce
, pn2
))
3793 if (pn2
->getKind() != PNK_STRING
&& pn2
->getKind() != PNK_TEMPLATE_STRING
) {
3794 // We need to convert the expression to a string
3795 if (Emit1(cx
, bce
, JSOP_TOSTRING
) < 0)
3799 if (pn2
!= pn
->pn_head
) {
3800 // We've pushed two strings onto the stack. Add them together, leaving just one.
3801 if (Emit1(cx
, bce
, JSOP_ADD
) < 0)
3810 EmitVariables(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, VarEmitOption emitOption
,
3811 bool isLetExpr
= false)
3813 MOZ_ASSERT(pn
->isArity(PN_LIST
));
3814 MOZ_ASSERT(isLetExpr
== (emitOption
== PushInitialValues
));
3817 for (ParseNode
* pn2
= pn
->pn_head
; ; pn2
= next
) {
3818 if (!UpdateSourceCoordNotes(cx
, bce
, pn2
->pn_pos
.begin
))
3820 next
= pn2
->pn_next
;
3823 if (!pn2
->isKind(PNK_NAME
)) {
3824 if (pn2
->isKind(PNK_ARRAY
) || pn2
->isKind(PNK_OBJECT
)) {
3825 // If the emit option is DefineVars, emit variable binding
3826 // ops, but not destructuring ops. The parser (see
3827 // Parser::variables) has ensured that our caller will be the
3828 // PNK_FOR/PNK_FORIN/PNK_FOROF case in EmitTree (we don't have
3829 // to worry about this being a variable declaration, as
3830 // destructuring declarations without initializers, e.g., |var
3831 // [x]|, are not legal syntax), and that case will emit the
3832 // destructuring code only after emitting an enumerating
3833 // opcode and a branch that tests whether the enumeration
3834 // ended. Thus, each iteration's assignment is responsible for
3835 // initializing, and nothing needs to be done here.
3837 // Otherwise this is emitting destructuring let binding
3838 // initialization for a legacy comprehension expression. See
3839 // EmitForInOrOfVariables.
3840 MOZ_ASSERT(pn
->pn_count
== 1);
3841 if (emitOption
== DefineVars
) {
3842 if (!EmitDestructuringDecls(cx
, bce
, pn
->getOp(), pn2
))
3845 // Lexical bindings cannot be used before they are
3846 // initialized. Similar to the JSOP_INITLEXICAL case below.
3847 MOZ_ASSERT(emitOption
!= DefineVars
);
3848 MOZ_ASSERT_IF(emitOption
== InitializeVars
, pn
->pn_xflags
& PNX_POPVAR
);
3849 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0)
3851 if (!EmitInitializeDestructuringDecls(cx
, bce
, pn
->getOp(), pn2
))
3858 * A destructuring initialiser assignment preceded by var will
3859 * never occur to the left of 'in' in a for-in loop. As with 'for
3860 * (var x = i in o)...', this will cause the entire 'var [a, b] =
3861 * i' to be hoisted out of the loop.
3863 MOZ_ASSERT(pn2
->isKind(PNK_ASSIGN
));
3864 MOZ_ASSERT(pn2
->isOp(JSOP_NOP
));
3865 MOZ_ASSERT(emitOption
!= DefineVars
);
3868 * To allow the front end to rewrite var f = x; as f = x; when a
3869 * function f(){} precedes the var, detect simple name assignment
3870 * here and initialize the name.
3872 if (pn2
->pn_left
->isKind(PNK_NAME
)) {
3873 pn3
= pn2
->pn_right
;
3879 if (!EmitDestructuringDecls(cx
, bce
, pn
->getOp(), pn3
))
3882 if (!EmitTree(cx
, bce
, pn2
->pn_right
))
3885 if (!EmitDestructuringOps(cx
, bce
, pn3
, isLetExpr
))
3888 /* If we are not initializing, nothing to pop. */
3889 if (emitOption
!= InitializeVars
) {
3898 * Load initializer early to share code above that jumps to do_name.
3899 * NB: if this var redeclares an existing binding, then pn2 is linked
3900 * on its definition's use-chain and pn_expr has been overlayed with
3903 pn3
= pn2
->maybeExpr();
3906 if (!BindNameToSlot(cx
, bce
, pn2
))
3912 MOZ_ASSERT(op
!= JSOP_CALLEE
);
3913 MOZ_ASSERT(!pn2
->pn_cookie
.isFree() || !pn
->isOp(JSOP_NOP
));
3916 if (!MaybeEmitVarDecl(cx
, bce
, pn
->getOp(), pn2
, &atomIndex
))
3920 MOZ_ASSERT(emitOption
!= DefineVars
);
3921 if (op
== JSOP_SETNAME
||
3922 op
== JSOP_STRICTSETNAME
||
3923 op
== JSOP_SETGNAME
||
3924 op
== JSOP_STRICTSETGNAME
||
3925 op
== JSOP_SETINTRINSIC
)
3927 MOZ_ASSERT(emitOption
!= PushInitialValues
);
3929 if (op
== JSOP_SETNAME
|| op
== JSOP_STRICTSETNAME
)
3930 bindOp
= JSOP_BINDNAME
;
3931 else if (op
== JSOP_SETGNAME
|| op
== JSOP_STRICTSETGNAME
)
3932 bindOp
= JSOP_BINDGNAME
;
3934 bindOp
= JSOP_BINDINTRINSIC
;
3935 if (!EmitIndex32(cx
, bindOp
, atomIndex
, bce
))
3939 bool oldEmittingForInit
= bce
->emittingForInit
;
3940 bce
->emittingForInit
= false;
3941 if (!EmitTree(cx
, bce
, pn3
))
3943 bce
->emittingForInit
= oldEmittingForInit
;
3944 } else if (op
== JSOP_INITLEXICAL
|| isLetExpr
) {
3945 // 'let' bindings cannot be used before they are
3946 // initialized. JSOP_INITLEXICAL distinguishes the binding site.
3947 MOZ_ASSERT(emitOption
!= DefineVars
);
3948 MOZ_ASSERT_IF(emitOption
== InitializeVars
, pn
->pn_xflags
& PNX_POPVAR
);
3949 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0)
3953 // If we are not initializing, nothing to pop. If we are initializing
3954 // lets, we must emit the pops.
3955 if (emitOption
!= InitializeVars
) {
3961 MOZ_ASSERT_IF(pn2
->isDefn(), pn3
== pn2
->pn_expr
);
3962 if (!pn2
->pn_cookie
.isFree()) {
3963 if (!EmitVarOp(cx
, pn2
, op
, bce
))
3966 if (!EmitIndexOp(cx
, op
, atomIndex
, bce
))
3973 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
3977 if (pn
->pn_xflags
& PNX_POPVAR
) {
3978 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
3986 EmitAssignment(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* lhs
, JSOp op
, ParseNode
* rhs
)
3989 * Check left operand type and generate specialized code for it.
3990 * Specialize to avoid ECMA "reference type" values on the operand
3991 * stack, which impose pervasive runtime "GetValue" costs.
3993 jsatomid atomIndex
= (jsatomid
) -1;
3994 jsbytecode offset
= 1;
3996 switch (lhs
->getKind()) {
3998 if (!BindNameToSlot(cx
, bce
, lhs
))
4000 if (lhs
->pn_cookie
.isFree()) {
4001 if (!bce
->makeAtomIndex(lhs
->pn_atom
, &atomIndex
))
4003 if (!lhs
->isConst()) {
4005 if (lhs
->isOp(JSOP_SETNAME
) || lhs
->isOp(JSOP_STRICTSETNAME
))
4006 bindOp
= JSOP_BINDNAME
;
4007 else if (lhs
->isOp(JSOP_SETGNAME
) || lhs
->isOp(JSOP_STRICTSETGNAME
))
4008 bindOp
= JSOP_BINDGNAME
;
4010 bindOp
= JSOP_BINDINTRINSIC
;
4011 if (!EmitIndex32(cx
, bindOp
, atomIndex
, bce
))
4018 if (!EmitTree(cx
, bce
, lhs
->expr()))
4021 if (!bce
->makeAtomIndex(lhs
->pn_atom
, &atomIndex
))
4025 MOZ_ASSERT(lhs
->isArity(PN_BINARY
));
4026 if (!EmitTree(cx
, bce
, lhs
->pn_left
))
4028 if (!EmitTree(cx
, bce
, lhs
->pn_right
))
4036 MOZ_ASSERT(lhs
->pn_xflags
& PNX_SETCALL
);
4037 if (!EmitTree(cx
, bce
, lhs
))
4039 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
4046 if (op
!= JSOP_NOP
) {
4048 switch (lhs
->getKind()) {
4050 if (lhs
->isConst()) {
4051 if (lhs
->isOp(JSOP_CALLEE
)) {
4052 if (Emit1(cx
, bce
, JSOP_CALLEE
) < 0)
4054 } else if (lhs
->isOp(JSOP_GETNAME
) || lhs
->isOp(JSOP_GETGNAME
)) {
4055 if (!EmitIndex32(cx
, lhs
->getOp(), atomIndex
, bce
))
4058 MOZ_ASSERT(JOF_OPTYPE(lhs
->getOp()) != JOF_ATOM
);
4059 if (!EmitVarOp(cx
, lhs
, lhs
->getOp(), bce
))
4062 } else if (lhs
->isOp(JSOP_SETNAME
) || lhs
->isOp(JSOP_STRICTSETNAME
)) {
4063 if (Emit1(cx
, bce
, JSOP_DUP
) < 0)
4065 if (!EmitIndex32(cx
, JSOP_GETXPROP
, atomIndex
, bce
))
4067 } else if (lhs
->isOp(JSOP_SETGNAME
) || lhs
->isOp(JSOP_STRICTSETGNAME
)) {
4068 MOZ_ASSERT(lhs
->pn_cookie
.isFree());
4069 if (!EmitAtomOp(cx
, lhs
, JSOP_GETGNAME
, bce
))
4071 } else if (lhs
->isOp(JSOP_SETINTRINSIC
)) {
4072 MOZ_ASSERT(lhs
->pn_cookie
.isFree());
4073 if (!EmitAtomOp(cx
, lhs
, JSOP_GETINTRINSIC
, bce
))
4077 switch (lhs
->getOp()) {
4078 case JSOP_SETARG
: op
= JSOP_GETARG
; break;
4079 case JSOP_SETLOCAL
: op
= JSOP_GETLOCAL
; break;
4080 case JSOP_SETALIASEDVAR
: op
= JSOP_GETALIASEDVAR
; break;
4081 default: MOZ_CRASH("Bad op");
4083 if (!EmitVarOp(cx
, lhs
, op
, bce
))
4088 if (Emit1(cx
, bce
, JSOP_DUP
) < 0)
4090 bool isLength
= (lhs
->pn_atom
== cx
->names().length
);
4091 if (!EmitIndex32(cx
, isLength
? JSOP_LENGTH
: JSOP_GETPROP
, atomIndex
, bce
))
4096 if (Emit1(cx
, bce
, JSOP_DUP2
) < 0)
4098 if (!EmitElemOpBase(cx
, bce
, JSOP_GETELEM
))
4103 * We just emitted a JSOP_SETCALL (which will always throw) and
4104 * popped the call's return value. Push a random value to make sure
4105 * the stack depth is correct.
4107 MOZ_ASSERT(lhs
->pn_xflags
& PNX_SETCALL
);
4108 if (Emit1(cx
, bce
, JSOP_NULL
) < 0)
4115 /* Now emit the right operand (it may affect the namespace). */
4117 if (!EmitTree(cx
, bce
, rhs
))
4121 * The value to assign is the next enumeration value in a for-in or
4122 * for-of loop. That value has already been emitted: by JSOP_ITERNEXT
4123 * in the for-in case, or via a GETPROP "value" on the result object in
4124 * the for-of case. If offset == 1, that slot is already at the top of
4125 * the stack. Otherwise, rearrange the stack to put that value on top.
4127 if (offset
!= 1 && Emit2(cx
, bce
, JSOP_PICK
, offset
- 1) < 0)
4131 /* If += etc., emit the binary operator with a source note. */
4132 if (op
!= JSOP_NOP
) {
4134 * Take care to avoid SRC_ASSIGNOP if the left-hand side is a const
4135 * declared in the current compilation unit, as in this case (just
4136 * a bit further below) we will avoid emitting the assignment op.
4138 if (!lhs
->isKind(PNK_NAME
) || !lhs
->isConst()) {
4139 if (NewSrcNote(cx
, bce
, SRC_ASSIGNOP
) < 0)
4142 if (Emit1(cx
, bce
, op
) < 0)
4146 /* Finally, emit the specialized assignment bytecode. */
4147 switch (lhs
->getKind()) {
4149 if (lhs
->isOp(JSOP_SETARG
) || lhs
->isOp(JSOP_SETLOCAL
) || lhs
->isOp(JSOP_SETALIASEDVAR
)) {
4150 if (!EmitVarOp(cx
, lhs
, lhs
->getOp(), bce
))
4153 if (!EmitIndexOp(cx
, lhs
->getOp(), atomIndex
, bce
))
4159 JSOp setOp
= bce
->sc
->strict
? JSOP_STRICTSETPROP
: JSOP_SETPROP
;
4160 if (!EmitIndexOp(cx
, setOp
, atomIndex
, bce
))
4165 /* Do nothing. The JSOP_SETCALL we emitted will always throw. */
4166 MOZ_ASSERT(lhs
->pn_xflags
& PNX_SETCALL
);
4170 JSOp setOp
= bce
->sc
->strict
? JSOP_STRICTSETELEM
: JSOP_SETELEM
;
4171 if (Emit1(cx
, bce
, setOp
) < 0)
4177 if (!EmitDestructuringOps(cx
, bce
, lhs
))
4187 ParseNode::getConstantValue(ExclusiveContext
* cx
, AllowConstantObjects allowObjects
, MutableHandleValue vp
)
4189 switch (getKind()) {
4191 vp
.setNumber(pn_dval
);
4193 case PNK_TEMPLATE_STRING
:
4195 vp
.setString(pn_atom
);
4198 vp
.setBoolean(true);
4201 vp
.setBoolean(false);
4206 case PNK_CALLSITEOBJ
:
4208 RootedValue
value(cx
);
4212 if (allowObjects
== DontAllowObjects
) {
4213 vp
.setMagic(JS_GENERIC_MAGIC
);
4216 if (allowObjects
== DontAllowNestedObjects
)
4217 allowObjects
= DontAllowObjects
;
4219 if (getKind() == PNK_CALLSITEOBJ
) {
4220 count
= pn_count
- 1;
4221 pn
= pn_head
->pn_next
;
4223 MOZ_ASSERT(isOp(JSOP_NEWINIT
) && !(pn_xflags
& PNX_NONCONST
));
4228 RootedArrayObject
obj(cx
, NewDenseFullyAllocatedArray(cx
, count
, nullptr, MaybeSingletonObject
));
4234 for (; pn
; idx
++, pn
= pn
->pn_next
) {
4235 if (!pn
->getConstantValue(cx
, allowObjects
, &value
))
4237 if (value
.isMagic(JS_GENERIC_MAGIC
)) {
4238 vp
.setMagic(JS_GENERIC_MAGIC
);
4241 id
= INT_TO_JSID(idx
);
4242 if (!JSObject::defineGeneric(cx
, obj
, id
, value
, nullptr, nullptr, JSPROP_ENUMERATE
))
4245 MOZ_ASSERT(idx
== count
);
4247 types::FixArrayType(cx
, obj
);
4252 MOZ_ASSERT(isOp(JSOP_NEWINIT
));
4253 MOZ_ASSERT(!(pn_xflags
& PNX_NONCONST
));
4255 if (allowObjects
== DontAllowObjects
) {
4256 vp
.setMagic(JS_GENERIC_MAGIC
);
4259 if (allowObjects
== DontAllowNestedObjects
)
4260 allowObjects
= DontAllowObjects
;
4262 gc::AllocKind kind
= GuessObjectGCKind(pn_count
);
4263 RootedPlainObject
obj(cx
,
4264 NewBuiltinClassInstance
<PlainObject
>(cx
, kind
, MaybeSingletonObject
));
4268 RootedValue
value(cx
), idvalue(cx
);
4269 for (ParseNode
* pn
= pn_head
; pn
; pn
= pn
->pn_next
) {
4270 if (!pn
->pn_right
->getConstantValue(cx
, allowObjects
, &value
))
4272 if (value
.isMagic(JS_GENERIC_MAGIC
)) {
4273 vp
.setMagic(JS_GENERIC_MAGIC
);
4277 ParseNode
* pnid
= pn
->pn_left
;
4278 if (pnid
->isKind(PNK_NUMBER
)) {
4279 idvalue
= NumberValue(pnid
->pn_dval
);
4281 MOZ_ASSERT(pnid
->isKind(PNK_NAME
) || pnid
->isKind(PNK_STRING
));
4282 MOZ_ASSERT(pnid
->pn_atom
!= cx
->names().proto
);
4283 idvalue
= StringValue(pnid
->pn_atom
);
4287 if (IsDefinitelyIndex(idvalue
, &index
)) {
4288 if (!JSObject::defineElement(cx
, obj
, index
, value
, nullptr, nullptr,
4297 JSAtom
* name
= ToAtom
<CanGC
>(cx
, idvalue
);
4301 if (name
->isIndex(&index
)) {
4302 if (!JSObject::defineElement(cx
, obj
, index
, value
,
4303 nullptr, nullptr, JSPROP_ENUMERATE
))
4306 if (!JSObject::defineProperty(cx
, obj
, name
->asPropertyName(), value
,
4307 nullptr, nullptr, JSPROP_ENUMERATE
))
4314 types::FixObjectType(cx
, obj
);
4319 MOZ_CRASH("Unexpected node");
4325 EmitSingletonInitialiser(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
4327 RootedValue
value(cx
);
4328 if (!pn
->getConstantValue(cx
, ParseNode::AllowObjects
, &value
))
4331 RootedNativeObject
obj(cx
, &value
.toObject().as
<NativeObject
>());
4332 if (!obj
->is
<ArrayObject
>() && !JSObject::setSingletonType(cx
, obj
))
4335 ObjectBox
* objbox
= bce
->parser
->newObjectBox(obj
);
4339 return EmitObjectOp(cx
, objbox
, JSOP_OBJECT
, bce
);
4343 EmitCallSiteObject(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
4345 RootedValue
value(cx
);
4346 if (!pn
->getConstantValue(cx
, ParseNode::AllowObjects
, &value
))
4349 MOZ_ASSERT(value
.isObject());
4351 ObjectBox
* objbox1
= bce
->parser
->newObjectBox(&value
.toObject().as
<NativeObject
>());
4355 if (!pn
->as
<CallSiteNode
>().getRawArrayValue(cx
, &value
))
4358 MOZ_ASSERT(value
.isObject());
4360 ObjectBox
* objbox2
= bce
->parser
->newObjectBox(&value
.toObject().as
<NativeObject
>());
4364 return EmitObjectPairOp(cx
, objbox1
, objbox2
, JSOP_CALLSITEOBJ
, bce
);
4367 /* See the SRC_FOR source note offsetBias comments later in this file. */
4368 JS_STATIC_ASSERT(JSOP_NOP_LENGTH
== 1);
4369 JS_STATIC_ASSERT(JSOP_POP_LENGTH
== 1);
4373 class EmitLevelManager
4375 BytecodeEmitter
* bce
;
4377 explicit EmitLevelManager(BytecodeEmitter
* bce
) : bce(bce
) { bce
->emitLevel
++; }
4378 ~EmitLevelManager() { bce
->emitLevel
--; }
4381 } /* anonymous namespace */
4384 EmitCatch(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
4387 * Morph STMT_BLOCK to STMT_CATCH, note the block entry code offset,
4388 * and save the block object atom.
4390 StmtInfoBCE
* stmt
= bce
->topStmt
;
4391 MOZ_ASSERT(stmt
->type
== STMT_BLOCK
&& stmt
->isBlockScope
);
4392 stmt
->type
= STMT_CATCH
;
4394 /* Go up one statement info record to the TRY or FINALLY record. */
4396 MOZ_ASSERT(stmt
->type
== STMT_TRY
|| stmt
->type
== STMT_FINALLY
);
4398 /* Pick up the pending exception and bind it to the catch variable. */
4399 if (Emit1(cx
, bce
, JSOP_EXCEPTION
) < 0)
4403 * Dup the exception object if there is a guard for rethrowing to use
4404 * it later when rethrowing or in other catches.
4406 if (pn
->pn_kid2
&& Emit1(cx
, bce
, JSOP_DUP
) < 0)
4409 ParseNode
* pn2
= pn
->pn_kid1
;
4410 switch (pn2
->getKind()) {
4413 if (!EmitDestructuringOps(cx
, bce
, pn2
))
4415 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
4420 /* Inline and specialize BindNameToSlot for pn2. */
4421 MOZ_ASSERT(!pn2
->pn_cookie
.isFree());
4422 if (!EmitVarOp(cx
, pn2
, JSOP_INITLEXICAL
, bce
))
4424 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
4432 // If there is a guard expression, emit it and arrange to jump to the next
4433 // catch block if the guard expression is false.
4435 if (!EmitTree(cx
, bce
, pn
->pn_kid2
))
4438 // If the guard expression is false, fall through, pop the block scope,
4439 // and jump to the next catch block. Otherwise jump over that code and
4440 // pop the dupped exception.
4441 ptrdiff_t guardCheck
= EmitJump(cx
, bce
, JSOP_IFNE
, 0);
4446 NonLocalExitScope
nle(cx
, bce
);
4448 // Move exception back to cx->exception to prepare for
4450 if (Emit1(cx
, bce
, JSOP_THROWING
) < 0)
4453 // Leave the scope for this catch block.
4454 if (!nle
.prepareForNonLocalJump(stmt
))
4457 // Jump to the next handler. The jump target is backpatched by EmitTry.
4458 ptrdiff_t guardJump
= EmitJump(cx
, bce
, JSOP_GOTO
, 0);
4461 stmt
->guardJump() = guardJump
;
4464 // Back to normal control flow.
4465 SetJumpOffsetAt(bce
, guardCheck
);
4467 // Pop duplicated exception object as we no longer need it.
4468 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
4472 /* Emit the catch body. */
4473 return EmitTree(cx
, bce
, pn
->pn_kid3
);
4476 // Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See the
4477 // comment on EmitSwitch.
4479 MOZ_NEVER_INLINE
static bool
4480 EmitTry(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
4482 StmtInfoBCE
stmtInfo(cx
);
4484 // Push stmtInfo to track jumps-over-catches and gosubs-to-finally
4487 // When a finally block is active (STMT_FINALLY in our parse context),
4488 // non-local jumps (including jumps-over-catches) result in a GOSUB
4489 // being written into the bytecode stream and fixed-up later (c.f.
4490 // EmitBackPatchOp and BackPatch).
4492 PushStatementBCE(bce
, &stmtInfo
, pn
->pn_kid3
? STMT_FINALLY
: STMT_TRY
, bce
->offset());
4494 // Since an exception can be thrown at any place inside the try block,
4495 // we need to restore the stack and the scope chain before we transfer
4496 // the control to the exception handler.
4498 // For that we store in a try note associated with the catch or
4499 // finally block the stack depth upon the try entry. The interpreter
4500 // uses this depth to properly unwind the stack and the scope chain.
4502 int depth
= bce
->stackDepth
;
4504 // Record the try location, then emit the try block.
4505 ptrdiff_t noteIndex
= NewSrcNote(cx
, bce
, SRC_TRY
);
4506 if (noteIndex
< 0 || Emit1(cx
, bce
, JSOP_TRY
) < 0)
4508 ptrdiff_t tryStart
= bce
->offset();
4509 if (!EmitTree(cx
, bce
, pn
->pn_kid1
))
4511 MOZ_ASSERT(depth
== bce
->stackDepth
);
4513 // GOSUB to finally, if present.
4515 if (EmitBackPatchOp(cx
, bce
, &stmtInfo
.gosubs()) < 0)
4519 // Source note points to the jump at the end of the try block.
4520 if (!SetSrcNoteOffset(cx
, bce
, noteIndex
, 0, bce
->offset() - tryStart
+ JSOP_TRY_LENGTH
))
4523 // Emit jump over catch and/or finally.
4524 ptrdiff_t catchJump
= -1;
4525 if (EmitBackPatchOp(cx
, bce
, &catchJump
) < 0)
4528 ptrdiff_t tryEnd
= bce
->offset();
4530 // If this try has a catch block, emit it.
4531 if (ParseNode
* pn2
= pn
->pn_kid2
) {
4532 // The emitted code for a catch block looks like:
4534 // [pushblockscope] only if any local aliased
4536 // if there is a catchguard:
4538 // setlocal 0; pop assign or possibly destructure exception
4539 // if there is a catchguard:
4540 // < catchguard code >
4543 // [popblockscope] only if any local aliased
4544 // throwing pop exception to cx->exception
4545 // goto <next catch block>
4547 // < catch block contents >
4549 // [popblockscope] only if any local aliased
4550 // goto <end of catch blocks> non-local; finally applies
4552 // If there's no catch block without a catchguard, the last <next catch
4553 // block> points to rethrow code. This code will [gosub] to the finally
4554 // code if appropriate, and is also used for the catch-all trynote for
4555 // capturing exceptions thrown from catch{} blocks.
4557 for (ParseNode
* pn3
= pn2
->pn_head
; pn3
; pn3
= pn3
->pn_next
) {
4558 MOZ_ASSERT(bce
->stackDepth
== depth
);
4560 // Emit the lexical scope and catch body.
4561 MOZ_ASSERT(pn3
->isKind(PNK_LEXICALSCOPE
));
4562 if (!EmitTree(cx
, bce
, pn3
))
4565 // gosub <finally>, if required.
4567 if (EmitBackPatchOp(cx
, bce
, &stmtInfo
.gosubs()) < 0)
4569 MOZ_ASSERT(bce
->stackDepth
== depth
);
4572 // Jump over the remaining catch blocks. This will get fixed
4573 // up to jump to after catch/finally.
4574 if (EmitBackPatchOp(cx
, bce
, &catchJump
) < 0)
4577 // If this catch block had a guard clause, patch the guard jump to
4579 if (stmtInfo
.guardJump() != -1) {
4580 SetJumpOffsetAt(bce
, stmtInfo
.guardJump());
4581 stmtInfo
.guardJump() = -1;
4583 // If this catch block is the last one, rethrow, delegating
4584 // execution of any finally block to the exception handler.
4585 if (!pn3
->pn_next
) {
4586 if (Emit1(cx
, bce
, JSOP_EXCEPTION
) < 0)
4588 if (Emit1(cx
, bce
, JSOP_THROW
) < 0)
4595 MOZ_ASSERT(bce
->stackDepth
== depth
);
4597 // Emit the finally handler, if there is one.
4598 ptrdiff_t finallyStart
= 0;
4600 // Fix up the gosubs that might have been emitted before non-local
4601 // jumps to the finally code.
4602 if (!BackPatch(cx
, bce
, stmtInfo
.gosubs(), bce
->code().end(), JSOP_GOSUB
))
4605 finallyStart
= bce
->offset();
4607 // Indicate that we're emitting a subroutine body.
4608 stmtInfo
.type
= STMT_SUBROUTINE
;
4609 if (!UpdateSourceCoordNotes(cx
, bce
, pn
->pn_kid3
->pn_pos
.begin
))
4611 if (Emit1(cx
, bce
, JSOP_FINALLY
) < 0 ||
4612 !EmitTree(cx
, bce
, pn
->pn_kid3
) ||
4613 Emit1(cx
, bce
, JSOP_RETSUB
) < 0)
4617 MOZ_ASSERT(bce
->stackDepth
== depth
);
4619 if (!PopStatementBCE(cx
, bce
))
4622 // ReconstructPCStack needs a NOP here to mark the end of the last catch block.
4623 if (Emit1(cx
, bce
, JSOP_NOP
) < 0)
4626 // Fix up the end-of-try/catch jumps to come here.
4627 if (!BackPatch(cx
, bce
, catchJump
, bce
->code().end(), JSOP_GOTO
))
4630 // Add the try note last, to let post-order give us the right ordering
4631 // (first to last for a given nesting level, inner to outer by level).
4632 if (pn
->pn_kid2
&& !bce
->tryNoteList
.append(JSTRY_CATCH
, depth
, tryStart
, tryEnd
))
4635 // If we've got a finally, mark try+catch region with additional
4636 // trynote to catch exceptions (re)thrown from a catch block or
4637 // for the try{}finally{} case.
4638 if (pn
->pn_kid3
&& !bce
->tryNoteList
.append(JSTRY_FINALLY
, depth
, tryStart
, finallyStart
))
4645 EmitIf(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
4647 StmtInfoBCE
stmtInfo(cx
);
4649 /* Initialize so we can detect else-if chains and avoid recursion. */
4650 stmtInfo
.type
= STMT_IF
;
4653 ptrdiff_t noteIndex
= -1;
4656 /* Emit code for the condition before pushing stmtInfo. */
4657 if (!EmitTree(cx
, bce
, pn
->pn_kid1
))
4659 ptrdiff_t top
= bce
->offset();
4660 if (stmtInfo
.type
== STMT_IF
) {
4661 PushStatementBCE(bce
, &stmtInfo
, STMT_IF
, top
);
4664 * We came here from the goto further below that detects else-if
4665 * chains, so we must mutate stmtInfo back into a STMT_IF record.
4666 * Also we need a note offset for SRC_IF_ELSE to help IonMonkey.
4668 MOZ_ASSERT(stmtInfo
.type
== STMT_ELSE
);
4669 stmtInfo
.type
= STMT_IF
;
4670 stmtInfo
.update
= top
;
4671 if (!SetSrcNoteOffset(cx
, bce
, noteIndex
, 0, jmp
- beq
))
4675 /* Emit an annotated branch-if-false around the then part. */
4676 ParseNode
* pn3
= pn
->pn_kid3
;
4677 noteIndex
= NewSrcNote(cx
, bce
, pn3
? SRC_IF_ELSE
: SRC_IF
);
4680 beq
= EmitJump(cx
, bce
, JSOP_IFEQ
, 0);
4684 /* Emit code for the then and optional else parts. */
4685 if (!EmitTree(cx
, bce
, pn
->pn_kid2
))
4688 /* Modify stmtInfo so we know we're in the else part. */
4689 stmtInfo
.type
= STMT_ELSE
;
4692 * Emit a JSOP_BACKPATCH op to jump from the end of our then part
4693 * around the else part. The PopStatementBCE call at the bottom of
4694 * this function will fix up the backpatch chain linked from
4697 jmp
= EmitGoto(cx
, bce
, &stmtInfo
, &stmtInfo
.breaks
);
4701 /* Ensure the branch-if-false comes here, then emit the else. */
4702 SetJumpOffsetAt(bce
, beq
);
4703 if (pn3
->isKind(PNK_IF
)) {
4708 if (!EmitTree(cx
, bce
, pn3
))
4712 * Annotate SRC_IF_ELSE with the offset from branch to jump, for
4713 * IonMonkey's benefit. We can't just "back up" from the pc
4714 * of the else clause, because we don't know whether an extended
4715 * jump was required to leap from the end of the then clause over
4718 if (!SetSrcNoteOffset(cx
, bce
, noteIndex
, 0, jmp
- beq
))
4721 /* No else part, fixup the branch-if-false to come here. */
4722 SetJumpOffsetAt(bce
, beq
);
4724 return PopStatementBCE(cx
, bce
);
4728 * pnLet represents one of:
4730 * let-expression: (let (x = y) EXPR)
4731 * let-statement: let (x = y) { ... }
4733 * For a let-expression 'let (x = a, [y,z] = b) e', EmitLet produces:
4735 * bytecode stackDepth srcnotes
4748 * pushblockscope (if needed)
4751 * popblockscope (if needed)
4753 * Note that, since pushblockscope simply changes fp->scopeChain and does not
4754 * otherwise touch the stack, evaluation of the let-var initializers must leave
4755 * the initial value in the let-var's future slot.
4758 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
4759 * the comment on EmitSwitch.
4761 MOZ_NEVER_INLINE
static bool
4762 EmitLet(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pnLet
)
4764 MOZ_ASSERT(pnLet
->isArity(PN_BINARY
));
4765 ParseNode
* varList
= pnLet
->pn_left
;
4766 MOZ_ASSERT(varList
->isArity(PN_LIST
));
4767 ParseNode
* letBody
= pnLet
->pn_right
;
4768 MOZ_ASSERT(letBody
->isLexical() && letBody
->isKind(PNK_LEXICALSCOPE
));
4770 int letHeadDepth
= bce
->stackDepth
;
4772 if (!EmitVariables(cx
, bce
, varList
, PushInitialValues
, true))
4775 /* Push storage for hoisted let decls (e.g. 'let (x) { let y }'). */
4776 uint32_t valuesPushed
= bce
->stackDepth
- letHeadDepth
;
4777 StmtInfoBCE
stmtInfo(cx
);
4778 if (!EnterBlockScope(cx
, bce
, &stmtInfo
, letBody
->pn_objbox
, JSOP_UNINITIALIZED
, valuesPushed
))
4781 if (!EmitTree(cx
, bce
, letBody
->pn_expr
))
4784 if (!LeaveNestedScope(cx
, bce
, &stmtInfo
))
4791 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
4792 * the comment on EmitSwitch.
4794 MOZ_NEVER_INLINE
static bool
4795 EmitLexicalScope(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
4797 MOZ_ASSERT(pn
->isKind(PNK_LEXICALSCOPE
));
4799 StmtInfoBCE
stmtInfo(cx
);
4800 if (!EnterBlockScope(cx
, bce
, &stmtInfo
, pn
->pn_objbox
, JSOP_UNINITIALIZED
, 0))
4803 if (!EmitTree(cx
, bce
, pn
->pn_expr
))
4806 if (!LeaveNestedScope(cx
, bce
, &stmtInfo
))
4813 EmitWith(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
4815 StmtInfoBCE
stmtInfo(cx
);
4816 if (!EmitTree(cx
, bce
, pn
->pn_left
))
4818 if (!EnterNestedScope(cx
, bce
, &stmtInfo
, pn
->pn_binary_obj
, STMT_WITH
))
4820 if (!EmitTree(cx
, bce
, pn
->pn_right
))
4822 if (!LeaveNestedScope(cx
, bce
, &stmtInfo
))
4828 * EmitIterator expects the iterable to already be on the stack.
4829 * It will replace that stack value with the corresponding iterator
4832 EmitIterator(ExclusiveContext
* cx
, BytecodeEmitter
* bce
)
4834 // Convert iterable to iterator.
4835 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // OBJ OBJ
4837 #ifdef JS_HAS_SYMBOLS
4838 if (Emit2(cx
, bce
, JSOP_SYMBOL
, jsbytecode(JS::SymbolCode::iterator
)) < 0) // OBJ OBJ @@ITERATOR
4840 if (!EmitElemOpBase(cx
, bce
, JSOP_CALLELEM
)) // OBJ ITERFN
4843 if (!EmitAtomOp(cx
, cx
->names().std_iterator
, JSOP_CALLPROP
, bce
)) // OBJ ITERFN
4846 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0) // ITERFN OBJ
4848 if (EmitCall(cx
, bce
, JSOP_CALL
, 0) < 0) // ITER
4850 CheckTypeSet(cx
, bce
, JSOP_CALL
);
4855 EmitForInOrOfVariables(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, bool* letDecl
)
4857 *letDecl
= pn
->isKind(PNK_LEXICALSCOPE
);
4858 MOZ_ASSERT_IF(*letDecl
, pn
->isLexical());
4860 // If the left part is 'var x', emit code to define x if necessary using a
4861 // prolog opcode, but do not emit a pop. If it is 'let x', EnterBlockScope
4862 // will initialize let bindings in EmitForOf and EmitForIn with
4865 // Due to the horror of legacy comprehensions, there is a third case where
4866 // we have PNK_LET without a lexical scope, because those expressions are
4867 // parsed with single lexical scope for the entire comprehension. In this
4868 // case we must initialize the lets to not trigger dead zone checks via
4871 bce
->emittingForInit
= true;
4872 if (pn
->isKind(PNK_VAR
)) {
4873 if (!EmitVariables(cx
, bce
, pn
, DefineVars
))
4876 MOZ_ASSERT(pn
->isKind(PNK_LET
));
4877 if (!EmitVariables(cx
, bce
, pn
, InitializeVars
))
4880 bce
->emittingForInit
= false;
4888 * If type is STMT_FOR_OF_LOOP, it emits bytecode for for-of loop.
4889 * pn should be PNK_FOR, and pn->pn_left should be PNK_FOROF.
4891 * If type is STMT_SPREAD, it emits bytecode for spread operator.
4892 * pn should be nullptr.
4893 * Please refer the comment above EmitSpread for additional information about
4897 EmitForOf(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, StmtType type
, ParseNode
* pn
, ptrdiff_t top
)
4899 MOZ_ASSERT(type
== STMT_FOR_OF_LOOP
|| type
== STMT_SPREAD
);
4900 MOZ_ASSERT_IF(type
== STMT_FOR_OF_LOOP
, pn
&& pn
->pn_left
->isKind(PNK_FOROF
));
4901 MOZ_ASSERT_IF(type
== STMT_SPREAD
, !pn
);
4903 ParseNode
* forHead
= pn
? pn
->pn_left
: nullptr;
4904 ParseNode
* forHeadExpr
= forHead
? forHead
->pn_kid3
: nullptr;
4905 ParseNode
* forBody
= pn
? pn
->pn_right
: nullptr;
4907 ParseNode
* pn1
= forHead
? forHead
->pn_kid1
: nullptr;
4908 bool letDecl
= false;
4909 if (pn1
&& !EmitForInOrOfVariables(cx
, bce
, pn1
, &letDecl
))
4912 if (type
== STMT_FOR_OF_LOOP
) {
4913 // For-of loops run with two values on the stack: the iterator and the
4914 // current result object.
4916 // Compile the object expression to the right of 'of'.
4917 if (!EmitTree(cx
, bce
, forHeadExpr
))
4919 if (!EmitIterator(cx
, bce
))
4922 // Push a dummy result so that we properly enter iteration midstream.
4923 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0) // ITER RESULT
4927 // Enter the block before the loop body, after evaluating the obj.
4928 // Initialize let bindings with undefined when entering, as the name
4929 // assigned to is a plain assignment.
4930 StmtInfoBCE
letStmt(cx
);
4932 if (!EnterBlockScope(cx
, bce
, &letStmt
, pn1
->pn_objbox
, JSOP_UNDEFINED
, 0))
4936 LoopStmtInfo
stmtInfo(cx
);
4937 PushLoopStatement(bce
, &stmtInfo
, type
, top
);
4939 // Jump down to the loop condition to minimize overhead assuming at least
4940 // one iteration, as the other loop forms do. Annotate so IonMonkey can
4941 // find the loop-closing jump.
4942 int noteIndex
= NewSrcNote(cx
, bce
, SRC_FOR_OF
);
4945 ptrdiff_t jmp
= EmitJump(cx
, bce
, JSOP_GOTO
, 0);
4949 top
= bce
->offset();
4950 SET_STATEMENT_TOP(&stmtInfo
, top
);
4951 if (EmitLoopHead(cx
, bce
, nullptr) < 0)
4954 if (type
== STMT_SPREAD
)
4958 int loopDepth
= bce
->stackDepth
;
4961 // Emit code to assign result.value to the iteration variable.
4962 if (type
== STMT_FOR_OF_LOOP
) {
4963 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // ITER RESULT RESULT
4966 if (!EmitAtomOp(cx
, cx
->names().value
, JSOP_GETPROP
, bce
)) // ... RESULT VALUE
4968 if (type
== STMT_FOR_OF_LOOP
) {
4969 if (!EmitAssignment(cx
, bce
, forHead
->pn_kid2
, JSOP_NOP
, nullptr)) // ITER RESULT VALUE
4971 if (Emit1(cx
, bce
, JSOP_POP
) < 0) // ITER RESULT
4974 // The stack should be balanced around the assignment opcode sequence.
4975 MOZ_ASSERT(bce
->stackDepth
== loopDepth
);
4977 // Emit code for the loop body.
4978 if (!EmitTree(cx
, bce
, forBody
))
4981 // Set loop and enclosing "update" offsets, for continue.
4982 StmtInfoBCE
* stmt
= &stmtInfo
;
4984 stmt
->update
= bce
->offset();
4985 } while ((stmt
= stmt
->down
) != nullptr && stmt
->type
== STMT_LABEL
);
4987 if (Emit1(cx
, bce
, JSOP_INITELEM_INC
) < 0) // ITER ARR (I+1)
4990 MOZ_ASSERT(bce
->stackDepth
== loopDepth
- 1);
4992 // STMT_SPREAD never contain continue, so do not set "update" offset.
4995 // COME FROM the beginning of the loop to here.
4996 SetJumpOffsetAt(bce
, jmp
);
4997 if (!EmitLoopEntry(cx
, bce
, forHeadExpr
))
5000 if (type
== STMT_FOR_OF_LOOP
) {
5001 if (Emit1(cx
, bce
, JSOP_POP
) < 0) // ITER
5003 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // ITER ITER
5006 if (!EmitDupAt(cx
, bce
, bce
->stackDepth
- 1 - 2)) // ITER ARR I ITER
5009 if (!EmitIteratorNext(cx
, bce
, forHead
)) // ... RESULT
5011 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // ... RESULT RESULT
5013 if (!EmitAtomOp(cx
, cx
->names().done
, JSOP_GETPROP
, bce
)) // ... RESULT DONE?
5016 ptrdiff_t beq
= EmitJump(cx
, bce
, JSOP_IFEQ
, top
- bce
->offset()); // ... RESULT
5020 MOZ_ASSERT(bce
->stackDepth
== loopDepth
);
5022 // Let Ion know where the closing jump of this loop is.
5023 if (!SetSrcNoteOffset(cx
, bce
, (unsigned)noteIndex
, 0, beq
- jmp
))
5026 // Fixup breaks and continues.
5027 // For STMT_SPREAD, just pop pc->topStmt.
5028 if (!PopStatementBCE(cx
, bce
))
5031 if (!bce
->tryNoteList
.append(JSTRY_FOR_OF
, bce
->stackDepth
, top
, bce
->offset()))
5035 if (!LeaveNestedScope(cx
, bce
, &letStmt
))
5039 if (type
== STMT_SPREAD
) {
5040 if (Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)3) < 0) // ARR I RESULT ITER
5044 // Pop the result and the iter.
5045 EMIT_UINT16_IMM_OP(JSOP_POPN
, 2);
5051 EmitForIn(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, ptrdiff_t top
)
5053 ParseNode
* forHead
= pn
->pn_left
;
5054 ParseNode
* forBody
= pn
->pn_right
;
5056 ParseNode
* pn1
= forHead
->pn_kid1
;
5057 bool letDecl
= false;
5058 if (pn1
&& !EmitForInOrOfVariables(cx
, bce
, pn1
, &letDecl
))
5061 /* Compile the object expression to the right of 'in'. */
5062 if (!EmitTree(cx
, bce
, forHead
->pn_kid3
))
5066 * Emit a bytecode to convert top of stack value to the iterator
5067 * object depending on the loop variant (for-in, for-each-in, or
5068 * destructuring for-in).
5070 MOZ_ASSERT(pn
->isOp(JSOP_ITER
));
5071 if (Emit2(cx
, bce
, JSOP_ITER
, (uint8_t) pn
->pn_iflags
) < 0)
5074 // For-in loops have both the iterator and the value on the stack. Push
5075 // undefined to balance the stack.
5076 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0)
5079 // Enter the block before the loop body, after evaluating the obj.
5080 // Initialize let bindings with undefined when entering, as the name
5081 // assigned to is a plain assignment.
5082 StmtInfoBCE
letStmt(cx
);
5084 if (!EnterBlockScope(cx
, bce
, &letStmt
, pn1
->pn_objbox
, JSOP_UNDEFINED
, 0))
5088 LoopStmtInfo
stmtInfo(cx
);
5089 PushLoopStatement(bce
, &stmtInfo
, STMT_FOR_IN_LOOP
, top
);
5091 /* Annotate so IonMonkey can find the loop-closing jump. */
5092 int noteIndex
= NewSrcNote(cx
, bce
, SRC_FOR_IN
);
5097 * Jump down to the loop condition to minimize overhead assuming at
5098 * least one iteration, as the other loop forms do.
5100 ptrdiff_t jmp
= EmitJump(cx
, bce
, JSOP_GOTO
, 0);
5104 top
= bce
->offset();
5105 SET_STATEMENT_TOP(&stmtInfo
, top
);
5106 if (EmitLoopHead(cx
, bce
, nullptr) < 0)
5110 int loopDepth
= bce
->stackDepth
;
5113 // Emit code to assign the enumeration value to the left hand side, but
5114 // also leave it on the stack.
5115 if (!EmitAssignment(cx
, bce
, forHead
->pn_kid2
, JSOP_NOP
, nullptr))
5118 /* The stack should be balanced around the assignment opcode sequence. */
5119 MOZ_ASSERT(bce
->stackDepth
== loopDepth
);
5121 /* Emit code for the loop body. */
5122 if (!EmitTree(cx
, bce
, forBody
))
5125 /* Set loop and enclosing "update" offsets, for continue. */
5126 StmtInfoBCE
* stmt
= &stmtInfo
;
5128 stmt
->update
= bce
->offset();
5129 } while ((stmt
= stmt
->down
) != nullptr && stmt
->type
== STMT_LABEL
);
5132 * Fixup the goto that starts the loop to jump down to JSOP_MOREITER.
5134 SetJumpOffsetAt(bce
, jmp
);
5135 if (!EmitLoopEntry(cx
, bce
, nullptr))
5137 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
5139 if (Emit1(cx
, bce
, JSOP_MOREITER
) < 0)
5141 if (Emit1(cx
, bce
, JSOP_ISNOITER
) < 0)
5143 ptrdiff_t beq
= EmitJump(cx
, bce
, JSOP_IFEQ
, top
- bce
->offset());
5147 /* Set the srcnote offset so we can find the closing jump. */
5148 if (!SetSrcNoteOffset(cx
, bce
, (unsigned)noteIndex
, 0, beq
- jmp
))
5151 // Fix up breaks and continues.
5152 if (!PopStatementBCE(cx
, bce
))
5155 // Pop the enumeration value.
5156 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
5159 if (!bce
->tryNoteList
.append(JSTRY_FOR_IN
, bce
->stackDepth
, top
, bce
->offset()))
5161 if (Emit1(cx
, bce
, JSOP_ENDITER
) < 0)
5165 if (!LeaveNestedScope(cx
, bce
, &letStmt
))
5173 EmitNormalFor(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, ptrdiff_t top
)
5175 LoopStmtInfo
stmtInfo(cx
);
5176 PushLoopStatement(bce
, &stmtInfo
, STMT_FOR_LOOP
, top
);
5178 ParseNode
* forHead
= pn
->pn_left
;
5179 ParseNode
* forBody
= pn
->pn_right
;
5181 /* C-style for (init; cond; update) ... loop. */
5183 ParseNode
* pn3
= forHead
->pn_kid1
;
5185 // No initializer, but emit a nop so that there's somewhere to put the
5186 // SRC_FOR annotation that IonBuilder will look for.
5189 bce
->emittingForInit
= true;
5190 if (!UpdateSourceCoordNotes(cx
, bce
, pn3
->pn_pos
.begin
))
5192 if (!EmitTree(cx
, bce
, pn3
))
5194 bce
->emittingForInit
= false;
5198 * NB: the SRC_FOR note has offsetBias 1 (JSOP_{NOP,POP}_LENGTH).
5199 * Use tmp to hold the biased srcnote "top" offset, which differs
5200 * from the top local variable by the length of the JSOP_GOTO
5201 * emitted in between tmp and top if this loop has a condition.
5203 int noteIndex
= NewSrcNote(cx
, bce
, SRC_FOR
);
5204 if (noteIndex
< 0 || Emit1(cx
, bce
, op
) < 0)
5206 ptrdiff_t tmp
= bce
->offset();
5209 if (forHead
->pn_kid2
) {
5210 /* Goto the loop condition, which branches back to iterate. */
5211 jmp
= EmitJump(cx
, bce
, JSOP_GOTO
, 0);
5215 if (op
!= JSOP_NOP
&& Emit1(cx
, bce
, JSOP_NOP
) < 0)
5219 top
= bce
->offset();
5220 SET_STATEMENT_TOP(&stmtInfo
, top
);
5222 /* Emit code for the loop body. */
5223 if (EmitLoopHead(cx
, bce
, forBody
) < 0)
5225 if (jmp
== -1 && !EmitLoopEntry(cx
, bce
, forBody
))
5227 if (!EmitTree(cx
, bce
, forBody
))
5230 /* Set the second note offset so we can find the update part. */
5231 MOZ_ASSERT(noteIndex
!= -1);
5232 ptrdiff_t tmp2
= bce
->offset();
5234 /* Set loop and enclosing "update" offsets, for continue. */
5235 StmtInfoBCE
* stmt
= &stmtInfo
;
5237 stmt
->update
= bce
->offset();
5238 } while ((stmt
= stmt
->down
) != nullptr && stmt
->type
== STMT_LABEL
);
5240 /* Check for update code to do before the condition (if any). */
5241 pn3
= forHead
->pn_kid3
;
5243 if (!UpdateSourceCoordNotes(cx
, bce
, pn3
->pn_pos
.begin
))
5246 if (!EmitTree(cx
, bce
, pn3
))
5249 /* Always emit the POP or NOP to help IonBuilder. */
5250 if (Emit1(cx
, bce
, op
) < 0)
5253 /* Restore the absolute line number for source note readers. */
5254 uint32_t lineNum
= bce
->parser
->tokenStream
.srcCoords
.lineNum(pn
->pn_pos
.end
);
5255 if (bce
->currentLine() != lineNum
) {
5256 if (NewSrcNote2(cx
, bce
, SRC_SETLINE
, ptrdiff_t(lineNum
)) < 0)
5258 bce
->current
->currentLine
= lineNum
;
5259 bce
->current
->lastColumn
= 0;
5263 ptrdiff_t tmp3
= bce
->offset();
5265 if (forHead
->pn_kid2
) {
5266 /* Fix up the goto from top to target the loop condition. */
5267 MOZ_ASSERT(jmp
>= 0);
5268 SetJumpOffsetAt(bce
, jmp
);
5269 if (!EmitLoopEntry(cx
, bce
, forHead
->pn_kid2
))
5272 if (!EmitTree(cx
, bce
, forHead
->pn_kid2
))
5276 /* Set the first note offset so we can find the loop condition. */
5277 if (!SetSrcNoteOffset(cx
, bce
, (unsigned)noteIndex
, 0, tmp3
- tmp
))
5279 if (!SetSrcNoteOffset(cx
, bce
, (unsigned)noteIndex
, 1, tmp2
- tmp
))
5281 /* The third note offset helps us find the loop-closing jump. */
5282 if (!SetSrcNoteOffset(cx
, bce
, (unsigned)noteIndex
, 2, bce
->offset() - tmp
))
5285 /* If no loop condition, just emit a loop-closing jump. */
5286 op
= forHead
->pn_kid2
? JSOP_IFNE
: JSOP_GOTO
;
5287 if (EmitJump(cx
, bce
, op
, top
- bce
->offset()) < 0)
5290 if (!bce
->tryNoteList
.append(JSTRY_LOOP
, bce
->stackDepth
, top
, bce
->offset()))
5293 /* Now fixup all breaks and continues. */
5294 return PopStatementBCE(cx
, bce
);
5298 EmitFor(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, ptrdiff_t top
)
5300 if (pn
->pn_left
->isKind(PNK_FORIN
))
5301 return EmitForIn(cx
, bce
, pn
, top
);
5303 if (pn
->pn_left
->isKind(PNK_FOROF
))
5304 return EmitForOf(cx
, bce
, STMT_FOR_OF_LOOP
, pn
, top
);
5306 MOZ_ASSERT(pn
->pn_left
->isKind(PNK_FORHEAD
));
5307 return EmitNormalFor(cx
, bce
, pn
, top
);
5310 static MOZ_NEVER_INLINE
bool
5311 EmitFunc(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
5313 FunctionBox
* funbox
= pn
->pn_funbox
;
5314 RootedFunction
fun(cx
, funbox
->function());
5315 MOZ_ASSERT_IF(fun
->isInterpretedLazy(), fun
->lazyScript());
5318 * Set the EMITTEDFUNCTION flag in function definitions once they have been
5319 * emitted. Function definitions that need hoisting to the top of the
5320 * function will be seen by EmitFunc in two places.
5322 if (pn
->pn_dflags
& PND_EMITTEDFUNCTION
) {
5323 MOZ_ASSERT_IF(fun
->hasScript(), fun
->nonLazyScript());
5324 MOZ_ASSERT(pn
->functionIsHoisted());
5325 MOZ_ASSERT(bce
->sc
->isFunctionBox());
5329 pn
->pn_dflags
|= PND_EMITTEDFUNCTION
;
5332 * Mark as singletons any function which will only be executed once, or
5333 * which is inner to a lambda we only expect to run once. In the latter
5334 * case, if the lambda runs multiple times then CloneFunctionObject will
5335 * make a deep clone of its contents.
5337 if (fun
->isInterpreted()) {
5339 bce
->script
->compileAndGo() &&
5340 fun
->isInterpreted() &&
5341 (bce
->checkSingletonContext() ||
5342 (!bce
->isInLoop() && bce
->isRunOnceLambda()));
5343 if (!JSFunction::setTypeForScriptedFunction(cx
, fun
, singleton
))
5346 if (fun
->isInterpretedLazy()) {
5347 if (!fun
->lazyScript()->sourceObject()) {
5348 JSObject
* scope
= bce
->staticScope
;
5349 if (!scope
&& bce
->sc
->isFunctionBox())
5350 scope
= bce
->sc
->asFunctionBox()->function();
5351 JSObject
* source
= bce
->script
->sourceObject();
5352 fun
->lazyScript()->setParent(scope
, &source
->as
<ScriptSourceObject
>());
5354 if (bce
->emittingRunOnceLambda
)
5355 fun
->lazyScript()->setTreatAsRunOnce();
5357 SharedContext
* outersc
= bce
->sc
;
5359 if (outersc
->isFunctionBox() && outersc
->asFunctionBox()->mightAliasLocals())
5360 funbox
->setMightAliasLocals(); // inherit mightAliasLocals from parent
5361 MOZ_ASSERT_IF(outersc
->strict
, funbox
->strict
);
5363 // Inherit most things (principals, version, etc) from the parent.
5364 Rooted
<JSScript
*> parent(cx
, bce
->script
);
5365 CompileOptions
options(cx
, bce
->parser
->options());
5366 options
.setMutedErrors(parent
->mutedErrors())
5367 .setCompileAndGo(parent
->compileAndGo())
5368 .setSelfHostingMode(parent
->selfHosted())
5369 .setNoScriptRval(false)
5371 .setVersion(parent
->getVersion());
5373 Rooted
<JSObject
*> enclosingScope(cx
, EnclosingStaticScope(bce
));
5374 Rooted
<JSObject
*> sourceObject(cx
, bce
->script
->sourceObject());
5375 Rooted
<JSScript
*> script(cx
, JSScript::Create(cx
, enclosingScope
, false, options
,
5376 parent
->staticLevel() + 1,
5378 funbox
->bufStart
, funbox
->bufEnd
));
5382 script
->bindings
= funbox
->bindings
;
5384 uint32_t lineNum
= bce
->parser
->tokenStream
.srcCoords
.lineNum(pn
->pn_pos
.begin
);
5385 BytecodeEmitter
bce2(bce
, bce
->parser
, funbox
, script
, /* lazyScript = */ js::NullPtr(),
5386 bce
->insideEval
, bce
->evalCaller
, bce
->hasGlobalScope
, lineNum
,
5391 /* We measured the max scope depth when we parsed the function. */
5392 if (!EmitFunctionScript(cx
, &bce2
, pn
->pn_body
))
5395 if (funbox
->usesArguments
&& funbox
->usesApply
&& funbox
->usesThis
)
5396 script
->setUsesArgumentsApplyAndThis();
5399 MOZ_ASSERT(IsAsmJSModuleNative(fun
->native()));
5402 /* Make the function object a literal in the outer script's pool. */
5403 unsigned index
= bce
->objectList
.add(pn
->pn_funbox
);
5405 /* Non-hoisted functions simply emit their respective op. */
5406 if (!pn
->functionIsHoisted()) {
5407 /* JSOP_LAMBDA_ARROW is always preceded by JSOP_THIS. */
5408 MOZ_ASSERT(fun
->isArrow() == (pn
->getOp() == JSOP_LAMBDA_ARROW
));
5409 if (fun
->isArrow() && Emit1(cx
, bce
, JSOP_THIS
) < 0)
5411 return EmitIndex32(cx
, pn
->getOp(), index
, bce
);
5415 * For a script we emit the code as we parse. Thus the bytecode for
5416 * top-level functions should go in the prolog to predefine their
5417 * names in the variable object before the already-generated main code
5418 * is executed. This extra work for top-level scripts is not necessary
5419 * when we emit the code for a function. It is fully parsed prior to
5420 * invocation of the emitter and calls to EmitTree for function
5421 * definitions can be scheduled before generating the rest of code.
5423 if (!bce
->sc
->isFunctionBox()) {
5424 MOZ_ASSERT(pn
->pn_cookie
.isFree());
5425 MOZ_ASSERT(pn
->getOp() == JSOP_NOP
);
5426 MOZ_ASSERT(!bce
->topStmt
);
5427 bce
->switchToProlog();
5428 if (!EmitIndex32(cx
, JSOP_DEFFUN
, index
, bce
))
5430 if (!UpdateSourceCoordNotes(cx
, bce
, pn
->pn_pos
.begin
))
5432 bce
->switchToMain();
5435 BindingIter
bi(bce
->script
);
5436 while (bi
->name() != fun
->atom())
5438 MOZ_ASSERT(bi
->kind() == Binding::VARIABLE
|| bi
->kind() == Binding::CONSTANT
||
5439 bi
->kind() == Binding::ARGUMENT
);
5440 MOZ_ASSERT(bi
.argOrLocalIndex() < JS_BIT(20));
5442 pn
->pn_index
= index
;
5443 if (!EmitIndexOp(cx
, JSOP_LAMBDA
, index
, bce
))
5445 MOZ_ASSERT(pn
->getOp() == JSOP_GETLOCAL
|| pn
->getOp() == JSOP_GETARG
);
5446 JSOp setOp
= pn
->getOp() == JSOP_GETLOCAL
? JSOP_SETLOCAL
: JSOP_SETARG
;
5447 if (!EmitVarOp(cx
, pn
, setOp
, bce
))
5449 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
5457 EmitDo(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
5459 /* Emit an annotated nop so IonBuilder can recognize the 'do' loop. */
5460 ptrdiff_t noteIndex
= NewSrcNote(cx
, bce
, SRC_WHILE
);
5461 if (noteIndex
< 0 || Emit1(cx
, bce
, JSOP_NOP
) < 0)
5464 ptrdiff_t noteIndex2
= NewSrcNote(cx
, bce
, SRC_WHILE
);
5468 /* Compile the loop body. */
5469 ptrdiff_t top
= EmitLoopHead(cx
, bce
, pn
->pn_left
);
5473 LoopStmtInfo
stmtInfo(cx
);
5474 PushLoopStatement(bce
, &stmtInfo
, STMT_DO_LOOP
, top
);
5476 if (!EmitLoopEntry(cx
, bce
, nullptr))
5479 if (!EmitTree(cx
, bce
, pn
->pn_left
))
5482 /* Set loop and enclosing label update offsets, for continue. */
5483 ptrdiff_t off
= bce
->offset();
5484 StmtInfoBCE
* stmt
= &stmtInfo
;
5487 } while ((stmt
= stmt
->down
) != nullptr && stmt
->type
== STMT_LABEL
);
5489 /* Compile the loop condition, now that continues know where to go. */
5490 if (!EmitTree(cx
, bce
, pn
->pn_right
))
5493 ptrdiff_t beq
= EmitJump(cx
, bce
, JSOP_IFNE
, top
- bce
->offset());
5497 if (!bce
->tryNoteList
.append(JSTRY_LOOP
, bce
->stackDepth
, top
, bce
->offset()))
5501 * Update the annotations with the update and back edge positions, for
5504 * Be careful: We must set noteIndex2 before noteIndex in case the noteIndex
5507 if (!SetSrcNoteOffset(cx
, bce
, noteIndex2
, 0, beq
- top
))
5509 if (!SetSrcNoteOffset(cx
, bce
, noteIndex
, 0, 1 + (off
- top
)))
5512 return PopStatementBCE(cx
, bce
);
5516 EmitWhile(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, ptrdiff_t top
)
5519 * Minimize bytecodes issued for one or more iterations by jumping to
5520 * the condition below the body and closing the loop if the condition
5521 * is true with a backward branch. For iteration count i:
5523 * i test at the top test at the bottom
5524 * = =============== ==================
5525 * 0 ifeq-pass goto; ifne-fail
5526 * 1 ifeq-fail; goto; ifne-pass goto; ifne-pass; ifne-fail
5527 * 2 2*(ifeq-fail; goto); ifeq-pass goto; 2*ifne-pass; ifne-fail
5529 * N N*(ifeq-fail; goto); ifeq-pass goto; N*ifne-pass; ifne-fail
5531 LoopStmtInfo
stmtInfo(cx
);
5532 PushLoopStatement(bce
, &stmtInfo
, STMT_WHILE_LOOP
, top
);
5534 ptrdiff_t noteIndex
= NewSrcNote(cx
, bce
, SRC_WHILE
);
5538 ptrdiff_t jmp
= EmitJump(cx
, bce
, JSOP_GOTO
, 0);
5542 top
= EmitLoopHead(cx
, bce
, pn
->pn_right
);
5546 if (!EmitTree(cx
, bce
, pn
->pn_right
))
5549 SetJumpOffsetAt(bce
, jmp
);
5550 if (!EmitLoopEntry(cx
, bce
, pn
->pn_left
))
5552 if (!EmitTree(cx
, bce
, pn
->pn_left
))
5555 ptrdiff_t beq
= EmitJump(cx
, bce
, JSOP_IFNE
, top
- bce
->offset());
5559 if (!bce
->tryNoteList
.append(JSTRY_LOOP
, bce
->stackDepth
, top
, bce
->offset()))
5562 if (!SetSrcNoteOffset(cx
, bce
, noteIndex
, 0, beq
- jmp
))
5565 return PopStatementBCE(cx
, bce
);
5569 EmitBreak(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, PropertyName
* label
)
5571 StmtInfoBCE
* stmt
= bce
->topStmt
;
5572 SrcNoteType noteType
;
5574 while (stmt
->type
!= STMT_LABEL
|| stmt
->label
!= label
)
5576 noteType
= SRC_BREAK2LABEL
;
5578 while (!stmt
->isLoop() && stmt
->type
!= STMT_SWITCH
)
5580 noteType
= (stmt
->type
== STMT_SWITCH
) ? SRC_SWITCHBREAK
: SRC_BREAK
;
5583 return EmitGoto(cx
, bce
, stmt
, &stmt
->breaks
, noteType
) >= 0;
5587 EmitContinue(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, PropertyName
* label
)
5589 StmtInfoBCE
* stmt
= bce
->topStmt
;
5591 /* Find the loop statement enclosed by the matching label. */
5592 StmtInfoBCE
* loop
= nullptr;
5593 while (stmt
->type
!= STMT_LABEL
|| stmt
->label
!= label
) {
5600 while (!stmt
->isLoop())
5604 return EmitGoto(cx
, bce
, stmt
, &stmt
->continues
, SRC_CONTINUE
) >= 0;
5608 InTryBlockWithFinally(BytecodeEmitter
* bce
)
5610 for (StmtInfoBCE
* stmt
= bce
->topStmt
; stmt
; stmt
= stmt
->down
) {
5611 if (stmt
->type
== STMT_FINALLY
)
5618 EmitReturn(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
5620 if (!UpdateSourceCoordNotes(cx
, bce
, pn
->pn_pos
.begin
))
5623 if (bce
->sc
->isFunctionBox() && bce
->sc
->asFunctionBox()->isStarGenerator()) {
5624 if (!EmitPrepareIteratorResult(cx
, bce
))
5628 /* Push a return value */
5629 if (ParseNode
* pn2
= pn
->pn_left
) {
5630 if (!EmitTree(cx
, bce
, pn2
))
5633 /* No explicit return value provided */
5634 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0)
5638 if (bce
->sc
->isFunctionBox() && bce
->sc
->asFunctionBox()->isStarGenerator()) {
5639 if (!EmitFinishIteratorResult(cx
, bce
, true))
5644 * EmitNonLocalJumpFixup may add fixup bytecode to close open try
5645 * blocks having finally clauses and to exit intermingled let blocks.
5646 * We can't simply transfer control flow to our caller in that case,
5647 * because we must gosub to those finally clauses from inner to outer,
5648 * with the correct stack pointer (i.e., after popping any with,
5649 * for/in, etc., slots nested inside the finally's try).
5651 * In this case we mutate JSOP_RETURN into JSOP_SETRVAL and add an
5652 * extra JSOP_RETRVAL after the fixups.
5654 ptrdiff_t top
= bce
->offset();
5656 bool isGenerator
= bce
->sc
->isFunctionBox() && bce
->sc
->asFunctionBox()->isGenerator();
5657 bool useGenRVal
= false;
5659 if (bce
->sc
->asFunctionBox()->isStarGenerator() && InTryBlockWithFinally(bce
)) {
5660 // Emit JSOP_SETALIASEDVAR .genrval to store the return value on the
5661 // scope chain, so it's not lost when we yield in a finally block.
5663 MOZ_ASSERT(pn
->pn_right
);
5664 if (!EmitTree(cx
, bce
, pn
->pn_right
))
5666 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
5669 if (Emit1(cx
, bce
, JSOP_SETRVAL
) < 0)
5673 if (Emit1(cx
, bce
, JSOP_RETURN
) < 0)
5677 NonLocalExitScope
nle(cx
, bce
);
5679 if (!nle
.prepareForNonLocalJump(nullptr))
5684 // We know that .generator and .genrval are on the top scope chain node,
5685 // as we just exited nested scopes.
5688 MOZ_ALWAYS_TRUE(LookupAliasedNameSlot(bce
, bce
->script
, cx
->names().dotGenRVal
, &sc
));
5689 if (!EmitAliasedVarOp(cx
, JSOP_GETALIASEDVAR
, sc
, DontCheckLexical
, bce
))
5691 if (Emit1(cx
, bce
, JSOP_SETRVAL
) < 0)
5695 MOZ_ALWAYS_TRUE(LookupAliasedNameSlot(bce
, bce
->script
, cx
->names().dotGenerator
, &sc
));
5696 if (!EmitAliasedVarOp(cx
, JSOP_GETALIASEDVAR
, sc
, DontCheckLexical
, bce
))
5698 if (!EmitYieldOp(cx
, bce
, JSOP_FINALYIELDRVAL
))
5700 } else if (top
+ static_cast<ptrdiff_t>(JSOP_RETURN_LENGTH
) != bce
->offset()) {
5701 bce
->code()[top
] = JSOP_SETRVAL
;
5702 if (Emit1(cx
, bce
, JSOP_RETRVAL
) < 0)
5710 EmitYield(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
5712 MOZ_ASSERT(bce
->sc
->isFunctionBox());
5714 if (pn
->getOp() == JSOP_YIELD
) {
5715 if (bce
->sc
->asFunctionBox()->isStarGenerator()) {
5716 if (!EmitPrepareIteratorResult(cx
, bce
))
5720 if (!EmitTree(cx
, bce
, pn
->pn_left
))
5723 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0)
5726 if (bce
->sc
->asFunctionBox()->isStarGenerator()) {
5727 if (!EmitFinishIteratorResult(cx
, bce
, false))
5731 MOZ_ASSERT(pn
->getOp() == JSOP_INITIALYIELD
);
5734 if (!EmitTree(cx
, bce
, pn
->pn_right
))
5737 if (!EmitYieldOp(cx
, bce
, pn
->getOp()))
5740 if (pn
->getOp() == JSOP_INITIALYIELD
&& Emit1(cx
, bce
, JSOP_POP
) < 0)
5747 EmitYieldStar(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* iter
, ParseNode
* gen
)
5749 MOZ_ASSERT(bce
->sc
->isFunctionBox());
5750 MOZ_ASSERT(bce
->sc
->asFunctionBox()->isStarGenerator());
5752 if (!EmitTree(cx
, bce
, iter
)) // ITERABLE
5754 if (!EmitIterator(cx
, bce
)) // ITER
5757 // Initial send value is undefined.
5758 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0) // ITER RECEIVED
5761 int depth
= bce
->stackDepth
;
5762 MOZ_ASSERT(depth
>= 2);
5764 ptrdiff_t initialSend
= -1;
5765 if (EmitBackPatchOp(cx
, bce
, &initialSend
) < 0) // goto initialSend
5768 // Try prologue. // ITER RESULT
5769 StmtInfoBCE
stmtInfo(cx
);
5770 PushStatementBCE(bce
, &stmtInfo
, STMT_TRY
, bce
->offset());
5771 ptrdiff_t noteIndex
= NewSrcNote(cx
, bce
, SRC_TRY
);
5772 ptrdiff_t tryStart
= bce
->offset(); // tryStart:
5773 if (noteIndex
< 0 || Emit1(cx
, bce
, JSOP_TRY
) < 0)
5775 MOZ_ASSERT(bce
->stackDepth
== depth
);
5777 // Load the generator object.
5778 if (!EmitTree(cx
, bce
, gen
)) // ITER RESULT GENOBJ
5781 // Yield RESULT as-is, without re-boxing.
5782 if (!EmitYieldOp(cx
, bce
, JSOP_YIELD
)) // ITER RECEIVED
5786 if (!SetSrcNoteOffset(cx
, bce
, noteIndex
, 0, bce
->offset() - tryStart
))
5788 ptrdiff_t subsequentSend
= -1;
5789 if (EmitBackPatchOp(cx
, bce
, &subsequentSend
) < 0) // goto subsequentSend
5791 ptrdiff_t tryEnd
= bce
->offset(); // tryEnd:
5794 bce
->stackDepth
= uint32_t(depth
); // ITER RESULT
5795 if (Emit1(cx
, bce
, JSOP_POP
) < 0) // ITER
5797 // THROW? = 'throw' in ITER
5798 if (Emit1(cx
, bce
, JSOP_EXCEPTION
) < 0) // ITER EXCEPTION
5800 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0) // EXCEPTION ITER
5802 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // EXCEPTION ITER ITER
5804 if (!EmitAtomOp(cx
, cx
->names().throw_
, JSOP_STRING
, bce
)) // EXCEPTION ITER ITER "throw"
5806 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0) // EXCEPTION ITER "throw" ITER
5808 if (Emit1(cx
, bce
, JSOP_IN
) < 0) // EXCEPTION ITER THROW?
5810 // if (THROW?) goto delegate
5811 ptrdiff_t checkThrow
= EmitJump(cx
, bce
, JSOP_IFNE
, 0); // EXCEPTION ITER
5814 if (Emit1(cx
, bce
, JSOP_POP
) < 0) // EXCEPTION
5816 if (Emit1(cx
, bce
, JSOP_THROW
) < 0) // throw EXCEPTION
5819 SetJumpOffsetAt(bce
, checkThrow
); // delegate:
5820 // RESULT = ITER.throw(EXCEPTION) // EXCEPTION ITER
5821 bce
->stackDepth
= uint32_t(depth
);
5822 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // EXCEPTION ITER ITER
5824 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // EXCEPTION ITER ITER ITER
5826 if (!EmitAtomOp(cx
, cx
->names().throw_
, JSOP_CALLPROP
, bce
)) // EXCEPTION ITER ITER THROW
5828 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0) // EXCEPTION ITER THROW ITER
5830 if (Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)3) < 0) // ITER THROW ITER EXCEPTION
5832 if (EmitCall(cx
, bce
, JSOP_CALL
, 1, iter
) < 0) // ITER RESULT
5834 CheckTypeSet(cx
, bce
, JSOP_CALL
);
5835 MOZ_ASSERT(bce
->stackDepth
== depth
);
5836 ptrdiff_t checkResult
= -1;
5837 if (EmitBackPatchOp(cx
, bce
, &checkResult
) < 0) // goto checkResult
5841 if (!PopStatementBCE(cx
, bce
))
5843 // This is a peace offering to ReconstructPCStack. See the note in EmitTry.
5844 if (Emit1(cx
, bce
, JSOP_NOP
) < 0)
5846 if (!bce
->tryNoteList
.append(JSTRY_CATCH
, depth
, tryStart
+ JSOP_TRY_LENGTH
, tryEnd
))
5849 // After the try/catch block: send the received value to the iterator.
5850 if (!BackPatch(cx
, bce
, initialSend
, bce
->code().end(), JSOP_GOTO
)) // initialSend:
5852 if (!BackPatch(cx
, bce
, subsequentSend
, bce
->code().end(), JSOP_GOTO
)) // subsequentSend:
5856 // result = iter.next(received) // ITER RECEIVED
5857 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0) // RECEIVED ITER
5859 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // RECEIVED ITER ITER
5861 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // RECEIVED ITER ITER ITER
5863 if (!EmitAtomOp(cx
, cx
->names().next
, JSOP_CALLPROP
, bce
)) // RECEIVED ITER ITER NEXT
5865 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0) // RECEIVED ITER NEXT ITER
5867 if (Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)3) < 0) // ITER NEXT ITER RECEIVED
5869 if (EmitCall(cx
, bce
, JSOP_CALL
, 1, iter
) < 0) // ITER RESULT
5871 CheckTypeSet(cx
, bce
, JSOP_CALL
);
5872 MOZ_ASSERT(bce
->stackDepth
== depth
);
5874 if (!BackPatch(cx
, bce
, checkResult
, bce
->code().end(), JSOP_GOTO
)) // checkResult:
5876 // if (!result.done) goto tryStart; // ITER RESULT
5877 if (Emit1(cx
, bce
, JSOP_DUP
) < 0) // ITER RESULT RESULT
5879 if (!EmitAtomOp(cx
, cx
->names().done
, JSOP_GETPROP
, bce
)) // ITER RESULT DONE
5881 // if (!DONE) goto tryStart;
5882 if (EmitJump(cx
, bce
, JSOP_IFEQ
, tryStart
- bce
->offset()) < 0) // ITER RESULT
5886 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0) // RESULT ITER
5888 if (Emit1(cx
, bce
, JSOP_POP
) < 0) // RESULT
5890 if (!EmitAtomOp(cx
, cx
->names().value
, JSOP_GETPROP
, bce
)) // VALUE
5893 MOZ_ASSERT(bce
->stackDepth
== depth
- 1);
5899 EmitStatementList(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, ptrdiff_t top
)
5901 MOZ_ASSERT(pn
->isArity(PN_LIST
));
5903 StmtInfoBCE
stmtInfo(cx
);
5904 PushStatementBCE(bce
, &stmtInfo
, STMT_BLOCK
, top
);
5906 ParseNode
* pnchild
= pn
->pn_head
;
5908 if (pn
->pn_xflags
& PNX_DESTRUCT
)
5909 pnchild
= pnchild
->pn_next
;
5911 for (ParseNode
* pn2
= pnchild
; pn2
; pn2
= pn2
->pn_next
) {
5912 if (!EmitTree(cx
, bce
, pn2
))
5916 return PopStatementBCE(cx
, bce
);
5920 EmitStatement(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
5922 MOZ_ASSERT(pn
->isKind(PNK_SEMI
));
5924 ParseNode
* pn2
= pn
->pn_kid
;
5928 if (!UpdateSourceCoordNotes(cx
, bce
, pn
->pn_pos
.begin
))
5932 * Top-level or called-from-a-native JS_Execute/EvaluateScript,
5933 * debugger, and eval frames may need the value of the ultimate
5934 * expression statement as the script's result, despite the fact
5935 * that it appears useless to the compiler.
5937 * API users may also set the JSOPTION_NO_SCRIPT_RVAL option when
5938 * calling JS_Compile* to suppress JSOP_SETRVAL.
5940 bool wantval
= false;
5941 bool useful
= false;
5942 if (bce
->sc
->isFunctionBox()) {
5943 MOZ_ASSERT(!bce
->script
->noScriptRval());
5945 useful
= wantval
= !bce
->script
->noScriptRval();
5948 /* Don't eliminate expressions with side effects. */
5950 if (!CheckSideEffects(cx
, bce
, pn2
, &useful
))
5954 * Don't eliminate apparently useless expressions if they are
5955 * labeled expression statements. The pc->topStmt->update test
5956 * catches the case where we are nesting in EmitTree for a labeled
5957 * compound statement.
5960 bce
->topStmt
->type
== STMT_LABEL
&&
5961 bce
->topStmt
->update
>= bce
->offset())
5968 JSOp op
= wantval
? JSOP_SETRVAL
: JSOP_POP
;
5969 MOZ_ASSERT_IF(pn2
->isKind(PNK_ASSIGN
), pn2
->isOp(JSOP_NOP
));
5970 if (!EmitTree(cx
, bce
, pn2
))
5972 if (Emit1(cx
, bce
, op
) < 0)
5974 } else if (pn
->isDirectivePrologueMember()) {
5975 // Don't complain about directive prologue members; just don't emit
5978 if (JSAtom
* atom
= pn
->isStringExprStatement()) {
5979 // Warn if encountering a non-directive prologue member string
5980 // expression statement, that is inconsistent with the current
5981 // directive prologue. That is, a script *not* starting with
5982 // "use strict" should warn for any "use strict" statements seen
5983 // later in the script, because such statements are misleading.
5984 const char* directive
= nullptr;
5985 if (atom
== cx
->names().useStrict
) {
5986 if (!bce
->sc
->strict
)
5987 directive
= js_useStrict_str
;
5988 } else if (atom
== cx
->names().useAsm
) {
5989 if (bce
->sc
->isFunctionBox()) {
5990 JSFunction
* fun
= bce
->sc
->asFunctionBox()->function();
5991 if (fun
->isNative() && IsAsmJSModuleNative(fun
->native()))
5992 directive
= js_useAsm_str
;
5997 if (!bce
->reportStrictWarning(pn2
, JSMSG_CONTRARY_NONDIRECTIVE
, directive
))
6001 bce
->current
->currentLine
= bce
->parser
->tokenStream
.srcCoords
.lineNum(pn2
->pn_pos
.begin
);
6002 bce
->current
->lastColumn
= 0;
6003 if (!bce
->reportStrictWarning(pn2
, JSMSG_USELESS_EXPR
))
6012 EmitDelete(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
6015 * Under ECMA 3, deleting a non-reference returns true -- but alas we
6016 * must evaluate the operand if it appears it might have side effects.
6018 ParseNode
* pn2
= pn
->pn_kid
;
6019 switch (pn2
->getKind()) {
6021 if (!BindNameToSlot(cx
, bce
, pn2
))
6023 if (!EmitAtomOp(cx
, pn2
, pn2
->getOp(), bce
))
6028 JSOp delOp
= bce
->sc
->strict
? JSOP_STRICTDELPROP
: JSOP_DELPROP
;
6029 if (!EmitPropOp(cx
, pn2
, delOp
, bce
))
6035 JSOp delOp
= bce
->sc
->strict
? JSOP_STRICTDELELEM
: JSOP_DELELEM
;
6036 if (!EmitElemOp(cx
, pn2
, delOp
, bce
))
6043 * If useless, just emit JSOP_TRUE; otherwise convert delete foo()
6044 * to foo(), true (a comma expression).
6046 bool useful
= false;
6047 if (!CheckSideEffects(cx
, bce
, pn2
, &useful
))
6051 MOZ_ASSERT_IF(pn2
->isKind(PNK_CALL
), !(pn2
->pn_xflags
& PNX_SETCALL
));
6052 if (!EmitTree(cx
, bce
, pn2
))
6054 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
6058 if (Emit1(cx
, bce
, JSOP_TRUE
) < 0)
6067 EmitArray(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, uint32_t count
);
6070 EmitSelfHostedCallFunction(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
6072 // Special-casing of callFunction to emit bytecode that directly
6073 // invokes the callee with the correct |this| object and arguments.
6074 // callFunction(fun, thisArg, arg0, arg1) thus becomes:
6075 // - emit lookup for fun
6076 // - emit lookup for thisArg
6077 // - emit lookups for arg0, arg1
6079 // argc is set to the amount of actually emitted args and the
6080 // emitting of args below is disabled by setting emitArgs to false.
6081 if (pn
->pn_count
< 3) {
6082 bce
->reportError(pn
, JSMSG_MORE_ARGS_NEEDED
, "callFunction", "1", "s");
6086 ParseNode
* pn2
= pn
->pn_head
;
6087 ParseNode
* funNode
= pn2
->pn_next
;
6088 if (!EmitTree(cx
, bce
, funNode
))
6091 ParseNode
* thisArg
= funNode
->pn_next
;
6092 if (!EmitTree(cx
, bce
, thisArg
))
6095 bool oldEmittingForInit
= bce
->emittingForInit
;
6096 bce
->emittingForInit
= false;
6098 for (ParseNode
* argpn
= thisArg
->pn_next
; argpn
; argpn
= argpn
->pn_next
) {
6099 if (!EmitTree(cx
, bce
, argpn
))
6103 bce
->emittingForInit
= oldEmittingForInit
;
6105 uint32_t argc
= pn
->pn_count
- 3;
6106 if (EmitCall(cx
, bce
, pn
->getOp(), argc
) < 0)
6109 CheckTypeSet(cx
, bce
, pn
->getOp());
6114 EmitSelfHostedResumeGenerator(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
6116 // Syntax: resumeGenerator(gen, value, 'next'|'throw'|'close')
6117 if (pn
->pn_count
!= 4) {
6118 bce
->reportError(pn
, JSMSG_MORE_ARGS_NEEDED
, "resumeGenerator", "1", "s");
6122 ParseNode
* funNode
= pn
->pn_head
; // The resumeGenerator node.
6124 ParseNode
* genNode
= funNode
->pn_next
;
6125 if (!EmitTree(cx
, bce
, genNode
))
6128 ParseNode
* valNode
= genNode
->pn_next
;
6129 if (!EmitTree(cx
, bce
, valNode
))
6132 ParseNode
* kindNode
= valNode
->pn_next
;
6133 MOZ_ASSERT(kindNode
->isKind(PNK_STRING
));
6134 uint16_t operand
= GeneratorObject::getResumeKind(cx
, kindNode
->pn_atom
);
6135 MOZ_ASSERT(!kindNode
->pn_next
);
6137 if (EmitCall(cx
, bce
, JSOP_RESUME
, operand
) < 0)
6144 EmitSelfHostedForceInterpreter(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
6146 if (Emit1(cx
, bce
, JSOP_FORCEINTERPRETER
) < 0)
6148 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0)
6154 EmitCallOrNew(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
6156 bool callop
= pn
->isKind(PNK_CALL
) || pn
->isKind(PNK_TAGGED_TEMPLATE
);
6158 * Emit callable invocation or operator new (constructor call) code.
6159 * First, emit code for the left operand to evaluate the callable or
6160 * constructable object expression.
6162 * For operator new, we emit JSOP_GETPROP instead of JSOP_CALLPROP, etc.
6163 * This is necessary to interpose the lambda-initialized method read
6164 * barrier -- see the code in jsinterp.cpp for JSOP_LAMBDA followed by
6165 * JSOP_{SET,INIT}PROP.
6167 * Then (or in a call case that has no explicit reference-base
6168 * object) we emit JSOP_UNDEFINED to produce the undefined |this|
6169 * value required for calls (which non-strict mode functions
6170 * will box into the global object).
6172 uint32_t argc
= pn
->pn_count
- 1;
6174 if (argc
>= ARGC_LIMIT
) {
6175 bce
->parser
->tokenStream
.reportError(callop
6176 ? JSMSG_TOO_MANY_FUN_ARGS
6177 : JSMSG_TOO_MANY_CON_ARGS
);
6181 ParseNode
* pn2
= pn
->pn_head
;
6182 bool spread
= JOF_OPTYPE(pn
->getOp()) == JOF_BYTE
;
6183 switch (pn2
->getKind()) {
6185 if (bce
->emitterMode
== BytecodeEmitter::SelfHosting
&& !spread
) {
6186 // We shouldn't see foo(bar) = x in self-hosted code.
6187 MOZ_ASSERT(!(pn
->pn_xflags
& PNX_SETCALL
));
6189 // Calls to "forceInterpreter", "callFunction" or "resumeGenerator"
6190 // in self-hosted code generate inline bytecode.
6191 if (pn2
->name() == cx
->names().callFunction
)
6192 return EmitSelfHostedCallFunction(cx
, bce
, pn
);
6193 if (pn2
->name() == cx
->names().resumeGenerator
)
6194 return EmitSelfHostedResumeGenerator(cx
, bce
, pn
);
6195 if (pn2
->name() == cx
->names().forceInterpreter
)
6196 return EmitSelfHostedForceInterpreter(cx
, bce
, pn
);
6199 if (!EmitNameOp(cx
, bce
, pn2
, callop
))
6203 if (!EmitPropOp(cx
, pn2
, callop
? JSOP_CALLPROP
: JSOP_GETPROP
, bce
))
6207 if (!EmitElemOp(cx
, pn2
, callop
? JSOP_CALLELEM
: JSOP_GETELEM
, bce
))
6210 if (Emit1(cx
, bce
, JSOP_SWAP
) < 0)
6216 * Top level lambdas which are immediately invoked should be
6217 * treated as only running once. Every time they execute we will
6218 * create new types and scripts for their contents, to increase
6219 * the quality of type information within them and enable more
6220 * backend optimizations. Note that this does not depend on the
6221 * lambda being invoked at most once (it may be named or be
6222 * accessed via foo.caller indirection), as multiple executions
6223 * will just cause the inner scripts to be repeatedly cloned.
6225 MOZ_ASSERT(!bce
->emittingRunOnceLambda
);
6226 if (bce
->checkSingletonContext() || (!bce
->isInLoop() && bce
->isRunOnceLambda())) {
6227 bce
->emittingRunOnceLambda
= true;
6228 if (!EmitTree(cx
, bce
, pn2
))
6230 bce
->emittingRunOnceLambda
= false;
6232 if (!EmitTree(cx
, bce
, pn2
))
6238 if (!EmitTree(cx
, bce
, pn2
))
6240 callop
= false; /* trigger JSOP_UNDEFINED after */
6244 JSOp thisop
= pn
->isKind(PNK_GENEXP
) ? JSOP_THIS
: JSOP_UNDEFINED
;
6245 if (Emit1(cx
, bce
, thisop
) < 0)
6250 * Emit code for each argument in order, then emit the JSOP_*CALL or
6251 * JSOP_NEW bytecode with a two-byte immediate telling how many args
6252 * were pushed on the operand stack.
6254 bool oldEmittingForInit
= bce
->emittingForInit
;
6255 bce
->emittingForInit
= false;
6257 for (ParseNode
* pn3
= pn2
->pn_next
; pn3
; pn3
= pn3
->pn_next
) {
6258 if (!EmitTree(cx
, bce
, pn3
))
6262 if (!EmitArray(cx
, bce
, pn2
->pn_next
, argc
))
6265 bce
->emittingForInit
= oldEmittingForInit
;
6268 if (EmitCall(cx
, bce
, pn
->getOp(), argc
, pn
) < 0)
6271 if (Emit1(cx
, bce
, pn
->getOp()) < 0)
6274 CheckTypeSet(cx
, bce
, pn
->getOp());
6275 if (pn
->isOp(JSOP_EVAL
) ||
6276 pn
->isOp(JSOP_STRICTEVAL
) ||
6277 pn
->isOp(JSOP_SPREADEVAL
) ||
6278 pn
->isOp(JSOP_STRICTSPREADEVAL
))
6280 uint32_t lineNum
= bce
->parser
->tokenStream
.srcCoords
.lineNum(pn
->pn_pos
.begin
);
6281 EMIT_UINT16_IMM_OP(JSOP_LINENO
, lineNum
);
6283 if (pn
->pn_xflags
& PNX_SETCALL
) {
6284 if (Emit1(cx
, bce
, JSOP_SETCALL
) < 0)
6291 EmitLogical(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
6294 * JSOP_OR converts the operand on the stack to boolean, leaves the original
6295 * value on the stack and jumps if true; otherwise it falls into the next
6296 * bytecode, which pops the left operand and then evaluates the right operand.
6297 * The jump goes around the right operand evaluation.
6299 * JSOP_AND converts the operand on the stack to boolean and jumps if false;
6300 * otherwise it falls into the right operand's bytecode.
6303 if (pn
->isArity(PN_BINARY
)) {
6304 if (!EmitTree(cx
, bce
, pn
->pn_left
))
6306 ptrdiff_t top
= EmitJump(cx
, bce
, JSOP_BACKPATCH
, 0);
6309 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
6311 if (!EmitTree(cx
, bce
, pn
->pn_right
))
6313 ptrdiff_t off
= bce
->offset();
6314 jsbytecode
* pc
= bce
->code(top
);
6315 SET_JUMP_OFFSET(pc
, off
- top
);
6320 MOZ_ASSERT(pn
->isArity(PN_LIST
));
6321 MOZ_ASSERT(pn
->pn_head
->pn_next
->pn_next
);
6323 /* Left-associative operator chain: avoid too much recursion. */
6324 ParseNode
* pn2
= pn
->pn_head
;
6325 if (!EmitTree(cx
, bce
, pn2
))
6327 ptrdiff_t top
= EmitJump(cx
, bce
, JSOP_BACKPATCH
, 0);
6330 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
6333 /* Emit nodes between the head and the tail. */
6334 ptrdiff_t jmp
= top
;
6335 while ((pn2
= pn2
->pn_next
)->pn_next
) {
6336 if (!EmitTree(cx
, bce
, pn2
))
6338 ptrdiff_t off
= EmitJump(cx
, bce
, JSOP_BACKPATCH
, 0);
6341 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
6343 SET_JUMP_OFFSET(bce
->code(jmp
), off
- jmp
);
6346 if (!EmitTree(cx
, bce
, pn2
))
6350 ptrdiff_t off
= bce
->offset();
6352 jsbytecode
* pc
= bce
->code(top
);
6353 ptrdiff_t tmp
= GET_JUMP_OFFSET(pc
);
6354 SET_JUMP_OFFSET(pc
, off
- top
);
6357 } while ((pn2
= pn2
->pn_next
)->pn_next
);
6363 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
6364 * the comment on EmitSwitch.
6366 MOZ_NEVER_INLINE
static bool
6367 EmitIncOrDec(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
6369 /* Emit lvalue-specialized code for ++/-- operators. */
6370 ParseNode
* pn2
= pn
->pn_kid
;
6371 switch (pn2
->getKind()) {
6373 if (!EmitPropIncDec(cx
, pn
, bce
))
6377 if (!EmitElemIncDec(cx
, pn
, bce
))
6381 MOZ_ASSERT(pn2
->pn_xflags
& PNX_SETCALL
);
6382 if (!EmitTree(cx
, bce
, pn2
))
6386 MOZ_ASSERT(pn2
->isKind(PNK_NAME
));
6387 pn2
->setOp(JSOP_SETNAME
);
6388 if (!BindNameToSlot(cx
, bce
, pn2
))
6390 JSOp op
= pn2
->getOp();
6395 case JSOP_SETALIASEDVAR
:
6397 case JSOP_STRICTSETNAME
:
6399 case JSOP_STRICTSETGNAME
:
6405 if (op
== JSOP_CALLEE
) {
6406 if (Emit1(cx
, bce
, op
) < 0)
6408 } else if (!pn2
->pn_cookie
.isFree()) {
6410 if (!EmitVarIncDec(cx
, pn
, bce
))
6413 if (!EmitVarOp(cx
, pn2
, op
, bce
))
6417 MOZ_ASSERT(JOF_OPTYPE(op
) == JOF_ATOM
);
6419 if (!EmitNameIncDec(cx
, pn
, bce
))
6422 if (!EmitAtomOp(cx
, pn2
, op
, bce
))
6427 if (pn2
->isConst()) {
6428 if (Emit1(cx
, bce
, JSOP_POS
) < 0)
6431 JSOp binop
= GetIncDecInfo(pn
->getKind(), &post
);
6433 if (Emit1(cx
, bce
, JSOP_ONE
) < 0)
6435 if (Emit1(cx
, bce
, binop
) < 0)
6444 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
6445 * the comment on EmitSwitch.
6447 MOZ_NEVER_INLINE
static bool
6448 EmitLabeledStatement(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, const LabeledStatement
* pn
)
6451 * Emit a JSOP_LABEL instruction. The argument is the offset to the statement
6452 * following the labeled statement.
6455 if (!bce
->makeAtomIndex(pn
->label(), &index
))
6458 ptrdiff_t top
= EmitJump(cx
, bce
, JSOP_LABEL
, 0);
6462 /* Emit code for the labeled statement. */
6463 StmtInfoBCE
stmtInfo(cx
);
6464 PushStatementBCE(bce
, &stmtInfo
, STMT_LABEL
, bce
->offset());
6465 stmtInfo
.label
= pn
->label();
6466 if (!EmitTree(cx
, bce
, pn
->statement()))
6468 if (!PopStatementBCE(cx
, bce
))
6471 /* Patch the JSOP_LABEL offset. */
6472 SetJumpOffsetAt(bce
, top
);
6477 EmitSyntheticStatements(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, ptrdiff_t top
)
6479 MOZ_ASSERT(pn
->isArity(PN_LIST
));
6480 StmtInfoBCE
stmtInfo(cx
);
6481 PushStatementBCE(bce
, &stmtInfo
, STMT_SEQ
, top
);
6482 ParseNode
* pn2
= pn
->pn_head
;
6483 if (pn
->pn_xflags
& PNX_DESTRUCT
)
6485 for (; pn2
; pn2
= pn2
->pn_next
) {
6486 if (!EmitTree(cx
, bce
, pn2
))
6489 return PopStatementBCE(cx
, bce
);
6493 EmitConditionalExpression(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ConditionalExpression
& conditional
)
6495 /* Emit the condition, then branch if false to the else part. */
6496 if (!EmitTree(cx
, bce
, &conditional
.condition()))
6498 ptrdiff_t noteIndex
= NewSrcNote(cx
, bce
, SRC_COND
);
6501 ptrdiff_t beq
= EmitJump(cx
, bce
, JSOP_IFEQ
, 0);
6502 if (beq
< 0 || !EmitTree(cx
, bce
, &conditional
.thenExpression()))
6505 /* Jump around else, fixup the branch, emit else, fixup jump. */
6506 ptrdiff_t jmp
= EmitJump(cx
, bce
, JSOP_GOTO
, 0);
6509 SetJumpOffsetAt(bce
, beq
);
6512 * Because each branch pushes a single value, but our stack budgeting
6513 * analysis ignores branches, we now have to adjust bce->stackDepth to
6514 * ignore the value pushed by the first branch. Execution will follow
6515 * only one path, so we must decrement bce->stackDepth.
6517 * Failing to do this will foil code, such as let expression and block
6518 * code generation, which must use the stack depth to compute local
6519 * stack indexes correctly.
6521 MOZ_ASSERT(bce
->stackDepth
> 0);
6523 if (!EmitTree(cx
, bce
, &conditional
.elseExpression()))
6525 SetJumpOffsetAt(bce
, jmp
);
6526 return SetSrcNoteOffset(cx
, bce
, noteIndex
, 0, jmp
- beq
);
6530 * Using MOZ_NEVER_INLINE in here is a workaround for llvm.org/pr14047. See
6531 * the comment on EmitSwitch.
6533 MOZ_NEVER_INLINE
static bool
6534 EmitObject(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
6536 if (!(pn
->pn_xflags
& PNX_NONCONST
) && pn
->pn_head
&& bce
->checkSingletonContext())
6537 return EmitSingletonInitialiser(cx
, bce
, pn
);
6540 * Emit code for {p:a, '%q':b, 2:c} that is equivalent to constructing
6541 * a new object and defining (in source order) each property on the object
6542 * (or mutating the object's [[Prototype]], in the case of __proto__).
6544 ptrdiff_t offset
= bce
->offset();
6545 if (!EmitNewInit(cx
, bce
, JSProto_Object
))
6549 * Try to construct the shape of the object as we go, so we can emit a
6550 * JSOP_NEWOBJECT with the final shape instead.
6552 RootedPlainObject
obj(cx
);
6553 if (bce
->script
->compileAndGo()) {
6554 gc::AllocKind kind
= GuessObjectGCKind(pn
->pn_count
);
6555 obj
= NewBuiltinClassInstance
<PlainObject
>(cx
, kind
, TenuredObject
);
6560 for (ParseNode
* propdef
= pn
->pn_head
; propdef
; propdef
= propdef
->pn_next
) {
6561 if (!UpdateSourceCoordNotes(cx
, bce
, propdef
->pn_pos
.begin
))
6564 // Handle __proto__: v specially because *only* this form, and no other
6565 // involving "__proto__", performs [[Prototype]] mutation.
6566 if (propdef
->isKind(PNK_MUTATEPROTO
)) {
6567 if (!EmitTree(cx
, bce
, propdef
->pn_kid
))
6570 if (!Emit1(cx
, bce
, JSOP_MUTATEPROTO
))
6575 /* Emit an index for t[2] for later consumption by JSOP_INITELEM. */
6576 ParseNode
* key
= propdef
->pn_left
;
6577 bool isIndex
= false;
6578 if (key
->isKind(PNK_NUMBER
)) {
6579 if (!EmitNumberOp(cx
, key
->pn_dval
, bce
))
6582 } else if (key
->isKind(PNK_NAME
) || key
->isKind(PNK_STRING
)) {
6583 // The parser already checked for atoms representing indexes and
6584 // used PNK_NUMBER instead, but also watch for ids which TI treats
6585 // as indexes for simpliciation of downstream analysis.
6586 jsid id
= NameToId(key
->pn_atom
->asPropertyName());
6587 if (id
!= types::IdToTypeId(id
)) {
6588 if (!EmitTree(cx
, bce
, key
))
6593 MOZ_ASSERT(key
->isKind(PNK_COMPUTED_NAME
));
6594 if (!EmitTree(cx
, bce
, key
->pn_kid
))
6599 /* Emit code for the property initializer. */
6600 if (!EmitTree(cx
, bce
, propdef
->pn_right
))
6603 JSOp op
= propdef
->getOp();
6604 MOZ_ASSERT(op
== JSOP_INITPROP
||
6605 op
== JSOP_INITPROP_GETTER
||
6606 op
== JSOP_INITPROP_SETTER
);
6608 if (op
== JSOP_INITPROP_GETTER
|| op
== JSOP_INITPROP_SETTER
)
6614 case JSOP_INITPROP
: op
= JSOP_INITELEM
; break;
6615 case JSOP_INITPROP_GETTER
: op
= JSOP_INITELEM_GETTER
; break;
6616 case JSOP_INITPROP_SETTER
: op
= JSOP_INITELEM_SETTER
; break;
6617 default: MOZ_CRASH("Invalid op");
6619 if (Emit1(cx
, bce
, op
) < 0)
6622 MOZ_ASSERT(key
->isKind(PNK_NAME
) || key
->isKind(PNK_STRING
));
6625 if (!bce
->makeAtomIndex(key
->pn_atom
, &index
))
6629 MOZ_ASSERT(!obj
->inDictionaryMode());
6630 Rooted
<jsid
> id(cx
, AtomToId(key
->pn_atom
));
6631 RootedValue
undefinedValue(cx
, UndefinedValue());
6632 if (!DefineNativeProperty(cx
, obj
, id
, undefinedValue
, nullptr, nullptr,
6637 if (obj
->inDictionaryMode())
6641 if (!EmitIndex32(cx
, op
, index
, bce
))
6648 * The object survived and has a predictable shape: update the original
6651 ObjectBox
* objbox
= bce
->parser
->newObjectBox(obj
);
6655 static_assert(JSOP_NEWINIT_LENGTH
== JSOP_NEWOBJECT_LENGTH
,
6656 "newinit and newobject must have equal length to edit in-place");
6658 uint32_t index
= bce
->objectList
.add(objbox
);
6659 jsbytecode
* code
= bce
->code(offset
);
6660 code
[0] = JSOP_NEWOBJECT
;
6661 code
[1] = jsbytecode(index
>> 24);
6662 code
[2] = jsbytecode(index
>> 16);
6663 code
[3] = jsbytecode(index
>> 8);
6664 code
[4] = jsbytecode(index
);
6671 EmitArrayComp(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
6673 if (!EmitNewInit(cx
, bce
, JSProto_Array
))
6677 * Pass the new array's stack index to the PNK_ARRAYPUSH case via
6678 * bce->arrayCompDepth, then simply traverse the PNK_FOR node and
6679 * its kids under pn2 to generate this comprehension.
6681 MOZ_ASSERT(bce
->stackDepth
> 0);
6682 uint32_t saveDepth
= bce
->arrayCompDepth
;
6683 bce
->arrayCompDepth
= (uint32_t) (bce
->stackDepth
- 1);
6684 if (!EmitTree(cx
, bce
, pn
->pn_head
))
6686 bce
->arrayCompDepth
= saveDepth
;
6692 * EmitSpread expects the current index (I) of the array, the array itself and the iterator to be
6693 * on the stack in that order (iterator on the bottom).
6694 * It will pop the iterator and I, then iterate over the iterator by calling |.next()|
6695 * and put the results into the I-th element of array with incrementing I, then
6696 * push the result I (it will be original I + iteration count).
6697 * The stack after iteration will look like |ARRAY INDEX|.
6700 EmitSpread(ExclusiveContext
* cx
, BytecodeEmitter
* bce
)
6702 return EmitForOf(cx
, bce
, STMT_SPREAD
, nullptr, -1);
6706 EmitArray(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
, uint32_t count
)
6709 * Emit code for [a, b, c] that is equivalent to constructing a new
6710 * array and in source order evaluating each element value and adding
6711 * it to the array, without invoking latent setters. We use the
6712 * JSOP_NEWINIT and JSOP_INITELEM_ARRAY bytecodes to ignore setters and
6713 * to avoid dup'ing and popping the array as each element is added, as
6714 * JSOP_SETELEM/JSOP_SETPROP would do.
6717 int32_t nspread
= 0;
6718 for (ParseNode
* elt
= pn
; elt
; elt
= elt
->pn_next
) {
6719 if (elt
->isKind(PNK_SPREAD
))
6723 ptrdiff_t off
= EmitN(cx
, bce
, JSOP_NEWARRAY
, 3); // ARRAY
6726 CheckTypeSet(cx
, bce
, JSOP_NEWARRAY
);
6727 jsbytecode
* pc
= bce
->code(off
);
6729 // For arrays with spread, this is a very pessimistic allocation, the
6730 // minimum possible final size.
6731 SET_UINT24(pc
, count
- nspread
);
6733 ParseNode
* pn2
= pn
;
6735 bool afterSpread
= false;
6736 for (atomIndex
= 0; pn2
; atomIndex
++, pn2
= pn2
->pn_next
) {
6737 if (!afterSpread
&& pn2
->isKind(PNK_SPREAD
)) {
6739 if (!EmitNumberOp(cx
, atomIndex
, bce
)) // ARRAY INDEX
6742 if (!UpdateSourceCoordNotes(cx
, bce
, pn2
->pn_pos
.begin
))
6744 if (pn2
->isKind(PNK_ELISION
)) {
6745 if (Emit1(cx
, bce
, JSOP_HOLE
) < 0)
6748 ParseNode
* expr
= pn2
->isKind(PNK_SPREAD
) ? pn2
->pn_kid
: pn2
;
6749 if (!EmitTree(cx
, bce
, expr
)) // ARRAY INDEX? VALUE
6752 if (pn2
->isKind(PNK_SPREAD
)) {
6753 if (!EmitIterator(cx
, bce
)) // ARRAY INDEX ITER
6755 if (Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)2) < 0) // INDEX ITER ARRAY
6757 if (Emit2(cx
, bce
, JSOP_PICK
, (jsbytecode
)2) < 0) // ITER ARRAY INDEX
6759 if (!EmitSpread(cx
, bce
)) // ARRAY INDEX
6761 } else if (afterSpread
) {
6762 if (Emit1(cx
, bce
, JSOP_INITELEM_INC
) < 0)
6765 off
= EmitN(cx
, bce
, JSOP_INITELEM_ARRAY
, 3);
6768 SET_UINT24(bce
->code(off
), atomIndex
);
6771 MOZ_ASSERT(atomIndex
== count
);
6773 if (Emit1(cx
, bce
, JSOP_POP
) < 0) // ARRAY
6780 EmitUnary(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
6782 if (!UpdateSourceCoordNotes(cx
, bce
, pn
->pn_pos
.begin
))
6784 /* Unary op, including unary +/-. */
6785 JSOp op
= pn
->getOp();
6786 ParseNode
* pn2
= pn
->pn_kid
;
6788 if (op
== JSOP_TYPEOF
&& !pn2
->isKind(PNK_NAME
))
6789 op
= JSOP_TYPEOFEXPR
;
6791 bool oldEmittingForInit
= bce
->emittingForInit
;
6792 bce
->emittingForInit
= false;
6793 if (!EmitTree(cx
, bce
, pn2
))
6796 bce
->emittingForInit
= oldEmittingForInit
;
6797 return Emit1(cx
, bce
, op
) >= 0;
6801 EmitDefaults(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
6803 MOZ_ASSERT(pn
->isKind(PNK_ARGSBODY
));
6805 ParseNode
* arg
, *pnlast
= pn
->last();
6806 for (arg
= pn
->pn_head
; arg
!= pnlast
; arg
= arg
->pn_next
) {
6807 if (!(arg
->pn_dflags
& PND_DEFAULT
))
6809 if (!BindNameToSlot(cx
, bce
, arg
))
6811 if (!EmitVarOp(cx
, arg
, JSOP_GETARG
, bce
))
6813 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0)
6815 if (Emit1(cx
, bce
, JSOP_STRICTEQ
) < 0)
6817 // Emit source note to enable ion compilation.
6818 if (NewSrcNote(cx
, bce
, SRC_IF
) < 0)
6820 ptrdiff_t jump
= EmitJump(cx
, bce
, JSOP_IFEQ
, 0);
6823 if (!EmitTree(cx
, bce
, arg
->expr()))
6825 if (!EmitVarOp(cx
, arg
, JSOP_SETARG
, bce
))
6827 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
6829 SET_JUMP_OFFSET(bce
->code(jump
), bce
->offset() - jump
);
6836 frontend::EmitTree(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, ParseNode
* pn
)
6838 JS_CHECK_RECURSION(cx
, return false);
6840 EmitLevelManager
elm(bce
);
6843 ptrdiff_t top
= bce
->offset();
6844 pn
->pn_offset
= top
;
6846 /* Emit notes to tell the current bytecode's source line number. */
6847 if (!UpdateLineNumberNotes(cx
, bce
, pn
->pn_pos
.begin
))
6850 switch (pn
->getKind()) {
6852 ok
= EmitFunc(cx
, bce
, pn
);
6857 RootedFunction
fun(cx
, bce
->sc
->asFunctionBox()->function());
6858 ParseNode
* pnlast
= pn
->last();
6860 // Carefully emit everything in the right order:
6864 ParseNode
* pnchild
= pnlast
->pn_head
;
6865 if (pnlast
->pn_xflags
& PNX_DESTRUCT
) {
6866 // Assign the destructuring arguments before defining any functions,
6868 MOZ_ASSERT(pnchild
->isKind(PNK_SEMI
));
6869 MOZ_ASSERT(pnchild
->pn_kid
->isKind(PNK_VAR
) || pnchild
->pn_kid
->isKind(PNK_GLOBALCONST
));
6870 if (!EmitTree(cx
, bce
, pnchild
))
6872 pnchild
= pnchild
->pn_next
;
6874 bool hasDefaults
= bce
->sc
->asFunctionBox()->hasDefaults();
6876 ParseNode
* rest
= nullptr;
6877 bool restIsDefn
= false;
6878 if (fun
->hasRest()) {
6879 MOZ_ASSERT(!bce
->sc
->asFunctionBox()->argumentsHasLocalBinding());
6881 // Defaults with a rest parameter need special handling. The
6882 // rest parameter needs to be undefined while defaults are being
6883 // processed. To do this, we create the rest argument and let it
6884 // sit on the stack while processing defaults. The rest
6885 // parameter's slot is set to undefined for the course of
6886 // default processing.
6888 while (rest
->pn_next
!= pnlast
)
6889 rest
= rest
->pn_next
;
6890 restIsDefn
= rest
->isDefn();
6891 if (Emit1(cx
, bce
, JSOP_REST
) < 0)
6893 CheckTypeSet(cx
, bce
, JSOP_REST
);
6895 // Only set the rest parameter if it's not aliased by a nested
6896 // function in the body.
6898 if (Emit1(cx
, bce
, JSOP_UNDEFINED
) < 0)
6900 if (!BindNameToSlot(cx
, bce
, rest
))
6902 if (!EmitVarOp(cx
, rest
, JSOP_SETARG
, bce
))
6904 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
6908 if (!EmitDefaults(cx
, bce
, pn
))
6910 if (fun
->hasRest()) {
6911 if (restIsDefn
&& !EmitVarOp(cx
, rest
, JSOP_SETARG
, bce
))
6913 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
6917 for (ParseNode
* pn2
= pn
->pn_head
; pn2
!= pnlast
; pn2
= pn2
->pn_next
) {
6918 // Only bind the parameter if it's not aliased by a nested function
6922 if (!BindNameToSlot(cx
, bce
, pn2
))
6924 if (pn2
->pn_next
== pnlast
&& fun
->hasRest() && !hasDefaults
) {
6925 // Fill rest parameter. We handled the case with defaults above.
6926 MOZ_ASSERT(!bce
->sc
->asFunctionBox()->argumentsHasLocalBinding());
6927 bce
->switchToProlog();
6928 if (Emit1(cx
, bce
, JSOP_REST
) < 0)
6930 CheckTypeSet(cx
, bce
, JSOP_REST
);
6931 if (!EmitVarOp(cx
, pn2
, JSOP_SETARG
, bce
))
6933 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
6935 bce
->switchToMain();
6938 if (pnlast
->pn_xflags
& PNX_FUNCDEFS
) {
6939 // This block contains top-level function definitions. To ensure
6940 // that we emit the bytecode defining them before the rest of code
6941 // in the block we use a separate pass over functions. During the
6942 // main pass later the emitter will add JSOP_NOP with source notes
6943 // for the function to preserve the original functions position
6944 // when decompiling.
6946 // Currently this is used only for functions, as compile-as-we go
6947 // mode for scripts does not allow separate emitter passes.
6948 for (ParseNode
* pn2
= pnchild
; pn2
; pn2
= pn2
->pn_next
) {
6949 if (pn2
->isKind(PNK_FUNCTION
) && pn2
->functionIsHoisted()) {
6950 if (!EmitTree(cx
, bce
, pn2
))
6955 ok
= EmitTree(cx
, bce
, pnlast
);
6960 ok
= EmitIf(cx
, bce
, pn
);
6964 ok
= EmitSwitch(cx
, bce
, pn
);
6968 ok
= EmitWhile(cx
, bce
, pn
, top
);
6972 ok
= EmitDo(cx
, bce
, pn
);
6976 ok
= EmitFor(cx
, bce
, pn
, top
);
6980 ok
= EmitBreak(cx
, bce
, pn
->as
<BreakStatement
>().label());
6984 ok
= EmitContinue(cx
, bce
, pn
->as
<ContinueStatement
>().label());
6988 ok
= EmitWith(cx
, bce
, pn
);
6992 if (!EmitTry(cx
, bce
, pn
))
6997 if (!EmitCatch(cx
, bce
, pn
))
7002 case PNK_GLOBALCONST
:
7003 if (!EmitVariables(cx
, bce
, pn
, InitializeVars
))
7008 ok
= EmitReturn(cx
, bce
, pn
);
7011 case PNK_YIELD_STAR
:
7012 ok
= EmitYieldStar(cx
, bce
, pn
->pn_left
, pn
->pn_right
);
7016 if (Emit1(cx
, bce
, JSOP_GENERATOR
) < 0)
7021 ok
= EmitYield(cx
, bce
, pn
);
7024 case PNK_STATEMENTLIST
:
7025 ok
= EmitStatementList(cx
, bce
, pn
, top
);
7029 ok
= EmitSyntheticStatements(cx
, bce
, pn
, top
);
7033 ok
= EmitStatement(cx
, bce
, pn
);
7037 ok
= EmitLabeledStatement(cx
, bce
, &pn
->as
<LabeledStatement
>());
7042 for (ParseNode
* pn2
= pn
->pn_head
; ; pn2
= pn2
->pn_next
) {
7043 if (!UpdateSourceCoordNotes(cx
, bce
, pn2
->pn_pos
.begin
))
7045 if (!EmitTree(cx
, bce
, pn2
))
7049 if (Emit1(cx
, bce
, JSOP_POP
) < 0)
7058 case PNK_BITORASSIGN
:
7059 case PNK_BITXORASSIGN
:
7060 case PNK_BITANDASSIGN
:
7063 case PNK_URSHASSIGN
:
7067 if (!EmitAssignment(cx
, bce
, pn
->pn_left
, pn
->getOp(), pn
->pn_right
))
7071 case PNK_CONDITIONAL
:
7072 ok
= EmitConditionalExpression(cx
, bce
, pn
->as
<ConditionalExpression
>());
7077 ok
= EmitLogical(cx
, bce
, pn
);
7094 case PNK_INSTANCEOF
:
7101 if (pn
->isArity(PN_LIST
)) {
7102 /* Left-associative operator chain: avoid too much recursion. */
7103 ParseNode
* pn2
= pn
->pn_head
;
7104 if (!EmitTree(cx
, bce
, pn2
))
7106 JSOp op
= pn
->getOp();
7107 while ((pn2
= pn2
->pn_next
) != nullptr) {
7108 if (!EmitTree(cx
, bce
, pn2
))
7110 if (Emit1(cx
, bce
, op
) < 0)
7114 /* Binary operators that evaluate both operands unconditionally. */
7115 if (!EmitTree(cx
, bce
, pn
->pn_left
))
7117 if (!EmitTree(cx
, bce
, pn
->pn_right
))
7119 if (Emit1(cx
, bce
, pn
->getOp()) < 0)
7131 ok
= EmitUnary(cx
, bce
, pn
);
7134 case PNK_PREINCREMENT
:
7135 case PNK_PREDECREMENT
:
7136 case PNK_POSTINCREMENT
:
7137 case PNK_POSTDECREMENT
:
7138 ok
= EmitIncOrDec(cx
, bce
, pn
);
7142 ok
= EmitDelete(cx
, bce
, pn
);
7146 ok
= EmitPropOp(cx
, pn
, JSOP_GETPROP
, bce
);
7150 ok
= EmitElemOp(cx
, pn
, JSOP_GETELEM
, bce
);
7154 case PNK_TAGGED_TEMPLATE
:
7157 ok
= EmitCallOrNew(cx
, bce
, pn
);
7160 case PNK_LEXICALSCOPE
:
7161 ok
= EmitLexicalScope(cx
, bce
, pn
);
7166 MOZ_ASSERT_IF(pn
->isKind(PNK_CONST
), !pn
->isArity(PN_BINARY
));
7167 ok
= pn
->isArity(PN_BINARY
)
7168 ? EmitLet(cx
, bce
, pn
)
7169 : EmitVariables(cx
, bce
, pn
, InitializeVars
);
7174 case PNK_EXPORT_FROM
:
7175 // TODO: Implement emitter support for modules
7176 bce
->reportError(nullptr, JSMSG_MODULES_NOT_IMPLEMENTED
);
7179 case PNK_ARRAYPUSH
: {
7181 * The array object's stack index is in bce->arrayCompDepth. See below
7182 * under the array initialiser code generator for array comprehension
7183 * special casing. Note that the array object is a pure stack value,
7184 * unaliased by blocks, so we can EmitUnaliasedVarOp.
7186 if (!EmitTree(cx
, bce
, pn
->pn_kid
))
7188 if (!EmitDupAt(cx
, bce
, bce
->arrayCompDepth
))
7190 if (Emit1(cx
, bce
, JSOP_ARRAYPUSH
) < 0)
7195 case PNK_CALLSITEOBJ
:
7196 ok
= EmitCallSiteObject(cx
, bce
, pn
);
7200 if (!(pn
->pn_xflags
& PNX_NONCONST
) && pn
->pn_head
) {
7201 if (bce
->checkSingletonContext()) {
7202 // Bake in the object entirely if it will only be created once.
7203 ok
= EmitSingletonInitialiser(cx
, bce
, pn
);
7207 // If the array consists entirely of primitive values, make a
7208 // template object with copy on write elements that can be reused
7209 // every time the initializer executes.
7210 if (bce
->emitterMode
!= BytecodeEmitter::SelfHosting
&& pn
->pn_count
!= 0) {
7211 RootedValue
value(cx
);
7212 if (!pn
->getConstantValue(cx
, ParseNode::DontAllowNestedObjects
, &value
))
7214 if (!value
.isMagic(JS_GENERIC_MAGIC
)) {
7215 // Note: the type of the template object might not yet reflect
7216 // that the object has copy on write elements. When the
7217 // interpreter or JIT compiler fetches the template, it should
7218 // use types::GetOrFixupCopyOnWriteObject to make sure the type
7219 // for the template is accurate. We don't do this here as we
7220 // want to use types::InitObject, which requires a finished
7222 NativeObject
* obj
= &value
.toObject().as
<NativeObject
>();
7223 if (!ObjectElements::MakeElementsCopyOnWrite(cx
, obj
))
7226 ObjectBox
* objbox
= bce
->parser
->newObjectBox(obj
);
7230 ok
= EmitObjectOp(cx
, objbox
, JSOP_NEWARRAY_COPYONWRITE
, bce
);
7236 ok
= EmitArray(cx
, bce
, pn
->pn_head
, pn
->pn_count
);
7240 ok
= EmitArrayComp(cx
, bce
, pn
);
7244 ok
= EmitObject(cx
, bce
, pn
);
7248 if (!EmitNameOp(cx
, bce
, pn
, false))
7252 case PNK_TEMPLATE_STRING_LIST
:
7253 ok
= EmitTemplateString(cx
, bce
, pn
);
7256 case PNK_TEMPLATE_STRING
:
7258 ok
= EmitAtomOp(cx
, pn
, JSOP_STRING
, bce
);
7262 ok
= EmitNumberOp(cx
, pn
->pn_dval
, bce
);
7266 ok
= EmitRegExp(cx
, bce
->regexpList
.add(pn
->as
<RegExpLiteral
>().objbox()), bce
);
7273 if (Emit1(cx
, bce
, pn
->getOp()) < 0)
7278 if (!UpdateSourceCoordNotes(cx
, bce
, pn
->pn_pos
.begin
))
7280 if (Emit1(cx
, bce
, JSOP_DEBUGGER
) < 0)
7285 MOZ_ASSERT(pn
->getArity() == PN_NULLARY
);
7292 /* bce->emitLevel == 1 means we're last on the stack, so finish up. */
7293 if (ok
&& bce
->emitLevel
== 1) {
7294 if (!UpdateSourceCoordNotes(cx
, bce
, pn
->pn_pos
.end
))
7302 AllocSrcNote(ExclusiveContext
* cx
, SrcNotesVector
& notes
)
7304 // Start it off moderately large to avoid repeated resizings early on.
7305 // ~99% of cases fit within 256 bytes.
7306 if (notes
.capacity() == 0 && !notes
.reserve(256))
7309 jssrcnote dummy
= 0;
7310 if (!notes
.append(dummy
)) {
7311 js_ReportOutOfMemory(cx
);
7314 return notes
.length() - 1;
7318 frontend::NewSrcNote(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, SrcNoteType type
)
7320 SrcNotesVector
& notes
= bce
->notes();
7323 index
= AllocSrcNote(cx
, notes
);
7328 * Compute delta from the last annotated bytecode's offset. If it's too
7329 * big to fit in sn, allocate one or more xdelta notes and reset sn.
7331 ptrdiff_t offset
= bce
->offset();
7332 ptrdiff_t delta
= offset
- bce
->lastNoteOffset();
7333 bce
->current
->lastNoteOffset
= offset
;
7334 if (delta
>= SN_DELTA_LIMIT
) {
7336 ptrdiff_t xdelta
= Min(delta
, SN_XDELTA_MASK
);
7337 SN_MAKE_XDELTA(¬es
[index
], xdelta
);
7339 index
= AllocSrcNote(cx
, notes
);
7342 } while (delta
>= SN_DELTA_LIMIT
);
7346 * Initialize type and delta, then allocate the minimum number of notes
7347 * needed for type's arity. Usually, we won't need more, but if an offset
7348 * does take two bytes, SetSrcNoteOffset will grow notes.
7350 SN_MAKE_NOTE(¬es
[index
], type
, delta
);
7351 for (int n
= (int)js_SrcNoteSpec
[type
].arity
; n
> 0; n
--) {
7352 if (NewSrcNote(cx
, bce
, SRC_NULL
) < 0)
7359 frontend::NewSrcNote2(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, SrcNoteType type
, ptrdiff_t offset
)
7363 index
= NewSrcNote(cx
, bce
, type
);
7365 if (!SetSrcNoteOffset(cx
, bce
, index
, 0, offset
))
7372 frontend::NewSrcNote3(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, SrcNoteType type
, ptrdiff_t offset1
,
7377 index
= NewSrcNote(cx
, bce
, type
);
7379 if (!SetSrcNoteOffset(cx
, bce
, index
, 0, offset1
))
7381 if (!SetSrcNoteOffset(cx
, bce
, index
, 1, offset2
))
7388 frontend::AddToSrcNoteDelta(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, jssrcnote
* sn
, ptrdiff_t delta
)
7391 * Called only from FinishTakingSrcNotes to add to main script note
7392 * deltas, and only by a small positive amount.
7394 MOZ_ASSERT(bce
->current
== &bce
->main
);
7395 MOZ_ASSERT((unsigned) delta
< (unsigned) SN_XDELTA_LIMIT
);
7397 ptrdiff_t base
= SN_DELTA(sn
);
7398 ptrdiff_t limit
= SN_IS_XDELTA(sn
) ? SN_XDELTA_LIMIT
: SN_DELTA_LIMIT
;
7399 ptrdiff_t newdelta
= base
+ delta
;
7400 if (newdelta
< limit
) {
7401 SN_SET_DELTA(sn
, newdelta
);
7404 SN_MAKE_XDELTA(&xdelta
, delta
);
7405 if (!(sn
= bce
->main
.notes
.insert(sn
, xdelta
)))
7412 SetSrcNoteOffset(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, unsigned index
, unsigned which
,
7415 if (!SN_REPRESENTABLE_OFFSET(offset
)) {
7416 ReportStatementTooLarge(bce
->parser
->tokenStream
, bce
->topStmt
);
7420 SrcNotesVector
& notes
= bce
->notes();
7422 /* Find the offset numbered which (i.e., skip exactly which offsets). */
7423 jssrcnote
* sn
= notes
.begin() + index
;
7424 MOZ_ASSERT(SN_TYPE(sn
) != SRC_XDELTA
);
7425 MOZ_ASSERT((int) which
< js_SrcNoteSpec
[SN_TYPE(sn
)].arity
);
7426 for (sn
++; which
; sn
++, which
--) {
7427 if (*sn
& SN_4BYTE_OFFSET_FLAG
)
7432 * See if the new offset requires four bytes either by being too big or if
7433 * the offset has already been inflated (in which case, we need to stay big
7434 * to not break the srcnote encoding if this isn't the last srcnote).
7436 if (offset
> (ptrdiff_t)SN_4BYTE_OFFSET_MASK
|| (*sn
& SN_4BYTE_OFFSET_FLAG
)) {
7437 /* Maybe this offset was already set to a four-byte value. */
7438 if (!(*sn
& SN_4BYTE_OFFSET_FLAG
)) {
7439 /* Insert three dummy bytes that will be overwritten shortly. */
7440 jssrcnote dummy
= 0;
7441 if (!(sn
= notes
.insert(sn
, dummy
)) ||
7442 !(sn
= notes
.insert(sn
, dummy
)) ||
7443 !(sn
= notes
.insert(sn
, dummy
)))
7445 js_ReportOutOfMemory(cx
);
7449 *sn
++ = (jssrcnote
)(SN_4BYTE_OFFSET_FLAG
| (offset
>> 24));
7450 *sn
++ = (jssrcnote
)(offset
>> 16);
7451 *sn
++ = (jssrcnote
)(offset
>> 8);
7453 *sn
= (jssrcnote
)offset
;
7458 * Finish taking source notes in cx's notePool.
7459 * If successful, the final source note count is stored in the out outparam.
7462 frontend::FinishTakingSrcNotes(ExclusiveContext
* cx
, BytecodeEmitter
* bce
, uint32_t* out
)
7464 MOZ_ASSERT(bce
->current
== &bce
->main
);
7466 unsigned prologCount
= bce
->prolog
.notes
.length();
7467 if (prologCount
&& bce
->prolog
.currentLine
!= bce
->firstLine
) {
7468 bce
->switchToProlog();
7469 if (NewSrcNote2(cx
, bce
, SRC_SETLINE
, (ptrdiff_t)bce
->firstLine
) < 0)
7471 bce
->switchToMain();
7474 * Either no prolog srcnotes, or no line number change over prolog.
7475 * We don't need a SRC_SETLINE, but we may need to adjust the offset
7476 * of the first main note, by adding to its delta and possibly even
7477 * prepending SRC_XDELTA notes to it to account for prolog bytecodes
7478 * that came at and after the last annotated bytecode.
7480 ptrdiff_t offset
= bce
->prologOffset() - bce
->prolog
.lastNoteOffset
;
7481 MOZ_ASSERT(offset
>= 0);
7482 if (offset
> 0 && bce
->main
.notes
.length() != 0) {
7483 /* NB: Use as much of the first main note's delta as we can. */
7484 jssrcnote
* sn
= bce
->main
.notes
.begin();
7485 ptrdiff_t delta
= SN_IS_XDELTA(sn
)
7486 ? SN_XDELTA_MASK
- (*sn
& SN_XDELTA_MASK
)
7487 : SN_DELTA_MASK
- (*sn
& SN_DELTA_MASK
);
7491 if (!AddToSrcNoteDelta(cx
, bce
, sn
, delta
))
7496 delta
= Min(offset
, SN_XDELTA_MASK
);
7497 sn
= bce
->main
.notes
.begin();
7502 // The prolog count might have changed, so we can't reuse prologCount.
7503 // The + 1 is to account for the final SN_MAKE_TERMINATOR that is appended
7504 // when the notes are copied to their final destination by CopySrcNotes.
7505 *out
= bce
->prolog
.notes
.length() + bce
->main
.notes
.length() + 1;
7510 frontend::CopySrcNotes(BytecodeEmitter
* bce
, jssrcnote
* destination
, uint32_t nsrcnotes
)
7512 unsigned prologCount
= bce
->prolog
.notes
.length();
7513 unsigned mainCount
= bce
->main
.notes
.length();
7514 unsigned totalCount
= prologCount
+ mainCount
;
7515 MOZ_ASSERT(totalCount
== nsrcnotes
- 1);
7517 PodCopy(destination
, bce
->prolog
.notes
.begin(), prologCount
);
7518 PodCopy(destination
+ prologCount
, bce
->main
.notes
.begin(), mainCount
);
7519 SN_MAKE_TERMINATOR(&destination
[totalCount
]);
7523 CGConstList::finish(ConstArray
* array
)
7525 MOZ_ASSERT(length() == array
->length
);
7527 for (unsigned i
= 0; i
< length(); i
++)
7528 array
->vector
[i
] = list
[i
];
7532 * Find the index of the given object for code generator.
7534 * Since the emitter refers to each parsed object only once, for the index we
7535 * use the number of already indexes objects. We also add the object to a list
7536 * to convert the list to a fixed-size array when we complete code generation,
7537 * see js::CGObjectList::finish below.
7539 * Most of the objects go to BytecodeEmitter::objectList but for regexp we use
7540 * a separated BytecodeEmitter::regexpList. In this way the emitted index can
7541 * be directly used to store and fetch a reference to a cloned RegExp object
7542 * that shares the same JSRegExp private data created for the object literal in
7543 * objbox. We need a cloned object to hold lastIndex and other direct
7544 * properties that should not be shared among threads sharing a precompiled
7545 * function or script.
7547 * If the code being compiled is function code, allocate a reserved slot in
7548 * the cloned function object that shares its precompiled script with other
7549 * cloned function objects and with the compiler-created clone-parent. There
7550 * are nregexps = script->regexps()->length such reserved slots in each
7551 * function object cloned from fun->object. NB: during compilation, a funobj
7552 * slots element must never be allocated, because JSObject::allocSlot could
7553 * hand out one of the slots that should be given to a regexp clone.
7555 * If the code being compiled is global code, the cloned regexp are stored in
7556 * fp->vars slot and to protect regexp slots from GC we set fp->nvars to
7559 * The slots initially contain undefined or null. We populate them lazily when
7560 * JSOP_REGEXP is executed for the first time.
7562 * Why clone regexp objects? ECMA specifies that when a regular expression
7563 * literal is scanned, a RegExp object is created. In the spec, compilation
7564 * and execution happen indivisibly, but in this implementation and many of
7565 * its embeddings, code is precompiled early and re-executed in multiple
7566 * threads, or using multiple global objects, or both, for efficiency.
7568 * In such cases, naively following ECMA leads to wrongful sharing of RegExp
7569 * objects, which makes for collisions on the lastIndex property (especially
7570 * for global regexps) and on any ad-hoc properties. Also, __proto__ refers to
7571 * the pre-compilation prototype, a pigeon-hole problem for instanceof tests.
7574 CGObjectList::add(ObjectBox
* objbox
)
7576 MOZ_ASSERT(!objbox
->emitLink
);
7577 objbox
->emitLink
= lastbox
;
7583 CGObjectList::indexOf(JSObject
* obj
)
7585 MOZ_ASSERT(length
> 0);
7586 unsigned index
= length
- 1;
7587 for (ObjectBox
* box
= lastbox
; box
->object
!= obj
; box
= box
->emitLink
)
7593 CGObjectList::finish(ObjectArray
* array
)
7595 MOZ_ASSERT(length
<= INDEX_LIMIT
);
7596 MOZ_ASSERT(length
== array
->length
);
7598 js::HeapPtrNativeObject
* cursor
= array
->vector
+ array
->length
;
7599 ObjectBox
* objbox
= lastbox
;
7602 MOZ_ASSERT(!*cursor
);
7603 *cursor
= objbox
->object
;
7604 } while ((objbox
= objbox
->emitLink
) != nullptr);
7605 MOZ_ASSERT(cursor
== array
->vector
);
7609 CGObjectList::find(uint32_t index
)
7611 MOZ_ASSERT(index
< length
);
7612 ObjectBox
* box
= lastbox
;
7613 for (unsigned n
= length
- 1; n
> index
; n
--)
7614 box
= box
->emitLink
;
7619 CGTryNoteList::append(JSTryNoteKind kind
, uint32_t stackDepth
, size_t start
, size_t end
)
7621 MOZ_ASSERT(start
<= end
);
7622 MOZ_ASSERT(size_t(uint32_t(start
)) == start
);
7623 MOZ_ASSERT(size_t(uint32_t(end
)) == end
);
7627 note
.stackDepth
= stackDepth
;
7628 note
.start
= uint32_t(start
);
7629 note
.length
= uint32_t(end
- start
);
7631 return list
.append(note
);
7635 CGTryNoteList::finish(TryNoteArray
* array
)
7637 MOZ_ASSERT(length() == array
->length
);
7639 for (unsigned i
= 0; i
< length(); i
++)
7640 array
->vector
[i
] = list
[i
];
7644 CGBlockScopeList::append(uint32_t scopeObject
, uint32_t offset
, uint32_t parent
)
7646 BlockScopeNote note
;
7647 mozilla::PodZero(¬e
);
7649 note
.index
= scopeObject
;
7650 note
.start
= offset
;
7651 note
.parent
= parent
;
7653 return list
.append(note
);
7657 CGBlockScopeList::findEnclosingScope(uint32_t index
)
7659 MOZ_ASSERT(index
< length());
7660 MOZ_ASSERT(list
[index
].index
!= BlockScopeNote::NoBlockScopeIndex
);
7662 DebugOnly
<uint32_t> pos
= list
[index
].start
;
7664 MOZ_ASSERT(list
[index
].start
<= pos
);
7665 if (list
[index
].length
== 0) {
7666 // We are looking for the nearest enclosing live scope. If the
7667 // scope contains POS, it should still be open, so its length should
7669 return list
[index
].index
;
7671 // Conversely, if the length is not zero, it should not contain
7673 MOZ_ASSERT(list
[index
].start
+ list
[index
].length
<= pos
);
7677 return BlockScopeNote::NoBlockScopeIndex
;
7681 CGBlockScopeList::recordEnd(uint32_t index
, uint32_t offset
)
7683 MOZ_ASSERT(index
< length());
7684 MOZ_ASSERT(offset
>= list
[index
].start
);
7685 MOZ_ASSERT(list
[index
].length
== 0);
7687 list
[index
].length
= offset
- list
[index
].start
;
7691 CGBlockScopeList::finish(BlockScopeArray
* array
)
7693 MOZ_ASSERT(length() == array
->length
);
7695 for (unsigned i
= 0; i
< length(); i
++)
7696 array
->vector
[i
] = list
[i
];
7700 CGYieldOffsetList::finish(YieldOffsetArray
& array
, uint32_t prologLength
)
7702 MOZ_ASSERT(length() == array
.length());
7704 for (unsigned i
= 0; i
< length(); i
++)
7705 array
[i
] = prologLength
+ list
[i
];
7709 * We should try to get rid of offsetBias (always 0 or 1, where 1 is
7710 * JSOP_{NOP,POP}_LENGTH), which is used only by SRC_FOR.
7712 const JSSrcNoteSpec js_SrcNoteSpec
[] = {
7713 #define DEFINE_SRC_NOTE_SPEC(sym, name, arity) { name, arity },
7714 FOR_EACH_SRC_NOTE_TYPE(DEFINE_SRC_NOTE_SPEC
)
7715 #undef DEFINE_SRC_NOTE_SPEC
7719 SrcNoteArity(jssrcnote
* sn
)
7721 MOZ_ASSERT(SN_TYPE(sn
) < SRC_LAST
);
7722 return js_SrcNoteSpec
[SN_TYPE(sn
)].arity
;
7725 JS_FRIEND_API(unsigned)
7726 js_SrcNoteLength(jssrcnote
* sn
)
7731 arity
= SrcNoteArity(sn
);
7732 for (base
= sn
++; arity
; sn
++, arity
--) {
7733 if (*sn
& SN_4BYTE_OFFSET_FLAG
)
7739 JS_FRIEND_API(ptrdiff_t)
7740 js_GetSrcNoteOffset(jssrcnote
* sn
, unsigned which
)
7742 /* Find the offset numbered which (i.e., skip exactly which offsets). */
7743 MOZ_ASSERT(SN_TYPE(sn
) != SRC_XDELTA
);
7744 MOZ_ASSERT((int) which
< SrcNoteArity(sn
));
7745 for (sn
++; which
; sn
++, which
--) {
7746 if (*sn
& SN_4BYTE_OFFSET_FLAG
)
7749 if (*sn
& SN_4BYTE_OFFSET_FLAG
) {
7750 return (ptrdiff_t)(((uint32_t)(sn
[0] & SN_4BYTE_OFFSET_MASK
) << 24)
7755 return (ptrdiff_t)*sn
;